repo_name
stringlengths
5
100
path
stringlengths
4
294
copies
stringclasses
990 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
ldgarcia/django-allauth
allauth/socialaccount/providers/xing/tests.py
62
1975
# -*- coding: utf-8 -*- from __future__ import unicode_literals from allauth.socialaccount.tests import create_oauth_tests from allauth.tests import MockedResponse from allauth.socialaccount.providers import registry from .provider import XingProvider class XingTests(create_oauth_tests(registry.by_id(XingProvider.id))): def get_mocked_response(self): return [MockedResponse(200, """ {"users":[{"id":"20493333_1cd028","active_email":"raymond.penners@gmail.com", "badges":[],"birth_date":{"year":null,"month":null,"day":null}, "business_address":{"street":null,"zip_code":null,"city":null,"province":null, "country":"NL","email":null,"fax":null,"phone":null,"mobile_phone":null}, "display_name":"Raymond Penners","educational_background": {"primary_school_id":null,"schools":[],"qualifications":[]}, "employment_status":"EMPLOYEE","first_name":"Raymond","gender":"m", "haves":null,"instant_messaging_accounts":{},"interests":null,"languages": {"nl":null},"last_name":"Penners","organisation_member":null, "page_name":"Raymond_Penners", "permalink":"https://www.xing.com/profile/Raymond_Penners", "photo_urls":{"thumb":"https://www.xing.com/img/n/nobody_m.30x40.jpg", "large":"https://www.xing.com/img/n/nobody_m.140x185.jpg","mini_thumb": "https://www.xing.com/img/n/nobody_m.18x24.jpg","maxi_thumb": "https://www.xing.com/img/n/nobody_m.70x93.jpg","medium_thumb": "https://www.xing.com/img/n/nobody_m.57x75.jpg"},"premium_services":[], "private_address":{"street":null,"zip_code":null,"city":null,"province":null, "country":null,"email":"raymond.penners@gmail.com","fax":null, "phone":null,"mobile_phone":null},"professional_experience": {"primary_company":{"name":null,"url":null,"tag":null,"title":null, "begin_date":null,"end_date":null,"description":null,"industry":"OTHERS", "company_size":null,"career_level":null},"non_primary_companies":[], "awards":[]},"time_zone":{"utc_offset":2.0,"name":"Europe/Berlin"}, "wants":null,"web_profiles":{}}]} """)]
mit
robin900/sqlalchemy
examples/vertical/dictlike.py
30
5034
"""Mapping a vertical table as a dictionary. This example illustrates accessing and modifying a "vertical" (or "properties", or pivoted) table via a dict-like interface. These are tables that store free-form object properties as rows instead of columns. For example, instead of:: # A regular ("horizontal") table has columns for 'species' and 'size' Table('animal', metadata, Column('id', Integer, primary_key=True), Column('species', Unicode), Column('size', Unicode)) A vertical table models this as two tables: one table for the base or parent entity, and another related table holding key/value pairs:: Table('animal', metadata, Column('id', Integer, primary_key=True)) # The properties table will have one row for a 'species' value, and # another row for the 'size' value. Table('properties', metadata Column('animal_id', Integer, ForeignKey('animal.id'), primary_key=True), Column('key', UnicodeText), Column('value', UnicodeText)) Because the key/value pairs in a vertical scheme are not fixed in advance, accessing them like a Python dict can be very convenient. The example below can be used with many common vertical schemas as-is or with minor adaptations. """ from __future__ import unicode_literals class ProxiedDictMixin(object): """Adds obj[key] access to a mapped class. This class basically proxies dictionary access to an attribute called ``_proxied``. The class which inherits this class should have an attribute called ``_proxied`` which points to a dictionary. 
""" def __len__(self): return len(self._proxied) def __iter__(self): return iter(self._proxied) def __getitem__(self, key): return self._proxied[key] def __contains__(self, key): return key in self._proxied def __setitem__(self, key, value): self._proxied[key] = value def __delitem__(self, key): del self._proxied[key] if __name__ == '__main__': from sqlalchemy import (Column, Integer, Unicode, ForeignKey, UnicodeText, and_, create_engine) from sqlalchemy.orm import relationship, Session from sqlalchemy.orm.collections import attribute_mapped_collection from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.ext.associationproxy import association_proxy Base = declarative_base() class AnimalFact(Base): """A fact about an animal.""" __tablename__ = 'animal_fact' animal_id = Column(ForeignKey('animal.id'), primary_key=True) key = Column(Unicode(64), primary_key=True) value = Column(UnicodeText) class Animal(ProxiedDictMixin, Base): """an Animal""" __tablename__ = 'animal' id = Column(Integer, primary_key=True) name = Column(Unicode(100)) facts = relationship("AnimalFact", collection_class=attribute_mapped_collection('key')) _proxied = association_proxy("facts", "value", creator= lambda key, value: AnimalFact(key=key, value=value)) def __init__(self, name): self.name = name def __repr__(self): return "Animal(%r)" % self.name @classmethod def with_characteristic(self, key, value): return self.facts.any(key=key, value=value) engine = create_engine("sqlite://") Base.metadata.create_all(engine) session = Session(bind=engine) stoat = Animal('stoat') stoat['color'] = 'reddish' stoat['cuteness'] = 'somewhat' # dict-like assignment transparently creates entries in the # stoat.facts collection: print(stoat.facts['color']) session.add(stoat) session.commit() critter = session.query(Animal).filter(Animal.name == 'stoat').one() print(critter['color']) print(critter['cuteness']) critter['cuteness'] = 'very' print('changing cuteness:') marten = Animal('marten') 
marten['color'] = 'brown' marten['cuteness'] = 'somewhat' session.add(marten) shrew = Animal('shrew') shrew['cuteness'] = 'somewhat' shrew['poisonous-part'] = 'saliva' session.add(shrew) loris = Animal('slow loris') loris['cuteness'] = 'fairly' loris['poisonous-part'] = 'elbows' session.add(loris) q = (session.query(Animal). filter(Animal.facts.any( and_(AnimalFact.key == 'color', AnimalFact.value == 'reddish')))) print('reddish animals', q.all()) q = session.query(Animal).\ filter(Animal.with_characteristic("color", 'brown')) print('brown animals', q.all()) q = session.query(Animal).\ filter(~Animal.with_characteristic("poisonous-part", 'elbows')) print('animals without poisonous-part == elbows', q.all()) q = (session.query(Animal). filter(Animal.facts.any(value='somewhat'))) print('any animal with any .value of "somewhat"', q.all())
mit
jbarcia/weevely3
testsuite/test_file_gzip.py
14
5299
from testsuite.base_test import BaseTest from core import modules from core.sessions import SessionURL from testfixtures import log_capture from core import messages import logging import config import os class FileGzip(BaseTest): def setUp(self): session = SessionURL(self.url, self.password, volatile = True) modules.load_modules(session) # Create and gzip binary files for the test self.string = [ '\\xe0\\xf5\\xfe\\xe2\\xbd\\x0c\\xbc\\x9b\\xa0\\x8f\\xed?\\xa1\\xe1', '\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x06\\x00\\x00\\x00' ] self.uncompressed = [ os.path.join(config.script_folder, 'binfile0'), os.path.join(config.script_folder, 'binfile1') ] self.compressed = [ os.path.join(config.script_folder, 'binfile0.gz'), os.path.join(config.script_folder, 'binfile1.gz') ] for index in range(0, len(self.string)): self.check_call(config.cmd_env_content_s_to_s % (self.string[index], self.uncompressed[index])) self.check_call(config.cmd_env_gzip_s % (self.uncompressed[index])) self.run_argv = modules.loaded['file_gzip'].run_argv def tearDown(self): for f in self.uncompressed + self.compressed: self.check_call(config.cmd_env_remove_s % (f)) def test_compress_decompress(self): # Decompress and check test file self.assertTrue(self.run_argv(["--decompress", self.compressed[0]])); self.assertEqual( self.check_output(config.cmd_env_print_repr_s % self.uncompressed[0]), self.string[0] ) # Let's re-compress it, and decompress and check again self.assertTrue(self.run_argv([self.uncompressed[0]])) self.assertTrue(self.run_argv(["--decompress", self.compressed[0]])); self.assertEqual( self.check_output(config.cmd_env_print_repr_s % self.uncompressed[0]), self.string[0] ) # Recompress it keeping the original file self.assertTrue(self.run_argv([self.uncompressed[0], '--keep'])) # Check the existance of the original file and remove it self.check_call(config.cmd_env_stat_permissions_s % self.uncompressed[0]) self.check_call(config.cmd_env_remove_s % self.uncompressed[0]) # Do the same check 
self.assertTrue(self.run_argv(["--decompress", self.compressed[0]])); self.assertEqual( self.check_output(config.cmd_env_print_repr_s % self.uncompressed[0]), self.string[0] ) def test_compress_decompress_multiple(self): for index in range(0, len(self.compressed)): # Decompress and check test file self.assertTrue(self.run_argv(["--decompress", self.compressed[index]])); self.assertEqual( self.check_output(config.cmd_env_print_repr_s % self.uncompressed[index]), self.string[index] ) # Let's re-compress it, and decompress and check again self.assertTrue(self.run_argv([self.uncompressed[index]])) self.assertTrue(self.run_argv(["--decompress", self.compressed[index]])); self.assertEqual( self.check_output(config.cmd_env_print_repr_s % self.uncompressed[index]), self.string[index] ) @log_capture() def test_already_exists(self, log_captured): # Decompress keeping it and check test file self.assertTrue(self.run_argv(["--decompress", self.compressed[0], '--keep'])); self.assertEqual( self.check_output(config.cmd_env_print_repr_s % self.uncompressed[0]), self.string[0] ) # Do it again and trigger that the file decompressed already exists self.assertIsNone(self.run_argv(["--decompress", self.compressed[0]])); self.assertEqual(log_captured.records[-1].msg, "File '%s' already exists, skipping decompressing" % self.uncompressed[0]) # Compress and trigger that the file compressed already exists self.assertIsNone(self.run_argv([self.uncompressed[0]])); self.assertEqual(log_captured.records[-1].msg, "File '%s' already exists, skipping compressing" % self.compressed[0]) @log_capture() def test_wrong_ext(self, log_captured): # Decompress it and check test file self.assertTrue(self.run_argv(["--decompress", self.compressed[0]])); self.assertEqual( self.check_output(config.cmd_env_print_repr_s % self.uncompressed[0]), self.string[0] ) # Decompress the decompressed, wrong ext self.assertIsNone(self.run_argv(["--decompress", self.uncompressed[0]])); 
self.assertEqual(log_captured.records[-1].msg, "Unknown suffix, skipping decompressing") @log_capture() def test_unexistant(self, log_captured): # Decompress it and check test file self.assertIsNone(self.run_argv(["--decompress", 'bogus'])); self.assertEqual(log_captured.records[-1].msg, "Skipping file '%s', check existance and permission" % 'bogus')
gpl-3.0
paeaetech/pybp
pybp.py
1
21540
# -*- coding: utf-8 -*- # Pybp interface to BusPirate # Copyright (C) 2010 Paeae Technologies # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import serial import time import struct #exceptions class BusPirateError(Exception): pass class CommandError(Exception): pass class SerialError(Exception): pass #protocol mode definitions class Modes: UART = "uart" SPI = "spi" I2C = "i2c" ONEWIRE = "1wire" RAW = "raw" class Commands: RESET_BINARY = chr(0b0) RESET_USER = chr(0b1111) #protocol modes ENTER_SPI = chr(0b1) ENTER_I2C = chr(0b10) ENTER_UART = chr(0b11) ENTER_1WIRE = chr(0b100) ENTER_RAW = chr(0b101) #uart mode commands UART_START_ECHO = chr(0b10) UART_STOP_ECHO = chr(0b11) UART_WRITE = chr(0b10000) UART_SET_BAUDRATE = chr(0b01100000) UART_SET_CONFIG = chr(0b10000000) UART_ENTER_BRIDGE = chr(0b1111) #tests TEST_SHORT = chr(0b10000) TEST_LONG = chr(0b10001) TEST_EXIT = chr(0xff) #pwm PWM_SET = chr(0b10010) PWM_CLEAR = chr(0b10011) #pins CONFIG_PINS = chr(0b01000000) SET_PINS = chr(0b10000000) #probes PROBE_VOLTAGE = chr(0b10100) class Responses: RESET_BINARY = 'BBIO1' ENTER_SPI = 'SPI1' ENTER_I2C = 'I2C1' ENTER_UART = 'ART1' ENTER_1WIRE = '1W01' ENTER_RAW = 'RAW1' OK = chr(0x1) #generic OK response for most commands class CommandResponsePairs: RESET_BINARY = (Commands.RESET_BINARY,Responses.RESET_BINARY) ENTER_SPI = (Commands.ENTER_SPI,Responses.ENTER_SPI) ENTER_UART = 
(Commands.ENTER_UART,Responses.ENTER_UART) ENTER_I2C = (Commands.ENTER_I2C,Responses.ENTER_I2C) ENTER_1WIRE = (Commands.ENTER_1WIRE,Responses.ENTER_1WIRE) ENTER_RAW = (Commands.ENTER_RAW,Responses.ENTER_RAW) TEST_EXIT = (Commands.TEST_EXIT,Responses.OK) PWM_CLEAR = (Commands.PWM_CLEAR,Responses.OK) class BusPirate(object): """BusPirate interface class. """ def __init__(self, port,**kwargs): """ @param port: serial port to use @keyword baudrate: baudrate to use, defaults to 115200 @raise BusPirateError: if cannot enter Binary mode """ super(BusPirate, self).__init__() #use dummy serial device in unittests self.unittestSerial = kwargs.pop('unittestserial',None) self.port = port self.baudrate = kwargs.pop('baudrate',115200) self.serial = None self.open() def open(self): """Opens connection to BusPirate and enters binary mode. Closes existing connection if already open @raise BusPirateError: if cannot enter Binary mode """ self.close() self.serial = self.unittestSerial or serial.Serial(self.port,self.baudrate) self.serial.timeout = 2.0 #seconds self.lastresponse=None self.mode = None self._uartEcho = False self._enterBinaryMode() def close(self): """Resets BusPirate and closes serial connection""" if self.serial: self._sendCmd(Commands.RESET_USER) self.serial.close() self.serial=None def selfTest(self,longTest=False): """Perform self-test. @param longTest: Perform long self-test. Requires jumpers between +5 and Vpu, +3.3 and ADC. Defaults to False @return: True or False @rtype: bool """ if longTest: self._sendCmd(Commands.TEST_LONG) else: self._sendCmd(Commands.TEST_SHORT) result = None failed = True for i in range(10): result = self._read(1) if len(result) > 0: failed = False break if failed: raise BusPirateError("Selftest timeout") self._sendCmd(*CommandResponsePairs.TEST_EXIT) return result[0] == chr(0) def enterMode(self,mode=""): """Enter protocol mode. 
@param mode: "spi","uart","i2c","1wire","raw" @raise BusPirateError: if command fails @raise ValueError: if parameters are invalid """ if self.mode: self.leaveMode() mode = mode.lower() try: pair = None #command and response pair if mode == Modes.UART: pair = CommandResponsePairs.ENTER_UART elif mode == Modes.SPI: pair = CommandResponsePairs.ENTER_SPI elif mode == Modes.I2C: pair = CommandResponsePairs.ENTER_I2C elif mode == Modes.ONEWIRE: pair = CommandResponsePairs.ENTER_1WIRE elif mode == Modes.RAW: pair = CommandResponsePairs.ENTER_RAW else: raise ValueError("Invalid mode '%s'" % mode) cmd,response = pair self._sendCmd(cmd,response) self.mode = mode except CommandError as err: raise BusPirateError(err) return True def leaveMode(self): """Leave protocol mode.""" if not self.mode: raise BusPirateError("Not in protocol mode") self._sendCmd(*CommandResponsePairs.RESET_BINARY) self.mode = None return True #voltage probe def probeVoltage(self): """Perform AUX pin voltage measurement. @return: measured voltage. @rtype: float """ self._write(Commands.PROBE_VOLTAGE) data = self._read(2) v, = struct.unpack('>H',data) return (float(v)/1024)*6.6 def configPins(self,**kwargs): """Configure AUX,MOSI,CLK,MISO and CS as inputs (1) or outputs (0). @keyword aux: defaults to 1 @keyword mosi: defaults to 1 @keyword clk: defaults to 1 @keyword miso: defaults to 1 @keyword cs: defauls to 1 @return: pin directions after update @rtype: int """ aux = kwargs.pop('aux',1) mosi = kwargs.pop('mosi',1) miso = kwargs.pop('miso',1) clk = kwargs.pop('clk',1) cs = kwargs.pop('cs',1) pins = (aux<<4) | (mosi<<3) | (clk<<2) | (miso<<1) | cs self._write(chr(ord(Commands.CONFIG_PINS)|pins)) pins = self._read(1) return ord(pins) def setPins(self,**kwargs): """Set pins POWER,AUX,MOSI,CLK,MISO,CS and PULLUP on (1) or off (0). 
@keyword power: defaults to 0 @keyword pullup: defaults to 0 @keyword aux: defaults to 0 @keyword mosi: defaults to 0 @keyword clk: defaults to 0 @keyword miso: defaults to 0 @keyword cs: defauls to 0 @return: pin state after update @rtype: int """ power = kwargs.pop("power",0) aux = kwargs.pop('aux',0) mosi = kwargs.pop('mosi',0) miso = kwargs.pop('miso',0) clk = kwargs.pop('clk',0) cs = kwargs.pop('cs',0) pullup = kwargs.pop('pullup',0) pins = (power<<6)| (pullup<<5)| (aux<<4) | (mosi<<3) | (clk<<2) | (miso<<1) | cs self._write(chr(ord(Commands.SET_PINS)|pins)) pins = self._read(1) return ord(pins) #pwm def setPWM(self,dutycycle,hz): """Set AUX pin PWM @keyword duty: dutycycle (0.0 - 1.0) @keyword hz: pwm frequency """ prescaler = 0 period = 0 div = 0 #adapted from bus pirate sources if hz < 4: prescaler=0b11 div = 62 elif hz < 31: div = 250 prescaler = 0b10 elif hz < 245: div = 2000 prescaler = 0b01 else: div = 16000 if dutycycle < 0.0: dutycycle = 0.0 elif dutycycle > 1.0: dutycycle=1.0 period = int((div / hz)-1) duty = int(period * dutycycle) cmd = [ Commands.PWM_SET, chr(prescaler),chr(duty>>8),chr(duty&0xff),chr(period>>8),chr(period&0xff)] self._sendCmd("".join(cmd),Responses.OK) return True def stopPWM(self): """Stop outputting PWM @raise BusPirateError: if command fails. """ self._sendCmd(*CommandResponsePairs.PWM_CLEAR) return True #{ UART functions def uartEnter(self): return self.enterMode("uart") def uartStartEcho(self): """Start uart echo mode. Incoming data can be inspected with uartBytesAvailable and uartReceive @raise BusPirateError: if command fails. """ self._checkMode(Modes.UART) self._sendCmd(*CommandResponsePairs.UART_START_ECHO) self._uartEcho = True return True def uartStopEcho(self): """Stop uart echo mode. @raise BusPirateError: if command fails. """ self._checkMode(Modes.UART) self._sendCmd(*CommandResponsePairs.UART_STOP_ECHO) return True def uartWrite(self,data): """Writes data to uart @raise BusPirateError: if command fails. 
""" self._checkMode(Modes.UART) if not self._uartEcho: raise BusPirateError("Not in UART echo mode") #Up to 16 data bytes can be sent at once. #TODO: implement >16byte transfer if len(data) <= 16: cmd = chr(ord(Commands.UART_WRITE) | (len(data)-1)) #Note that 0000 indicates 1 byte because there’s no reason to send 0 self._sendCmd(cmd,Responses.OK) for c in data: self._write(c) if self._read(1) != response: raise BusPirateError("uartWrite received invalid response") else: raise NotImplementedError(">16byte uartwrite") return True def uartBytesAvailable(self): """Returns number of bytes avaiable in uart echo mode. @raise BusPirateError: if command fails. """ self._checkMode(Modes.UART) if not self._uartEcho: raise BusPirateError("Not in UART echo mode") return self._available() def uartReceive(self,size=1): """Receive bytes in uart echo mode. @param size: number of bytes to read @raise BusPirateError: if command fails. """ self._checkMode(Modes.UART) if not self._uartEcho: raise BusPirateError("Not in UART echo mode") return self._read(size) def uartSetSpeed(self,baudrate=9600): """configures uart baudrate @param baudrate: 300,1200,2400,4800,9600,19200,31250,28400,57600,115200 @raise BusPirateError: if command fails. @raise ValueError: if parameter is invalid """ self._checkMode(Modes.UART) rates = { 300 : 0, 1200 : 0b0001, 2400: 0b0010,4800 : 0b0011,9600 : 0b0100,19200: 0b0101,31250 : 0b0110, 38400 : 0b0111,57600 : 0b1000,115200 : 0b1010} if baudrate not in rates: raise ValueError("Invalid baudrate '%d'" % baudrate) self._sendCmd(chr(ord(Commands.UART_SET_BAUDRATE)|rates[baudrate]),Responses.OK) return True def uartSetPins(self,**kwargs): """Configure peripherals. @keyword power: defaults to 0 @keyword pullups: defaults to 0 @keyword aux: defaults to 0 @keyword cs: defaults to 0 @raise BusPirateError: if command fails. """ self._checkMode(Modes.UART) return self._setPins(**kwargs) def uartSetConfig(self,**kwargs): """Set UART configuration. 
@keyword output: 0 = HiZ, 1 = 3.3v, defaults to HiZ (0) @keyword databits: 8 or 9, defaults to 8 @keyword parity: 'N' (none) or 'E' (even) or 'O' (odd), defaults to 'N' @keyword stopbits: stop bits , defaults to 1 @keyword polarity: 0 = idle high, 1 = idle low, defaults to idle high (0) @raise BusPirateError: if command fails. @raise ValueError: if parameters are invalid. """ self._checkMode(Modes.UART) output = kwargs.pop("output",0) databits = kwargs.pop("databits",8) parity = kwargs.pop('parity','N').lower() stopbits = kwargs.pop("stopbits",1) polarity = kwargs.pop("polarity",0) #verify parameters if output != 0 and output != 1: raise ValueError("Output value is invalid") if stopbits != 0 and stopbits != 1: raise ValueError("Stopbits value is invalid") if polarity != 0 and polarity != 1: raise ValueError("Polarity is invalid") #combine databits and parity dp = 0 if parity not in 'eno': raise ValueError("Parity is invalid") if databits == 9: if parity == 'e': raise ValueError("Parity cannot be even if databits is 9") else: dp = 3 elif databits == 8: if parity == 'n': dp = 0 elif parity == 'e': dp = 1 elif parity == 'o': dp = 2 else: raise ValueError("Databits value is invalid.") self._sendCmd(chr(ord(Commands.UART_SET_CONFIG) | (output<<4) | (dp << 3) | (stopbits << 1) | polarity),Responses.OK) return True def uartBridgeMode(self): """Enters UART bridge mode. NOTE: Bridge mode cannot be exited programmatically, Bus Pirate has to be reseted manually. 
@return: a pyserial instance which can be used directly """ self._checkMode(Modes.UART) self._sendCmd(Commands.UART_ENTER_BRIDGE) return self.serial #} #{ I2C functions def i2cEnter(self): return self.enterMode("i2c") def i2cSendStart(self): self._checkMode(Modes.I2C) raise NotImplementedError def i2cSendStop(self): self._checkMode(Modes.I2C) raise NotImplementedError def i2cSendAck(self): self._checkMode(Modes.I2C) raise NotImplementedError def i2cSendNack(self): self._checkMode(Modes.I2C) raise NotImplementedError def i2cReadByte(self): self._checkMode(Modes.I2C) raise NotImplementedError def i2cStartSniffer(self): self._checkMode(Modes.I2C) raise NotImplementedError def i2cWrite(self,data): self._checkMode(Modes.I2C) raise NotImplementedError def i2cSetPins(self,**kwargs): """Configure peripherals. @keyword power: defaults to 0 @keyword pullups: defaults to 0 @keyword aux: defaults to 0 @keyword cs: defaults to 0 """ self._checkMode(Modes.I2C) return self._setPins(**kwargs) def i2cSetSpeed(self,speed=400): self._checkMode(Modes.I2C) raise NotImplementedError #} #{ SPI functions def spiEnter(self): return self.enterMode("spi") def spiStartSniffer(self): self._checkMode(Modes.SPI) raise NotImplementedError def spiSetSnifferMode(self,mode): self._checkMode(Modes.SPI) raise NotImplementedError def spiWrite(self,data): self._checkMode(Modes.SPI) raise NotImplementedError def spiWriteByte(self,data): self._checkMode(Modes.SPI) raise NotImplementedError def spiSetSpeed(self,speed=30): self._checkMode(Modes.SPI) raise NotImplementedError def spiGetSpeed(self): self._checkMode(Modes.SPI) raise NotImplementedError def spiSetPins(self,**kwargs): """Configure peripherals. 
@keyword power: defaults to 0 @keyword pullups: defaults to 0 @keyword aux: defaults to 0 @keyword cs: defaults to 0 """ self._checkMode(Modes.SPI) return self._setPins(**kwargs) def spiGetPins(self): self._checkMode(Modes.SPI) raise NotImplementedError def spiSetConfig(self,**kwargs): self._checkMode(Modes.SPI) raise NotImplementedError def spiGetConfig(self): self._checkMode(Modes.SPI) raise NotImplementedError #} #{ 1-wire functions def onewireEnter(self): self._checkMode(Modes.ONEWIRE) return self.enterMode("1wire") def onewireReset(self): self._checkMode(Modes.ONEWIRE) raise NotImplementedError def onewireReadByte(self): self._checkMode(Modes.ONEWIRE) raise NotImplementedError def onewireSearchRom(self): self._checkMode(Modes.ONEWIRE) raise NotImplementedError def onewireSearchAlarm(self): self._checkMode(Modes.ONEWIRE) raise NotImplementedError def onewireWrite(self,data): self._checkMode(Modes.ONEWIRE) raise NotImplementedError def onewireSetPins(self,**kwargs): """Configure peripherals. @keyword power: defaults to 0 @keyword pullups: defaults to 0 @keyword aux: defaults to 0 @keyword cs: defaults to 0 """ self._checkMode(Modes.ONEWIRE) return self._setPins(**kwargs) #} #{ RAW functions def rawEnter(self): return self.enterMode("raw") def rawSetCS(self,pin=0): self._checkMode(Modes.RAW) raise NotImplementedError def rawReadByte(self): self._checkMode(Modes.RAW) raise NotImplementedError def rawReadBit(self): self._checkMode(Modes.RAW) raise NotImplementedError def rawPeekInput(self): self._checkMode(Modes.RAW) raise NotImplementedError def rawClockTick(self,ticks=1): self._checkMode(Modes.RAW) raise NotImplementedError def rawSetClock(self,pin=0): self._checkMode(Modes.RAW) raise NotImplementedError def rawSetData(self,pin=0): self._checkMode(Modes.RAW) raise NotImplementedError def rawWrite(self,data): self._checkMode(Modes.RAW) raise NotImplementedError def rawSetPins(self,**kwargs): """Configure peripherals. 
@keyword power: defaults to 0 @keyword pullups: defaults to 0 @keyword aux: defaults to 0 @keyword cs: defaults to 0 """ self._checkMode(Modes.RAW) return self._setPins(**kwargs) def rawSetSpeed(self,speed=5): self._checkMode(Modes.RAW) raise NotImplementedError def rawSetConfig(self,**kwargs): self._checkMode(Modes.RAW) raise NotImplementedError #} #internal functions def _setPins(self,**kwargs): power = kwargs.pop("power",0) pullups = kwargs.pop("pullups",0) aux = kwargs.pop("aux",0) cs = kwargs.pop("cs",0) cmd = chr(ord(Commands.CONFIG_PINS) | (power << 3) | (pullups << 2) | (aux << 1) | cs) self._sendCmd(cmd,Responses.OK) return True def _enterBinaryMode(self,short=False): #From the manual: Send 0x00 to the user terminal 20 times to enter the raw binary bitbang mode. # One way to ensure that you're at the command line is to send <enter> at least 10 times, # and then send '#' to reset. Next, send 0x00 to the command line 20+ times # until you get the BBIOx version string. if short: self._sendCmd(*CommandResponsePairs.RESET_BINARY) else: try: #TODO: this is a very naive version,make it better for i in range(10): self._write('\n') time.sleep(0.001) self._write('#\n') time.sleep(0.001) for i in range(25): self._write(chr(0x0)) time.sleep(0.001) #read binary mode protocol version time.sleep(0.5) result = self._read(self._available()) if result and result.endswith(Responses.RESET_BINARY): return True raise BusPirateError("Failed to enter binary mode") except SerialError: raise BusPirateError("Serial exception raised while trying to enter binary mode") def _getResponse(self): return self.lastresponse def _sendCmd(self,cmd,expect=None): self._write(cmd) if expect: time.sleep(0.001) #maybe not necessary response = self._read(len(expect)) self.lastresponse = response if response != expect: raise CommandError("Sent command %s and expected return '%s' but received '%s'" % (hex(ord(cmd)),expect,response)) return True def _available(self): return self.serial.inWaiting() def 
_read(self,length): try: ret = "" i=0 while len(ret) < length and i < 10: ret += self.serial.read(length) i+=1 except: return None return ret def _write(self,data): try: ret = self.serial.write(data) except serial.SerialTimeoutException: raise SerialError("_write timeout") return ret def _checkMode(self,mode): if not self.mode or self.mode != mode: raise BusPirateError("Not in protocol mode '%s'" % mode) def _printhex(self,data): s = [hex(ord(x)) for x in data] print s #unittests if __name__ == '__main__': import unittest import re device = None #tests that use real device class BPTests(unittest.TestCase): def testEnterBinmode(self): bp = BusPirate(device) def testShortSelfTest(self): bp = BusPirate(device) self.assertTrue(bp.selfTest()) def testEnterInvalidMode(self): bp = BusPirate(device) self.assertRaises(BusPirateError,bp.enterMode,'foo') def testUartMode(self): bp = BusPirate(device) self.assertTrue(bp.enterMode('uart')) self.assertTrue(bp.uartSetPins(power=1)) self.assertTrue(bp.uartSetSpeed(300)) self.assertTrue(bp.uartSetSpeed(9600)) self.assertTrue(bp.uartSetConfig(output=1,parity='e')) self.assertRaises(BusPirateError,bp.uartSetConfig,output=1,parity='d') self.assertTrue(bp.uartSetConfig()) self.assertTrue(bp.uartSetPins()) self.assertTrue(bp.leaveMode()) self.assertTrue(bp.uartEnter()) def testSpiMode(self): bp = BusPirate(device) self.assertTrue(bp.enterMode("SPI")) self.assertTrue(bp.spiEnter()) self.assertTrue(bp.leaveMode()) def testI2CMode(self): bp = BusPirate(device) self.assertTrue(bp.enterMode("I2C")) self.assertTrue(bp.i2cEnter()) self.assertTrue(bp.leaveMode()) def test1WireMode(self): bp = BusPirate(device) self.assertTrue(bp.enterMode("1WIRE")) self.assertTrue(bp.onewireEnter()) self.assertTrue(bp.leaveMode()) def testRawMode(self): bp = BusPirate(device) self.assertTrue(bp.enterMode("RAW")) self.assertTrue(bp.rawEnter()) self.assertTrue(bp.leaveMode()) def testProbeVoltage(self): bp = BusPirate(device) 
self.assertEqual(0.0,bp.probeVoltage()) def testPWM(self): bp = BusPirate(device) self.assertTrue(bp.setPWM(0.5,2500)) self.assertTrue(bp.stopPWM()) def testConfigPins(self): bp = BusPirate(device) self.assertTrue(0b01010100,bp.configPins(aux=1,clk=1)) self.assertTrue(0b01000000,bp.configPins()) def testSetPins(self): bp = BusPirate(device) self.assertTrue(0b1110000,bp.setPins(power=1,pullup=1)) self.assertTrue(0b1000000,bp.setPins()) import re import os files = os.listdir("/dev") usbdevices = [] for f in files: if f.startswith("tty.usbserial"): usbdevices.append("/dev/"+f) if len(usbdevices) > 0: device = usbdevices[0] if len(usbdevices) > 1: print "Warning: multiple usbserial devices found, selecting %s" % device suite = unittest.TestLoader().loadTestsFromTestCase(BPTests) unittest.TextTestRunner(verbosity=2).run(suite) else: print "BusPirate not found, cannot run tests" # unittest.main()
gpl-3.0
facebook/buck
programs/test_buck_tool.py
5
7418
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Unit tests for buck's command-line parsing and movable temp files."""

import os
import unittest

from programs.buck_tool import BuckToolException, CommandLineArgs, MovableTemporaryFile


class TestCommandLineArgs(unittest.TestCase):
    """Tests that CommandLineArgs splits argv into buck options, a command,
    and command options, and correctly detects help/version invocations."""

    def test_empty_command(self):
        args = CommandLineArgs(["buck"])
        self.assertEqual(args.command, None)
        self.assertEqual(args.buck_options, [])
        self.assertEqual(args.command_options, [])
        self.assertTrue(args.is_help(), "With no arguments should show help")

    def test_single_command(self):
        args = CommandLineArgs(["buck", "clean"])
        self.assertEqual(args.command, "clean")
        self.assertEqual(args.buck_options, [])
        self.assertEqual(args.command_options, [])
        self.assertFalse(args.is_help())

    def test_global_short_help(self):
        args = CommandLineArgs(["buck", "-h"])
        self.assertEqual(args.command, None)
        self.assertEqual(args.buck_options, ["-h"])
        self.assertEqual(args.command_options, [])
        self.assertTrue(args.is_help())

    def test_global_help(self):
        args = CommandLineArgs(["buck", "--help"])
        self.assertEqual(args.command, None)
        self.assertEqual(args.buck_options, ["--help"])
        self.assertEqual(args.command_options, [])
        self.assertTrue(args.is_help())

    def test_global_version(self):
        args = CommandLineArgs(["buck", "--version"])
        self.assertEqual(args.command, None)
        self.assertEqual(args.buck_options, ["--version"])
        self.assertEqual(args.command_options, [])
        self.assertTrue(args.is_help(), "--version does not require a build")
        self.assertTrue(args.is_version())

    def test_command_help(self):
        args = CommandLineArgs(["buck", "clean", "--help"])
        self.assertEqual(args.command, "clean")
        self.assertEqual(args.buck_options, [])
        self.assertEqual(args.command_options, ["--help"])
        self.assertTrue(args.is_help())

    def test_help_command(self):
        args = CommandLineArgs(["buck", "--help", "clean"])
        self.assertEqual(args.command, "clean")
        self.assertEqual(args.buck_options, ["--help"])
        self.assertEqual(args.command_options, [])
        self.assertTrue(args.is_help())

    def test_short_help_before_command(self):
        args = CommandLineArgs(["buck", "-h", "clean"])
        self.assertEqual(args.command, "clean")
        self.assertEqual(args.buck_options, ["-h"])
        self.assertEqual(args.command_options, [])
        self.assertTrue(args.is_help())

    def test_short_help_after_command(self):
        args = CommandLineArgs(["buck", "clean", "-h"])
        self.assertEqual(args.command, "clean")
        self.assertEqual(args.buck_options, [])
        self.assertEqual(args.command_options, ["-h"])
        self.assertTrue(args.is_help())

    def test_short_help_after_external(self):
        # Everything after "--" belongs to the external program, so a "-h"
        # there must not trigger buck's own help.
        args = CommandLineArgs(["buck", "test", "--", "-h"])
        self.assertEqual(args.command, "test")
        self.assertEqual(args.buck_options, [])
        self.assertEqual(args.command_options, [])
        self.assertFalse(args.is_help())

    def test_command_all(self):
        args = CommandLineArgs(
            ["buck", "--help", "--version", "clean", "--help", "all"]
        )
        self.assertEqual(args.command, "clean")
        self.assertEqual(args.buck_options, ["--help", "--version"])
        self.assertEqual(args.command_options, ["--help", "all"])
        self.assertTrue(args.is_help())

    def test_run_command(self):
        args = CommandLineArgs(["buck", "run", "--help"])
        self.assertEqual(args.command, "run")
        self.assertEqual(args.buck_options, [])
        self.assertEqual(args.command_options, ["--help"])
        self.assertTrue(args.is_help())

    def test_run_command_help_for_program(self):
        # "--help" after "--" is for the program being run, not for buck.
        args = CommandLineArgs(["buck", "run", "//some:cli", "--", "--help"])
        self.assertEqual(args.command, "run")
        self.assertEqual(args.buck_options, [])
        self.assertEqual(args.command_options, ["//some:cli"])
        self.assertFalse(args.is_help())

    def test_run_command_help_for_program_and_buck(self):
        args = CommandLineArgs(["buck", "--help", "run", "//some:cli", "--", "--help"])
        self.assertEqual(args.command, "run")
        self.assertEqual(args.buck_options, ["--help"])
        self.assertEqual(args.command_options, ["//some:cli"])
        self.assertTrue(args.is_help())

    def test_run_command_help_for_program_and_command(self):
        args = CommandLineArgs(["buck", "run", "--help", "//some:cli", "--", "--help"])
        self.assertEqual(args.command, "run")
        self.assertEqual(args.buck_options, [])
        self.assertEqual(args.command_options, ["--help", "//some:cli"])
        self.assertTrue(args.is_help())


class TestMovableTemporaryFile(unittest.TestCase):
    """Tests the lifecycle of MovableTemporaryFile: the backing file is
    deleted on context exit unless ownership was transferred via move()."""

    def test_cleans_up_if_not_moved(self):
        path = None
        with MovableTemporaryFile() as f:
            f.close()
            path = f.name
            self.assertTrue(os.path.exists(path))
        self.assertIsNotNone(path)
        self.assertFalse(os.path.exists(path))

    def test_leaves_file_if_moved(self):
        path = None
        moved = None
        with MovableTemporaryFile() as f:
            f.close()
            path = f.name
            self.assertTrue(os.path.exists(path))
            moved = f.move()
        try:
            self.assertIsNotNone(path)
            self.assertIsNotNone(moved)
            # move() transferred ownership, so the file must survive exit.
            self.assertTrue(os.path.exists(path))
        finally:
            if path and os.path.exists(path):
                os.unlink(path)

    def test_cleans_up_if_moved_context_is_entered(self):
        # was: the locals were initialized twice in a row; once is enough.
        path = None
        moved = None
        with MovableTemporaryFile() as f:
            f.close()
            path = f.name
            self.assertTrue(os.path.exists(path))
            moved = f.move()
        try:
            # Re-entering the moved handle's context takes ownership back,
            # so exiting it deletes the file.
            with moved as f2:
                self.assertEqual(path, f2.file.name)
                self.assertTrue(os.path.exists(path))
            self.assertFalse(os.path.exists(path))
        finally:
            if path and os.path.exists(path):
                os.unlink(path)

    def test_close_and_name(self):
        with MovableTemporaryFile() as f:
            self.assertFalse(f.file.closed)
            f.close()
            self.assertTrue(f.file.closed)
            self.assertEqual(f.file.name, f.name)

    def test_handles_file_going_missing_while_entered(self):
        path = None
        with MovableTemporaryFile() as f:
            f.close()
            path = f.name
            self.assertTrue(os.path.exists(path))
            # Simulate an external deletion; __exit__ must not blow up.
            os.unlink(path)
        self.assertFalse(os.path.exists(path))


if __name__ == "__main__":
    unittest.main()
apache-2.0
Novasoft-India/OperERP-AM-Motors
openerp/addons/marketing/__openerp__.py
55
1677
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Marketing', 'version': '1.1', 'depends': ['base', 'base_setup', 'crm'], 'author': 'OpenERP SA', 'category': 'Hidden/Dependency', 'description': """ Menu for Marketing. =================== Contains the installer for marketing-related modules. """, 'website': 'http://www.openerp.com', 'data': [ 'security/marketing_security.xml', 'security/ir.model.access.csv', 'marketing_view.xml', 'res_config_view.xml', ], 'demo': ['marketing_demo.xml'], 'installable': True, 'auto_install': False, 'images': ['images/config_marketing.jpeg'], } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
Davideddu/karaokivy
chardet/mbcsgroupprober.py
236
1889
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#   Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from charsetgroupprober import CharSetGroupProber
from utf8prober import UTF8Prober
from sjisprober import SJISProber
from eucjpprober import EUCJPProber
from gb2312prober import GB2312Prober
from euckrprober import EUCKRProber
from big5prober import Big5Prober
from euctwprober import EUCTWProber


class MBCSGroupProber(CharSetGroupProber):
    """Group prober covering the multi-byte charsets.

    Runs one prober per supported multi-byte encoding and lets the
    CharSetGroupProber base class pick the best-scoring candidate.
    """

    def __init__(self):
        CharSetGroupProber.__init__(self)
        # One prober per supported encoding; UTF-8 first, then the
        # Japanese, Chinese and Korean encodings.
        self._mProbers = [
            UTF8Prober(),
            SJISProber(),
            EUCJPProber(),
            GB2312Prober(),
            EUCKRProber(),
            Big5Prober(),
            EUCTWProber(),
        ]
        self.reset()
gpl-3.0
Tritlo/Nannon
Board.py
1
5373
class Board:
    """Board state for the Nannon game.

    Checkers are encoded as -1 (white) and 1 (black), an empty point as 0.
    Playable points are 1..6 (``self.board`` indices 0..5).  Point 0 is
    white's home and black's safety; point 7 is black's home and white's
    safety.  ``self.homes`` counts the checkers waiting in each side's
    home base.
    """

    board = None        # current state of the six playable points
    homes = {-1: 1, 1: 1}  # checkers in each side's home base (overridden per instance)

    def home(self, x):
        """Return the home point for colour ``x``: 0 for white, 7 for black."""
        return 0 if x == -1 else 7

    def safety(self, x):
        """Return the safety point for colour ``x``: 7 for white, 0 for black."""
        return 7 if x == -1 else 0

    def __init__(self, board=None, homes=None):
        """Create a new Nannon board.

        ``board`` and ``homes`` default to the standard starting position.
        Fresh objects are built on every call so that separate Board
        instances never share state (the previous version used mutable
        default arguments, so all default-constructed boards aliased one
        list and one dict).
        """
        self.board = [-1, -1, 0, 0, 1, 1] if board is None else board
        self.homes = {-1: 1, 1: 1} if homes is None else homes

    def move(self, fr, to):
        """Move a checker from ``fr`` to ``to`` (both assumed legal).

        Returns True if the game is over after the move.
        """
        co = self.color(fr)
        self.changeFrom(fr)
        self.changeTo(to, co)
        return self.checkWin()

    def changeFrom(self, fr):
        """Remove the moving checker from ``fr`` (a legal source point)."""
        co = self.color(fr)
        ho = self.home(co)
        if fr == ho and self.homes[co] > 0:
            # Entering from the home base consumes a waiting checker.
            self.homes[co] -= 1
        else:
            self.board[fr - 1] = 0

    def changeTo(self, to, col):
        """Place a checker of colour ``col`` on ``to`` (a legal target).

        Landing on an opposing checker hits it back to its home base.
        """
        sa = self.safety(col)
        if -1 * col * to >= sa:
            # White: to >= 7, black: to <= 0 -- checker bears off to safety,
            # nothing to record on the playable points.
            pass
        else:
            if self.board[to - 1] != 0:
                # Hit: the occupant returns to its own home base.
                self.homes[self.board[to - 1]] += 1
            self.board[to - 1] = col

    def checkWin(self):
        """Return True if either side has borne off all its checkers."""
        if -1 not in self.board and self.homes[-1] == 0:
            return True
        if 1 not in self.board and self.homes[1] == 0:
            return True
        return False

    def color(self, checker):
        """Return the colour on point ``checker`` (0 if empty).

        Points 0 and 7 report their owning side's colour (-1 and 1).
        """
        if checker == 0 or checker == 7:
            return -1 if checker == 0 else 1
        return self.board[checker - 1]

    def prime(self, fr):
        """Return True if the checker on ``fr`` is in a prime position
        (protected by an adjacent friendly checker); False for no checker
        or an out-of-range point."""
        if fr not in range(0, 8):
            return False
        co = self.color(fr)
        if co == 0:
            return False
        sa = self.safety(co)
        ho = self.home(co)
        if fr == sa:
            return False
        if fr == ho:
            return (self.color(abs(ho - 1)) == co) and (self.homes[co] > 0)
        return (fr == abs(ho - 1)
                and (self.homes[co] > 0 or self.color(abs(ho - 2)) == co)
                or self.color(fr - 1) == co
                or self.color(fr + 1) == co)

    def validTo(self, to, col):
        """Return True if a checker of colour ``col`` may land on ``to``."""
        sa = self.safety(col)
        ho = self.home(col)
        if -1 * col * to >= sa:
            # White: to >= 7, black: to <= 0 -- bearing off is always legal.
            return True
        if to == ho or to == sa:
            return not self.prime(to)
        if self.board[to - 1] != col and not self.prime(to):
            return True
        return False

    def __str__(self):
        """Return an ASCII-art rendering of the board."""
        to_char = lambda x: "w" if x == -1 else " " if x == 0 else "b"
        # Was `[...] + map(...) + [...]`: map() returns an iterator on
        # Python 3, so concatenating it with a list raised TypeError.
        cells = ([("w" if self.homes[-1] >= 1 else " ")]
                 + [to_char(x) for x in self.board]
                 + [("b" if self.homes[1] >= 1 else " ")])
        r = ""
        # Normalized the black home-count test to >= for symmetry with
        # white's (equivalent in practice: each side has at most 3 checkers).
        r = r + " %s | | | | | | %s \n" % ("w" if self.homes[-1] >= 3 else " ", "b" if self.homes[1] >= 3 else " ")
        r = r + " %s / \ /:\ / \ /:\ / \ /:\ %s \n" % ("w" if self.homes[-1] >= 2 else " ", "b" if self.homes[1] >= 2 else " ")
        r = r + " %s / %s \/:%s:\/ %s \/:%s:\/ %s \/:%s:\ %s \n" % tuple(cells)
        r = r + " W 1 2 3 4 5 6 B "
        return r

    def validMoves(self, roll, col):
        """Return the legal moves for colour ``col`` with die roll ``roll``
        as a list of (from, to) tuples (home-base entry first)."""
        val = []
        if self.homes[col] > 0:
            start = self.home(col)
            if self.validTo(start - col * roll, col):
                val.append((start, start - col * roll))
        for i, c in enumerate(self.board):
            if c != col:
                continue
            if self.validTo(i + 1 - col * roll, col):
                val.append((i + 1, i + 1 - col * roll))
        return val
gpl-3.0
douggeiger/gnuradio
gnuradio-runtime/python/gnuradio/gru/msgq_runner.py
94
2529
#
# Copyright 2009 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING.  If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#

"""
Convenience class for dequeuing messages from a gr.msg_queue and
invoking a callback.

Creates a Python thread that does a blocking read on the supplied
gr.msg_queue, then invokes callback each time a msg is received.

If the msg type is not 0, then it is treated as a signal to exit
its loop.

If the callback raises an exception, and the runner was created
with 'exit_on_error' equal to True, then the runner will store the
exception and exit its loop, otherwise the exception is ignored.

To get the exception that the callback raised, if any, call
exit_error() on the object.

To manually stop the runner, call stop() on the object.

To determine if the runner has exited, call exited() on the object.
"""

from gnuradio import gr
import gnuradio.gr.gr_threading as _threading


class msgq_runner(_threading.Thread):
    """Daemon thread that pumps a gr.msg_queue into a callback.

    The thread starts itself on construction and runs until a non-zero
    message type arrives, stop() is called, or (with exit_on_error=True)
    the callback raises.
    """

    def __init__(self, msgq, callback, exit_on_error=False):
        _threading.Thread.__init__(self)
        self._msgq = msgq
        self._callback = callback
        self._exit_on_error = exit_on_error
        self._done = False
        self._exited = False
        self._exit_error = None
        # Daemonize so this thread never blocks interpreter shutdown.
        # (was self.setDaemon(1); the attribute works on Python 2.6+ and 3.)
        self.daemon = True
        self.start()

    def run(self):
        while not self._done:
            msg = self._msgq.delete_head()  # blocks until a message arrives
            if msg.type() != 0:
                # Non-zero type is the conventional shutdown signal.
                self.stop()
            else:
                try:
                    self._callback(msg)
                # was `except Exception, e:` -- Python-2-only syntax that is
                # a SyntaxError on Python 3; `as` is valid on 2.6+ and 3.
                except Exception as e:
                    if self._exit_on_error:
                        self._exit_error = e
                        self.stop()
                    # Otherwise the exception is deliberately ignored,
                    # per the module contract above.
        self._exited = True

    def stop(self):
        """Ask the run loop to exit after the current iteration."""
        self._done = True

    def exited(self):
        """Return True once the run loop has finished."""
        return self._exited

    def exit_error(self):
        """Return the exception raised by the callback, if any, else None."""
        return self._exit_error
gpl-3.0
sandan/sqlalchemy
test/orm/test_lazy_relations.py
10
35919
"""basic tests of lazy loaded attributes""" from sqlalchemy.testing import assert_raises import datetime from sqlalchemy.orm import attributes, exc as orm_exc, configure_mappers import sqlalchemy as sa from sqlalchemy import testing, and_ from sqlalchemy import Integer, String, ForeignKey, SmallInteger, Boolean from sqlalchemy.types import TypeDecorator from sqlalchemy.testing.schema import Table from sqlalchemy.testing.schema import Column from sqlalchemy import orm from sqlalchemy.orm import mapper, relationship, create_session, Session from sqlalchemy.testing import eq_ from sqlalchemy.testing import fixtures from test.orm import _fixtures from sqlalchemy.testing.assertsql import CompiledSQL class LazyTest(_fixtures.FixtureTest): run_inserts = 'once' run_deletes = None def test_basic(self): users, Address, addresses, User = ( self.tables.users, self.classes.Address, self.tables.addresses, self.classes.User) mapper(User, users, properties={ 'addresses': relationship( mapper(Address, addresses), lazy='select') }) sess = create_session() q = sess.query(User) eq_( [User(id=7, addresses=[Address(id=1, email_address='jack@bean.com')])], q.filter(users.c.id == 7).all() ) def test_needs_parent(self): """test the error raised when parent object is not bound.""" users, Address, addresses, User = ( self.tables.users, self.classes.Address, self.tables.addresses, self.classes.User) mapper(User, users, properties={ 'addresses': relationship( mapper(Address, addresses), lazy='select') }) sess = create_session() q = sess.query(User) u = q.filter(users.c.id == 7).first() sess.expunge(u) assert_raises(orm_exc.DetachedInstanceError, getattr, u, 'addresses') def test_orderby(self): users, Address, addresses, User = ( self.tables.users, self.classes.Address, self.tables.addresses, self.classes.User) mapper(User, users, properties={ 'addresses': relationship( mapper(Address, addresses), lazy='select', order_by=addresses.c.email_address), }) q = create_session().query(User) assert [ 
User(id=7, addresses=[ Address(id=1) ]), User(id=8, addresses=[ Address(id=3, email_address='ed@bettyboop.com'), Address(id=4, email_address='ed@lala.com'), Address(id=2, email_address='ed@wood.com') ]), User(id=9, addresses=[ Address(id=5) ]), User(id=10, addresses=[]) ] == q.all() def test_orderby_secondary(self): """tests that a regular mapper select on a single table can order by a relationship to a second table""" Address, addresses, users, User = ( self.classes.Address, self.tables.addresses, self.tables.users, self.classes.User) mapper(Address, addresses) mapper(User, users, properties=dict( addresses=relationship(Address, lazy='select'), )) q = create_session().query(User) l = q.filter(users.c.id == addresses.c.user_id).\ order_by(addresses.c.email_address).all() assert [ User(id=8, addresses=[ Address(id=2, email_address='ed@wood.com'), Address(id=3, email_address='ed@bettyboop.com'), Address(id=4, email_address='ed@lala.com'), ]), User(id=9, addresses=[ Address(id=5) ]), User(id=7, addresses=[ Address(id=1) ]), ] == l def test_orderby_desc(self): Address, addresses, users, User = ( self.classes.Address, self.tables.addresses, self.tables.users, self.classes.User) mapper(Address, addresses) mapper(User, users, properties=dict( addresses=relationship( Address, lazy='select', order_by=[sa.desc(addresses.c.email_address)]), )) sess = create_session() assert [ User(id=7, addresses=[ Address(id=1) ]), User(id=8, addresses=[ Address(id=2, email_address='ed@wood.com'), Address(id=4, email_address='ed@lala.com'), Address(id=3, email_address='ed@bettyboop.com'), ]), User(id=9, addresses=[ Address(id=5) ]), User(id=10, addresses=[]) ] == sess.query(User).all() def test_no_orphan(self): """test that a lazily loaded child object is not marked as an orphan""" users, Address, addresses, User = ( self.tables.users, self.classes.Address, self.tables.addresses, self.classes.User) mapper(User, users, properties={ 'addresses': relationship( Address, 
cascade="all,delete-orphan", lazy='select') }) mapper(Address, addresses) sess = create_session() user = sess.query(User).get(7) assert getattr(User, 'addresses').hasparent( attributes.instance_state(user.addresses[0]), optimistic=True) assert not sa.orm.class_mapper(Address)._is_orphan( attributes.instance_state(user.addresses[0])) def test_limit(self): """test limit operations combined with lazy-load relationships.""" users, items, order_items, orders, Item, \ User, Address, Order, addresses = ( self.tables.users, self.tables.items, self.tables.order_items, self.tables.orders, self.classes.Item, self.classes.User, self.classes.Address, self.classes.Order, self.tables.addresses) mapper(Item, items) mapper(Order, orders, properties={ 'items': relationship(Item, secondary=order_items, lazy='select') }) mapper(User, users, properties={ 'addresses': relationship( mapper(Address, addresses), lazy='select'), 'orders': relationship(Order, lazy='select') }) sess = create_session() q = sess.query(User) if testing.against('mssql'): l = q.limit(2).all() assert self.static.user_all_result[:2] == l else: l = q.limit(2).offset(1).all() assert self.static.user_all_result[1:3] == l def test_distinct(self): users, items, order_items, orders, \ Item, User, Address, Order, addresses = ( self.tables.users, self.tables.items, self.tables.order_items, self.tables.orders, self.classes.Item, self.classes.User, self.classes.Address, self.classes.Order, self.tables.addresses) mapper(Item, items) mapper(Order, orders, properties={ 'items': relationship(Item, secondary=order_items, lazy='select') }) mapper(User, users, properties={ 'addresses': relationship( mapper(Address, addresses), lazy='select'), 'orders': relationship(Order, lazy='select') }) sess = create_session() q = sess.query(User) # use a union all to get a lot of rows to join against u2 = users.alias('u2') s = sa.union_all( u2.select(use_labels=True), u2.select(use_labels=True), u2.select(use_labels=True)).alias('u') l = 
q.filter(s.c.u2_id == User.id).order_by(User.id).distinct().all() eq_(self.static.user_all_result, l) def test_uselist_false_warning(self): """test that multiple rows received by a uselist=False raises a warning.""" User, users, orders, Order = ( self.classes.User, self.tables.users, self.tables.orders, self.classes.Order) mapper(User, users, properties={ 'order': relationship(Order, uselist=False) }) mapper(Order, orders) s = create_session() u1 = s.query(User).filter(User.id == 7).one() assert_raises(sa.exc.SAWarning, getattr, u1, 'order') def test_one_to_many_scalar(self): Address, addresses, users, User = ( self.classes.Address, self.tables.addresses, self.tables.users, self.classes.User) mapper(User, users, properties=dict( address=relationship( mapper(Address, addresses), lazy='select', uselist=False) )) q = create_session().query(User) l = q.filter(users.c.id == 7).all() assert [User(id=7, address=Address(id=1))] == l def test_many_to_one_binds(self): Address, addresses, users, User = ( self.classes.Address, self.tables.addresses, self.tables.users, self.classes.User) mapper(Address, addresses, primary_key=[addresses.c.user_id, addresses.c.email_address]) mapper(User, users, properties=dict( address=relationship( Address, uselist=False, primaryjoin=sa.and_( users.c.id == addresses.c.user_id, addresses.c.email_address == 'ed@bettyboop.com')) )) q = create_session().query(User) eq_( [ User(id=7, address=None), User(id=8, address=Address(id=3)), User(id=9, address=None), User(id=10, address=None), ], list(q) ) def test_double(self): """tests lazy loading with two relationships simulatneously, from the same table, using aliases. 
""" users, orders, User, Address, Order, addresses = ( self.tables.users, self.tables.orders, self.classes.User, self.classes.Address, self.classes.Order, self.tables.addresses) openorders = sa.alias(orders, 'openorders') closedorders = sa.alias(orders, 'closedorders') mapper(Address, addresses) mapper(Order, orders) open_mapper = mapper(Order, openorders, non_primary=True) closed_mapper = mapper(Order, closedorders, non_primary=True) mapper(User, users, properties=dict( addresses=relationship(Address, lazy=True), open_orders=relationship( open_mapper, primaryjoin=sa.and_( openorders.c.isopen == 1, users.c.id == openorders.c.user_id), lazy='select'), closed_orders=relationship( closed_mapper, primaryjoin=sa.and_( closedorders.c.isopen == 0, users.c.id == closedorders.c.user_id), lazy='select') )) q = create_session().query(User) assert [ User( id=7, addresses=[Address(id=1)], open_orders=[Order(id=3)], closed_orders=[Order(id=1), Order(id=5)] ), User( id=8, addresses=[Address(id=2), Address(id=3), Address(id=4)], open_orders=[], closed_orders=[] ), User( id=9, addresses=[Address(id=5)], open_orders=[Order(id=4)], closed_orders=[Order(id=2)] ), User(id=10) ] == q.all() sess = create_session() user = sess.query(User).get(7) eq_( [Order(id=1), Order(id=5)], create_session().query(closed_mapper).with_parent( user, property='closed_orders').all() ) eq_( [Order(id=3)], create_session().query(open_mapper). 
with_parent(user, property='open_orders').all() ) def test_many_to_many(self): keywords, items, item_keywords, Keyword, Item = ( self.tables.keywords, self.tables.items, self.tables.item_keywords, self.classes.Keyword, self.classes.Item) mapper(Keyword, keywords) mapper(Item, items, properties=dict( keywords=relationship( Keyword, secondary=item_keywords, lazy='select'), )) q = create_session().query(Item) assert self.static.item_keyword_result == q.all() eq_( self.static.item_keyword_result[0:2], q.join('keywords').filter(keywords.c.name == 'red').all() ) def test_uses_get(self): """test that a simple many-to-one lazyload optimizes to use query.get().""" Address, addresses, users, User = ( self.classes.Address, self.tables.addresses, self.tables.users, self.classes.User) for pj in ( None, users.c.id == addresses.c.user_id, addresses.c.user_id == users.c.id ): mapper(Address, addresses, properties=dict( user=relationship( mapper(User, users), lazy='select', primaryjoin=pj) )) sess = create_session() # load address a1 = sess.query(Address).\ filter_by(email_address="ed@wood.com").one() # load user that is attached to the address u1 = sess.query(User).get(8) def go(): # lazy load of a1.user should get it from the session assert a1.user is u1 self.assert_sql_count(testing.db, go, 0) sa.orm.clear_mappers() def test_uses_get_compatible_types(self): """test the use_get optimization with compatible but non-identical types""" User, Address = self.classes.User, self.classes.Address class IntDecorator(TypeDecorator): impl = Integer class SmallintDecorator(TypeDecorator): impl = SmallInteger class SomeDBInteger(sa.Integer): pass for tt in [ Integer, SmallInteger, IntDecorator, SmallintDecorator, SomeDBInteger, ]: m = sa.MetaData() users = Table( 'users', m, Column( 'id', Integer, primary_key=True, test_needs_autoincrement=True), Column('name', String(30), nullable=False), ) addresses = Table( 'addresses', m, Column( 'id', Integer, primary_key=True, 
test_needs_autoincrement=True), Column('user_id', tt, ForeignKey('users.id')), Column('email_address', String(50), nullable=False), ) mapper(Address, addresses, properties=dict( user=relationship(mapper(User, users)) )) sess = create_session(bind=testing.db) # load address a1 = sess.query(Address).\ filter_by(email_address="ed@wood.com").one() # load user that is attached to the address u1 = sess.query(User).get(8) def go(): # lazy load of a1.user should get it from the session assert a1.user is u1 self.assert_sql_count(testing.db, go, 0) sa.orm.clear_mappers() def test_many_to_one(self): users, Address, addresses, User = ( self.tables.users, self.classes.Address, self.tables.addresses, self.classes.User) mapper(Address, addresses, properties=dict( user=relationship(mapper(User, users), lazy='select') )) sess = create_session() q = sess.query(Address) a = q.filter(addresses.c.id == 1).one() assert a.user is not None u1 = sess.query(User).get(7) assert a.user is u1 def test_backrefs_dont_lazyload(self): users, Address, addresses, User = ( self.tables.users, self.classes.Address, self.tables.addresses, self.classes.User) mapper(User, users, properties={ 'addresses': relationship(Address, backref='user') }) mapper(Address, addresses) sess = create_session() ad = sess.query(Address).filter_by(id=1).one() assert ad.user.id == 7 def go(): ad.user = None assert ad.user is None self.assert_sql_count(testing.db, go, 0) u1 = sess.query(User).filter_by(id=7).one() def go(): assert ad not in u1.addresses self.assert_sql_count(testing.db, go, 1) sess.expire(u1, ['addresses']) def go(): assert ad in u1.addresses self.assert_sql_count(testing.db, go, 1) sess.expire(u1, ['addresses']) ad2 = Address() def go(): ad2.user = u1 assert ad2.user is u1 self.assert_sql_count(testing.db, go, 0) def go(): assert ad2 in u1.addresses self.assert_sql_count(testing.db, go, 1) class GetterStateTest(_fixtures.FixtureTest): """test lazyloader on non-existent attribute returns expected attribute 
symbols, maintain expected state""" run_inserts = None def _unhashable_fixture(self, metadata, load_on_pending=False): class MyHashType(sa.TypeDecorator): impl = sa.String(100) def process_bind_param(self, value, dialect): return ";".join( "%s=%s" % (k, v) for k, v in sorted(value.items(), key=lambda key: key[0])) def process_result_value(self, value, dialect): return dict(elem.split("=", 1) for elem in value.split(";")) category = Table( 'category', metadata, Column('id', Integer, primary_key=True), Column('data', MyHashType()) ) article = Table( 'article', metadata, Column('id', Integer, primary_key=True), Column('data', MyHashType()) ) class Category(fixtures.ComparableEntity): pass class Article(fixtures.ComparableEntity): pass mapper(Category, category) mapper(Article, article, properties={ "category": relationship( Category, primaryjoin=orm.foreign(article.c.data) == category.c.data, load_on_pending=load_on_pending ) }) metadata.create_all() sess = Session(autoflush=False) data = {"im": "unhashable"} a1 = Article(id=1, data=data) c1 = Category(id=1, data=data) if load_on_pending: sess.add(c1) else: sess.add_all([c1, a1]) sess.flush() if load_on_pending: sess.add(a1) return Category, Article, sess, a1, c1 def _u_ad_fixture(self, populate_user, dont_use_get=False): users, Address, addresses, User = ( self.tables.users, self.classes.Address, self.tables.addresses, self.classes.User) mapper(User, users, properties={ 'addresses': relationship(Address, back_populates='user') }) mapper(Address, addresses, properties={ 'user': relationship( User, primaryjoin=and_( users.c.id == addresses.c.user_id, users.c.id != 27) if dont_use_get else None, back_populates='addresses' ) }) sess = create_session() a1 = Address(email_address='a1') sess.add(a1) if populate_user: a1.user = User(name='ed') sess.flush() if populate_user: sess.expire_all() return User, Address, sess, a1 def test_no_use_get_params_missing(self): User, Address, sess, a1 = self._u_ad_fixture(False, True) def 
go(): eq_(a1.user, None) # doesn't emit SQL self.assert_sql_count( testing.db, go, 0 ) @testing.provide_metadata def test_no_use_get_params_not_hashable(self): Category, Article, sess, a1, c1 = \ self._unhashable_fixture(self.metadata) def go(): eq_(a1.category, c1) self.assert_sql_count( testing.db, go, 1 ) @testing.provide_metadata def test_no_use_get_params_not_hashable_on_pending(self): Category, Article, sess, a1, c1 = \ self._unhashable_fixture(self.metadata, load_on_pending=True) def go(): eq_(a1.category, c1) self.assert_sql_count( testing.db, go, 1 ) def test_get_empty_passive_return_never_set(self): User, Address, sess, a1 = self._u_ad_fixture(False) eq_( Address.user.impl.get( attributes.instance_state(a1), attributes.instance_dict(a1), passive=attributes.PASSIVE_RETURN_NEVER_SET), attributes.NEVER_SET ) assert 'user_id' not in a1.__dict__ assert 'user' not in a1.__dict__ def test_history_empty_passive_return_never_set(self): User, Address, sess, a1 = self._u_ad_fixture(False) eq_( Address.user.impl.get_history( attributes.instance_state(a1), attributes.instance_dict(a1), passive=attributes.PASSIVE_RETURN_NEVER_SET), ((), (), ()) ) assert 'user_id' not in a1.__dict__ assert 'user' not in a1.__dict__ def test_get_empty_passive_no_initialize(self): User, Address, sess, a1 = self._u_ad_fixture(False) eq_( Address.user.impl.get( attributes.instance_state(a1), attributes.instance_dict(a1), passive=attributes.PASSIVE_NO_INITIALIZE), attributes.PASSIVE_NO_RESULT ) assert 'user_id' not in a1.__dict__ assert 'user' not in a1.__dict__ def test_history_empty_passive_no_initialize(self): User, Address, sess, a1 = self._u_ad_fixture(False) eq_( Address.user.impl.get_history( attributes.instance_state(a1), attributes.instance_dict(a1), passive=attributes.PASSIVE_NO_INITIALIZE), attributes.HISTORY_BLANK ) assert 'user_id' not in a1.__dict__ assert 'user' not in a1.__dict__ def test_get_populated_passive_no_initialize(self): User, Address, sess, a1 = 
self._u_ad_fixture(True) eq_( Address.user.impl.get( attributes.instance_state(a1), attributes.instance_dict(a1), passive=attributes.PASSIVE_NO_INITIALIZE), attributes.PASSIVE_NO_RESULT ) assert 'user_id' not in a1.__dict__ assert 'user' not in a1.__dict__ def test_history_populated_passive_no_initialize(self): User, Address, sess, a1 = self._u_ad_fixture(True) eq_( Address.user.impl.get_history( attributes.instance_state(a1), attributes.instance_dict(a1), passive=attributes.PASSIVE_NO_INITIALIZE), attributes.HISTORY_BLANK ) assert 'user_id' not in a1.__dict__ assert 'user' not in a1.__dict__ def test_get_populated_passive_return_never_set(self): User, Address, sess, a1 = self._u_ad_fixture(True) eq_( Address.user.impl.get( attributes.instance_state(a1), attributes.instance_dict(a1), passive=attributes.PASSIVE_RETURN_NEVER_SET), User(name='ed') ) def test_history_populated_passive_return_never_set(self): User, Address, sess, a1 = self._u_ad_fixture(True) eq_( Address.user.impl.get_history( attributes.instance_state(a1), attributes.instance_dict(a1), passive=attributes.PASSIVE_RETURN_NEVER_SET), ((), [User(name='ed'), ], ()) ) class M2OGetTest(_fixtures.FixtureTest): run_inserts = 'once' run_deletes = None def test_m2o_noload(self): """test that a NULL foreign key doesn't trigger a lazy load""" users, Address, addresses, User = ( self.tables.users, self.classes.Address, self.tables.addresses, self.classes.User) mapper(User, users) mapper(Address, addresses, properties={ 'user': relationship(User) }) sess = create_session() ad1 = Address(email_address='somenewaddress', id=12) sess.add(ad1) sess.flush() sess.expunge_all() ad2 = sess.query(Address).get(1) ad3 = sess.query(Address).get(ad1.id) def go(): # one lazy load assert ad2.user.name == 'jack' # no lazy load assert ad3.user is None self.assert_sql_count(testing.db, go, 1) class CorrelatedTest(fixtures.MappedTest): @classmethod def define_tables(self, meta): Table('user_t', meta, Column('id', Integer, 
primary_key=True), Column('name', String(50))) Table('stuff', meta, Column('id', Integer, primary_key=True), Column('date', sa.Date), Column('user_id', Integer, ForeignKey('user_t.id'))) @classmethod def insert_data(cls): stuff, user_t = cls.tables.stuff, cls.tables.user_t user_t.insert().execute( {'id': 1, 'name': 'user1'}, {'id': 2, 'name': 'user2'}, {'id': 3, 'name': 'user3'}) stuff.insert().execute( {'id': 1, 'user_id': 1, 'date': datetime.date(2007, 10, 15)}, {'id': 2, 'user_id': 1, 'date': datetime.date(2007, 12, 15)}, {'id': 3, 'user_id': 1, 'date': datetime.date(2007, 11, 15)}, {'id': 4, 'user_id': 2, 'date': datetime.date(2008, 1, 15)}, {'id': 5, 'user_id': 3, 'date': datetime.date(2007, 6, 15)}) def test_correlated_lazyload(self): stuff, user_t = self.tables.stuff, self.tables.user_t class User(fixtures.ComparableEntity): pass class Stuff(fixtures.ComparableEntity): pass mapper(Stuff, stuff) stuff_view = sa.select([stuff.c.id]).\ where(stuff.c.user_id == user_t.c.id).correlate(user_t).\ order_by(sa.desc(stuff.c.date)).limit(1) mapper(User, user_t, properties={ 'stuff': relationship( Stuff, primaryjoin=sa.and_( user_t.c.id == stuff.c.user_id, stuff.c.id == (stuff_view.as_scalar()))) }) sess = create_session() eq_( sess.query(User).all(), [ User( name='user1', stuff=[Stuff(date=datetime.date(2007, 12, 15), id=2)]), User( name='user2', stuff=[Stuff(id=4, date=datetime.date(2008, 1, 15))]), User( name='user3', stuff=[Stuff(id=5, date=datetime.date(2007, 6, 15))]) ] ) class O2MWOSideFixedTest(fixtures.MappedTest): # test #2948 - o2m backref with a "m2o does/does not count" # criteria doesn't scan the "o" table @classmethod def define_tables(self, meta): Table('city', meta, Column('id', Integer, primary_key=True), Column('deleted', Boolean), ) Table('person', meta, Column('id', Integer, primary_key=True), Column('city_id', ForeignKey('city.id')) ) @classmethod def setup_classes(cls): class Person(cls.Basic): pass class City(cls.Basic): pass @classmethod def 
setup_mappers(cls): Person, City = cls.classes.Person, cls.classes.City city, person = cls.tables.city, cls.tables.person mapper(Person, person, properties={ 'city': relationship(City, primaryjoin=and_( person.c.city_id == city.c.id, city.c.deleted == False), backref='people' ) }) mapper(City, city) def _fixture(self, include_other): city, person = self.tables.city, self.tables.person if include_other: city.insert().execute( {"id": 1, "deleted": False}, ) person.insert().execute( {"id": 1, "city_id": 1}, {"id": 2, "city_id": 1}, ) city.insert().execute( {"id": 2, "deleted": True}, ) person.insert().execute( {"id": 3, "city_id": 2}, {"id": 4, "city_id": 2}, ) def test_lazyload_assert_expected_sql(self): self._fixture(True) City = self.classes.City sess = Session(testing.db) c1, c2 = sess.query(City).order_by(City.id).all() def go(): eq_( [p.id for p in c2.people], [] ) self.assert_sql_execution( testing.db, go, CompiledSQL( "SELECT person.id AS person_id, person.city_id AS " "person_city_id FROM person " "WHERE person.city_id = :param_1 AND :param_2 = 0", {"param_1": 2, "param_2": 1} ) ) def test_lazyload_people_other_exists(self): self._fixture(True) City = self.classes.City sess = Session(testing.db) c1, c2 = sess.query(City).order_by(City.id).all() eq_( [p.id for p in c1.people], [1, 2] ) eq_( [p.id for p in c2.people], [] ) def test_lazyload_people_no_other_exists(self): # note that if we revert #2948, *this still passes!* # e.g. due to the scan of the "o" table, whether or not *another* # row exists determines if this works. self._fixture(False) City = self.classes.City sess = Session(testing.db) c2, = sess.query(City).order_by(City.id).all() eq_( [p.id for p in c2.people], [] ) class RefersToSelfLazyLoadInterferenceTest(fixtures.MappedTest): """Test [issue:3145]. This involves an object that refers to itself, which isn't entirely a supported use case. Here, we're able to fix it, but long term it's not clear if future needs will affect this. 
The use case is not super-critical. """ @classmethod def define_tables(cls, metadata): Table( 'a', metadata, Column('a_id', Integer, primary_key=True), Column('b_id', ForeignKey('b.b_id')), ) Table( 'b', metadata, Column('b_id', Integer, primary_key=True), Column('parent_id', ForeignKey('b.b_id')), ) Table( 'c', metadata, Column('c_id', Integer, primary_key=True), Column('b_id', ForeignKey('b.b_id')), ) @classmethod def setup_classes(cls): class A(cls.Basic): pass class B(cls.Basic): pass class C(cls.Basic): pass @classmethod def setup_mappers(cls): mapper(cls.classes.A, cls.tables.a, properties={ "b": relationship(cls.classes.B) }) bm = mapper(cls.classes.B, cls.tables.b, properties={ "parent": relationship( cls.classes.B, remote_side=cls.tables.b.c.b_id), "zc": relationship(cls.classes.C) }) mapper(cls.classes.C, cls.tables.c) bmp = bm._props configure_mappers() # Bug is order-dependent, must sort the "zc" property to the end bmp.sort() def test_lazy_doesnt_interfere(self): A, B, C = self.classes("A", "B", "C") session = Session() b = B() session.add(b) session.flush() b.parent_id = b.b_id b.zc.append(C()) b.zc.append(C()) session.commit() # If the bug is here, the next line throws an exception session.query(B).options( sa.orm.joinedload('parent').joinedload('zc')).all() class TypeCoerceTest(fixtures.MappedTest, testing.AssertsExecutionResults,): """ORM-level test for [ticket:3531]""" # mysql is having a recursion issue in the bind_expression __only_on__ = ('sqlite', 'postgresql') class StringAsInt(TypeDecorator): impl = String(50) def column_expression(self, col): return sa.cast(col, Integer) def bind_expression(self, col): return sa.cast(col, String) @classmethod def define_tables(cls, metadata): Table( 'person', metadata, Column("id", cls.StringAsInt, primary_key=True), ) Table( "pets", metadata, Column("id", Integer, primary_key=True), Column("person_id", Integer), ) @classmethod def setup_classes(cls): class Person(cls.Basic): pass class Pet(cls.Basic): pass 
    @classmethod
    def setup_mappers(cls):
        """Map Person/Pet with a join that casts the string-typed id.

        The primaryjoin wraps person.id in type_coerce + cast so the custom
        StringAsInt bind/column expressions are exercised on the lazy-load
        path (see the class docstring, [ticket:3531]).
        """
        mapper(cls.classes.Person, cls.tables.person, properties=dict(
            pets=relationship(
                cls.classes.Pet,
                primaryjoin=(
                    orm.foreign(cls.tables.pets.c.person_id) ==
                    sa.cast(
                        sa.type_coerce(cls.tables.person.c.id, Integer),
                        Integer
                    )
                )
            )
        ))
        mapper(cls.classes.Pet, cls.tables.pets)

    def test_lazyload_singlecast(self):
        """Lazy load of Person.pets emits exactly one CAST around the bound param."""
        Person = self.classes.Person
        Pet = self.classes.Pet

        s = Session()
        s.add_all([
            Person(id=5), Pet(id=1, person_id=5)
        ])
        s.commit()

        p1 = s.query(Person).first()
        # Triggering the lazy load must produce the single-CAST SQL below;
        # a double cast would indicate the [ticket:3531] regression.
        with self.sql_execution_asserter() as asserter:
            p1.pets
        asserter.assert_(
            CompiledSQL(
                "SELECT pets.id AS pets_id, pets.person_id "
                "AS pets_person_id FROM pets "
                "WHERE pets.person_id = CAST(:param_1 AS INTEGER)",
                [{'param_1': 5}]
            )
        )
mit
asadziach/tensorflow
tensorflow/python/kernel_tests/string_to_hash_bucket_op_test.py
134
4034
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for StringToHashBucket op from string_ops."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.platform import test


class StringToHashBucketOpTest(test.TestCase):
  """Covers the fast, legacy, and keyed (strong) string-to-hash-bucket ops."""

  def testStringToOneHashBucketFast(self):
    # With a single bucket, every input must map to bucket 0.
    with self.test_session():
      input_string = array_ops.placeholder(dtypes.string)
      output = string_ops.string_to_hash_bucket_fast(input_string, 1)
      result = output.eval(feed_dict={input_string: ['a', 'b', 'c']})

      self.assertAllEqual([0, 0, 0], result)

  def testStringToHashBucketsFast(self):
    with self.test_session():
      input_string = array_ops.placeholder(dtypes.string)
      output = string_ops.string_to_hash_bucket_fast(input_string, 10)
      result = output.eval(feed_dict={input_string: ['a', 'b', 'c', 'd']})

      # Fingerprint64('a') -> 12917804110809363939 -> mod 10 -> 9
      # Fingerprint64('b') -> 11795596070477164822 -> mod 10 -> 2
      # Fingerprint64('c') -> 11430444447143000872 -> mod 10 -> 2
      # Fingerprint64('d') -> 4470636696479570465 -> mod 10 -> 5
      self.assertAllEqual([9, 2, 2, 5], result)

  def testStringToOneHashBucketLegacyHash(self):
    # With a single bucket, every input must map to bucket 0.
    with self.test_session():
      input_string = array_ops.placeholder(dtypes.string)
      output = string_ops.string_to_hash_bucket(input_string, 1)
      result = output.eval(feed_dict={input_string: ['a', 'b', 'c']})

      self.assertAllEqual([0, 0, 0], result)

  def testStringToHashBucketsLegacyHash(self):
    with self.test_session():
      input_string = array_ops.placeholder(dtypes.string)
      output = string_ops.string_to_hash_bucket(input_string, 10)
      result = output.eval(feed_dict={input_string: ['a', 'b', 'c']})

      # Hash64('a') -> 2996632905371535868 -> mod 10 -> 8
      # Hash64('b') -> 5795986006276551370 -> mod 10 -> 0
      # Hash64('c') -> 14899841994519054197 -> mod 10 -> 7
      self.assertAllEqual([8, 0, 7], result)

  def testStringToOneHashBucketStrongOneHashBucket(self):
    # With a single bucket, every input must map to bucket 0 regardless of key.
    with self.test_session():
      input_string = constant_op.constant(['a', 'b', 'c'])
      output = string_ops.string_to_hash_bucket_strong(
          input_string, 1, key=[123, 345])

      self.assertAllEqual([0, 0, 0], output.eval())

  def testStringToHashBucketsStrong(self):
    with self.test_session():
      input_string = constant_op.constant(['a', 'b', 'c'])
      output = string_ops.string_to_hash_bucket_strong(
          input_string, 10, key=[98765, 132])

      # key = [98765, 132]
      # StrongKeyedHash(key, 'a') -> 7157389809176466784 -> mod 10 -> 4
      # StrongKeyedHash(key, 'b') -> 15805638358933211562 -> mod 10 -> 2
      # StrongKeyedHash(key, 'c') -> 18100027895074076528 -> mod 10 -> 8
      self.assertAllEqual([4, 2, 8], output.eval())

  def testStringToHashBucketsStrongInvalidKey(self):
    # The strong (keyed) hash requires exactly two 64-bit key elements.
    with self.test_session():
      input_string = constant_op.constant(['a', 'b', 'c'])
      with self.assertRaisesOpError('Key must have 2 elements'):
        string_ops.string_to_hash_bucket_strong(
            input_string, 10, key=[98765]).eval()


if __name__ == '__main__':
  test.main()
apache-2.0
omnirom/android_external_chromium-org
third_party/cython/src/Cython/Compiler/StringEncoding.py
97
9235
#
#   Cython -- encoding related tools
#
# NOTE(review): this module uses the Python 2 builtins `unichr` and
# `unicode` unconditionally, and `map(...) + [0]` list concatenation in
# encode_pyunicode_string(); those paths would fail under Python 3 —
# confirm the intended interpreter before touching them.

import re
import sys

if sys.version_info[0] >= 3:
    _unicode, _str, _bytes = str, str, bytes
    IS_PYTHON3 = True
else:
    _unicode, _str, _bytes = unicode, str, str
    IS_PYTHON3 = False

empty_bytes = _bytes()
empty_unicode = _unicode()

join_bytes = empty_bytes.join


class UnicodeLiteralBuilder(object):
    """Assemble a unicode string.
    """
    def __init__(self):
        self.chars = []

    def append(self, characters):
        if isinstance(characters, _bytes):
            # this came from a Py2 string literal in the parser code
            characters = characters.decode("ASCII")
        assert isinstance(characters, _unicode), str(type(characters))
        self.chars.append(characters)

    if sys.maxunicode == 65535:
        # Narrow (UTF-16) build: code points above the BMP must be stored
        # as a surrogate pair of two 16-bit code units.
        def append_charval(self, char_number):
            if char_number > 65535:
                # wide Unicode character on narrow platform => replace
                # by surrogate pair
                char_number -= 0x10000
                self.chars.append(unichr((char_number // 1024) + 0xD800))
                self.chars.append(unichr((char_number % 1024) + 0xDC00))
            else:
                self.chars.append(unichr(char_number))
    else:
        # Wide (UCS-4) build: every code point fits in one code unit.
        def append_charval(self, char_number):
            self.chars.append(unichr(char_number))

    def append_uescape(self, char_number, escape_string):
        # For unicode output the escape text is irrelevant; store the char.
        self.append_charval(char_number)

    def getstring(self):
        return EncodedString(u''.join(self.chars))

    def getstrings(self):
        # (bytes-part, unicode-part) pair; this builder has no bytes part.
        return (None, self.getstring())


class BytesLiteralBuilder(object):
    """Assemble a byte string or char value.
    """
    def __init__(self, target_encoding):
        self.chars = []
        self.target_encoding = target_encoding

    def append(self, characters):
        if isinstance(characters, _unicode):
            characters = characters.encode(self.target_encoding)
        assert isinstance(characters, _bytes), str(type(characters))
        self.chars.append(characters)

    def append_charval(self, char_number):
        self.chars.append(unichr(char_number).encode('ISO-8859-1'))

    def append_uescape(self, char_number, escape_string):
        # For byte output, keep the original escape text verbatim.
        self.append(escape_string)

    def getstring(self):
        # this *must* return a byte string!
        s = BytesLiteral(join_bytes(self.chars))
        s.encoding = self.target_encoding
        return s

    def getchar(self):
        # this *must* return a byte string!
        return self.getstring()

    def getstrings(self):
        # (bytes-part, unicode-part) pair; this builder has no unicode part.
        return (self.getstring(), None)


class StrLiteralBuilder(object):
    """Assemble both a bytes and a unicode representation of a string.
    """
    def __init__(self, target_encoding):
        self._bytes = BytesLiteralBuilder(target_encoding)
        self._unicode = UnicodeLiteralBuilder()

    def append(self, characters):
        self._bytes.append(characters)
        self._unicode.append(characters)

    def append_charval(self, char_number):
        self._bytes.append_charval(char_number)
        self._unicode.append_charval(char_number)

    def append_uescape(self, char_number, escape_string):
        # bytes side keeps the literal escape text, unicode side the char.
        self._bytes.append(escape_string)
        self._unicode.append_charval(char_number)

    def getstrings(self):
        return (self._bytes.getstring(), self._unicode.getstring())


class EncodedString(_unicode):
    # unicode string subclass to keep track of the original encoding.
    # 'encoding' is None for unicode strings and the source encoding
    # otherwise
    encoding = None

    def __deepcopy__(self, memo):
        # immutable value object: share instead of copying
        return self

    def byteencode(self):
        assert self.encoding is not None
        return self.encode(self.encoding)

    def utf8encode(self):
        assert self.encoding is None
        return self.encode("UTF-8")

    @property
    def is_unicode(self):
        return self.encoding is None

    def contains_surrogates(self):
        return string_contains_surrogates(self)


def string_contains_surrogates(ustring):
    """
    Check if the unicode string contains surrogate code points
    on a CPython platform with wide (UCS-4) or narrow (UTF-16)
    Unicode, i.e. characters that would be spelled as two
    separate code units on a narrow platform.
    """
    for c in map(ord, ustring):
        if c > 65535:  # can only happen on wide platforms
            return True
        if 0xD800 <= c <= 0xDFFF:
            return True
    return False


class BytesLiteral(_bytes):
    # bytes subclass that is compatible with EncodedString
    encoding = None

    def __deepcopy__(self, memo):
        # immutable value object: share instead of copying
        return self

    def byteencode(self):
        if IS_PYTHON3:
            return _bytes(self)
        else:
            # fake-recode the string to make it a plain bytes object
            return self.decode('ISO-8859-1').encode('ISO-8859-1')

    def utf8encode(self):
        assert False, "this is not a unicode string: %r" % self

    def __str__(self):
        """Fake-decode the byte string to unicode to support %
        formatting of unicode strings.
        """
        return self.decode('ISO-8859-1')

    is_unicode = False


# Maps a raw two-character escape sequence (e.g. r'\n') to the character
# it denotes; .get returns None for sequences not listed here.
char_from_escape_sequence = {
    r'\a': u'\a',
    r'\b': u'\b',
    r'\f': u'\f',
    r'\n': u'\n',
    r'\r': u'\r',
    r'\t': u'\t',
    r'\v': u'\v',
    }.get


# Characters that need escaping in C string literals: backslash, the
# trigraph introducer '??', the double quote, and all C0 control chars.
_c_special = ('\\', '??', '"') + tuple(map(chr, range(32)))


def _to_escape_sequence(s):
    """Return the C escape-sequence spelling for the special string *s*."""
    if s in '\n\r\t':
        return repr(s)[1:-1]
    elif s == '"':
        return r'\"'
    elif s == '\\':
        return r'\\'
    else:
        # within a character sequence, oct passes much better than hex
        return ''.join(['\\%03o' % ord(c) for c in s])


def _build_specials_replacer():
    """Build a callable that escapes all _c_special sequences in a byte string."""
    subexps = []
    replacements = {}
    for special in _c_special:
        regexp = ''.join(['[%s]' % c.replace('\\', '\\\\') for c in special])
        subexps.append(regexp)
        replacements[special.encode('ASCII')] = _to_escape_sequence(special).encode('ASCII')
    sub = re.compile(('(%s)' % '|'.join(subexps)).encode('ASCII')).sub

    def replace_specials(m):
        return replacements[m.group(1)]

    def replace(s):
        return sub(replace_specials, s)
    return replace

_replace_specials = _build_specials_replacer()


def escape_char(c):
    """Escape a single character for use in a C char literal."""
    if IS_PYTHON3:
        c = c.decode('ISO-8859-1')
    if c in '\n\r\t\\':
        return repr(c)[1:-1]
    elif c == "'":
        return "\\'"
    n = ord(c)
    if n < 32 or n > 127:
        # hex works well for characters
        return "\\x%02X" % n
    else:
        return c


def escape_byte_string(s):
    """Escape a byte string so that it can be written into C code.
    Note that this returns a Unicode string instead which, when
    encoded as ISO-8859-1, will result in the correct byte sequence
    being written.
    """
    s = _replace_specials(s)
    try:
        return s.decode("ASCII")  # trial decoding: plain ASCII => done
    except UnicodeDecodeError:
        pass
    if IS_PYTHON3:
        s_new = bytearray()
        append, extend = s_new.append, s_new.extend
        for b in s:
            if b >= 128:
                extend(('\\%3o' % b).encode('ASCII'))
            else:
                append(b)
        return s_new.decode('ISO-8859-1')
    else:
        l = []
        append = l.append
        for c in s:
            o = ord(c)
            if o >= 128:
                append('\\%3o' % o)
            else:
                append(c)
        return join_bytes(l).decode('ISO-8859-1')


def split_string_literal(s, limit=2000):
    # MSVC can't handle long string literals.
    if len(s) < limit:
        return s
    else:
        start = 0
        chunks = []
        while start < len(s):
            end = start + limit
            if len(s) > end-4 and '\\' in s[end-4:end]:
                # don't split a multi-char escape sequence across chunks
                end -= 4 - s[end-4:end].find('\\')  # just before the backslash
                while s[end-1] == '\\':
                    end -= 1
                    if end == start:
                        # must have been a long line of backslashes
                        end = start + limit - (limit % 2) - 4
                        break
            chunks.append(s[start:end])
            start = end
        return '""'.join(chunks)


def encode_pyunicode_string(s):
    """Create Py_UNICODE[] representation of a given unicode string.
    """
    # NOTE(review): Python 2 only — map() returns a list there; on Py3 this
    # concatenation raises TypeError. `unicode` below is also a Py2 builtin.
    s = map(ord, s) + [0]

    if sys.maxunicode >= 0x10000:  # Wide build or Py3.3
        utf16, utf32 = [], s
        for code_point in s:
            if code_point >= 0x10000:  # outside of BMP
                high, low = divmod(code_point - 0x10000, 1024)
                utf16.append(high + 0xD800)
                utf16.append(low + 0xDC00)
            else:
                utf16.append(code_point)
    else:
        # Narrow build: input is UTF-16 code units; reassemble surrogate
        # pairs into single UTF-32 code points.
        utf16, utf32 = s, []
        for code_unit in s:
            if 0xDC00 <= code_unit <= 0xDFFF and utf32 and 0xD800 <= utf32[-1] <= 0xDBFF:
                high, low = utf32[-1], code_unit
                utf32[-1] = ((high & 0x3FF) << 10) + (low & 0x3FF) + 0x10000
            else:
                utf32.append(code_unit)
    if utf16 == utf32:
        # identical on both unit widths: emit the UTF-16 variant only once
        utf16 = []
    return ",".join(map(unicode, utf16)), ",".join(map(unicode, utf32))
bsd-3-clause
mjuric/duplicity
duplicity/cached_ops.py
6
1652
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2012 Google Inc.
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

"""Cache-wrapped functions for grp and pwd lookups."""

import grp
import pwd


class CachedCall(object):
    """Decorator that memoizes a function's results per argument tuple.

    Results are remembered only when the argument tuple is hashable;
    unhashable arguments fall back to calling the wrapped function on
    every invocation.  Exceptions raised by the wrapped function
    propagate unchanged and are never cached.
    """

    def __init__(self, f):
        self.cache = {}
        self.f = f

    def __call__(self, *args):
        memo = self.cache
        try:
            hit = args in memo
        except TypeError:
            # args contains an unhashable value -- compute without caching.
            return self.f(*args)
        if hit:
            return memo[args]
        value = self.f(*args)
        memo[args] = value
        return value


@CachedCall
def getgrgid(gid):
    """Cached wrapper around grp.getgrgid."""
    return grp.getgrgid(gid)


@CachedCall
def getgrnam(name):
    """Cached wrapper around grp.getgrnam."""
    return grp.getgrnam(name)


@CachedCall
def getpwnam(name):
    """Cached wrapper around pwd.getpwnam."""
    return pwd.getpwnam(name)


@CachedCall
def getpwuid(uid):
    """Cached wrapper around pwd.getpwuid."""
    return pwd.getpwuid(uid)
gpl-2.0
ujjvala-addsol/addsol_hr
openerp/addons/delivery/partner.py
383
1404
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, osv class res_partner(osv.osv): _inherit = 'res.partner' _columns = { 'property_delivery_carrier': fields.property( type='many2one', relation='delivery.carrier', string="Delivery Method", help="This delivery method will be used when invoicing from picking."), } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
jeffery-do/Vizdoombot
doom/lib/python3.5/site-packages/matplotlib/tests/test_arrow_patches.py
7
1799
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

from matplotlib.externals import six

import matplotlib.pyplot as plt
from matplotlib.testing.decorators import image_comparison
import matplotlib


def draw_arrow(ax, t, r):
    """Draw one annotation arrow of style *t* and length *r* onto *ax*."""
    ax.annotate('', xy=(0.5, 0.5 + r), xytext=(0.5, 0.5),
                size=30, arrowprops=dict(arrowstyle=t,
                                         fc="b", ec='k'))


@image_comparison(baseline_images=['fancyarrow_test_image'])
def test_fancyarrow():
    """Render a grid of arrow styles x arrow lengths against a baseline image."""
    # Added 0 to test division by zero error described in issue 3930
    r = [0.4, 0.3, 0.2, 0.1, 0]
    t = ["fancy", "simple", matplotlib.patches.ArrowStyle.Fancy()]

    fig, axes = plt.subplots(len(t), len(r), squeeze=False,
                             subplot_kw=dict(aspect=True),
                             figsize=(8, 4.5))

    for i_r, r1 in enumerate(r):
        for i_t, t1 in enumerate(t):
            ax = axes[i_t, i_r]
            draw_arrow(ax, t1, r1)
            ax.tick_params(labelleft=False, labelbottom=False)


@image_comparison(baseline_images=['boxarrow_test_image'], extensions=['png'])
def test_boxarrow():
    """Render every registered BoxStyle name against a baseline image."""
    styles = matplotlib.patches.BoxStyle.get_styles()

    n = len(styles)
    spacing = 1.2

    figheight = (n * spacing + .5)
    fig1 = plt.figure(1, figsize=(4 / 1.5, figheight / 1.5))

    fontsize = 0.3 * 72

    # One labeled box per style, stacked top to bottom in sorted name order.
    for i, stylename in enumerate(sorted(styles.keys())):
        fig1.text(0.5, ((n - i) * spacing - 0.5)/figheight, stylename,
                  ha="center",
                  size=fontsize,
                  transform=fig1.transFigure,
                  bbox=dict(boxstyle=stylename, fc="w", ec="k"))


if __name__ == '__main__':
    import nose
    nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
mit
cschnei3/forseti-security
google/cloud/security/common/util/email_util.py
2
6349
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Email utility module."""

import base64
import os
# NOTE(review): urllib2 is Python 2 only -- this module targets Py2.
import urllib2

import gflags as flags
import jinja2
from retrying import retry
import sendgrid
from sendgrid.helpers import mail

from google.cloud.security.common.util import errors as util_errors
from google.cloud.security.common.util import log_util
from google.cloud.security.common.util import retryable_exceptions

# TODO: The next editor must remove this disable and correct issues.
# pylint: disable=missing-type-doc,missing-return-type-doc,redundant-returns-doc

FLAGS = flags.FLAGS

flags.DEFINE_string('email_recipient', None,
                    'Email address of the notification recipient.')

flags.DEFINE_string('email_sender', None,
                    'Email address of the notification sender.')

flags.DEFINE_string('sendgrid_api_key', None,
                    'API key to authenticate with SendGrid email service.')

LOGGER = log_util.get_logger(__name__)


class EmailUtil(object):
    """Utility for sending emails."""

    def __init__(self, api_key):
        """Initialize the email util.

        Args:
            api_key: String of the sendgrid api key to auth email service.
        """
        self.sendgrid = sendgrid.SendGridAPIClient(apikey=api_key)

    # Exponential backoff: 1s doubling up to 10s, at most 5 attempts.
    @retry(retry_on_exception=retryable_exceptions.is_retryable_exception,
           wait_exponential_multiplier=1000, wait_exponential_max=10000,
           stop_max_attempt_number=5)
    def _execute_send(self, email):
        """Executes the sending of the email.

        This needs to be a standalone method so that we can wrap it with retry,
        and the final exception can be gracefully handled upstream.

        Args:
            email: sendgrid mail object

        Returns:
            urllib2 response object
        """
        return self.sendgrid.client.mail.send.post(request_body=email.get())

    def send(self, email_sender=None, email_recipient=None,
             email_subject=None, email_content=None, content_type=None,
             attachment=None):
        """Send an email.

        This uses SendGrid.
        https://github.com/sendgrid/sendgrid-python

        The minimum required info to send email are:
        sender, recipient, subject, and content (the body)

        Args:
            email_sender: String of the email sender.
            email_recipient: String of the email recipient.
            email_subject: String of the email subject.
            email_content: String of the email content (aka, body).
            content_type: String of the email content type.
            attachment: A SendGrid Attachment.

        Returns:
            None.

        Raises:
            EmailSendError: An error with sending email has occurred.
        """
        if not email_sender or not email_recipient:
            LOGGER.warn('Unable to send email: sender=%s, recipient=%s',
                        email_sender, email_recipient)
            raise util_errors.EmailSendError

        email = mail.Mail(
            mail.Email(email_sender),
            email_subject,
            mail.Email(email_recipient),
            mail.Content(content_type, email_content)
        )

        if attachment:
            email.add_attachment(attachment)

        try:
            response = self._execute_send(email)
        except urllib2.HTTPError as e:
            LOGGER.error('Unable to send email: %s %s', e.code, e.reason)
            raise util_errors.EmailSendError

        # 202 is SendGrid's "accepted for delivery" status code.
        if response.status_code == 202:
            LOGGER.info('Email accepted for delivery:\n%s',
                        email_subject)
        else:
            LOGGER.error('Unable to send email:\n%s\n%s\n%s\n%s',
                         email_subject, response.status_code,
                         response.body, response.headers)
            raise util_errors.EmailSendError

    @classmethod
    def render_from_template(cls, template_file, template_vars):
        """Fill out an email template with template variables.

        Args:
            template_file: The string location of email template in filesystem.
            template_vars: The dict of template variables to fill into the
                template.

        Returns:
            String of template content rendered with the provided variables.
        """
        template_searchpath = os.path.abspath(
            os.path.join(os.path.dirname(__file__), '../email_templates'))
        template_loader = jinja2.FileSystemLoader(
            searchpath=template_searchpath)
        template_env = jinja2.Environment(loader=template_loader)
        template = template_env.get_template(template_file)
        return template.render(template_vars)

    @classmethod
    def create_attachment(cls, file_location, content_type, filename,
                          disposition='attachment', content_id=None):
        """Create a SendGrid attachment.

        SendGrid attachments file content must be base64 encoded.

        Args:
            file_location: The string path of the file.
            content_type: The content type string.
            filename: The string filename of attachment.
            disposition: Content disposition string, defaults to "attachment".
            content_id: The content id string.

        Returns:
            A SendGrid Attachment.
        """
        file_content = ''
        with open(file_location, 'rb') as f:
            file_content = f.read()
        content = base64.b64encode(file_content)

        attachment = mail.Attachment()
        attachment.set_content(content)
        attachment.set_type(content_type)
        attachment.set_filename(filename)
        attachment.set_disposition(disposition)
        attachment.set_content_id(content_id)

        return attachment
apache-2.0
mlperf/inference_results_v0.7
closed/Gigabyte/code/common/accuracy.py
13
3141
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os, sys
sys.path.append(os.getcwd())

import time

import numpy as np

from code.common import logging


class AccuracyRunner(object):
    """Base accuracy checker: holds an inference runner, a validation map
    file ("<image_name> <label>" per line) and a preprocessed-image dir.

    Subclasses implement run() and return an accuracy value.
    """

    def __init__(self, runner, val_map, image_dir, verbose=False):
        # runner: inference engine wrapper, invoked by subclasses as
        # runner([batch], batch_size) -- assumes it returns a list of
        # per-output arrays; TODO confirm against the runner implementation.
        self.runner = runner
        self.val_map = val_map
        self.image_dir = image_dir
        self.verbose = verbose

        self.image_list = []
        self.class_list = []

    def reset(self):
        """Clear any previously loaded validation lists."""
        self.image_list = []
        self.class_list = []

    def load_val_images(self):
        """Populate image_list / class_list from the val_map file."""
        self.reset()
        with open(self.val_map) as f:
            for line in f:
                self.image_list.append(line.split()[0])
                self.class_list.append(int(line.split()[1]))

    def run(self):
        raise NotImplementedError("AccuracyRunner.run() is not implemented")


class ImageNetAccuracyRunner(AccuracyRunner):
    """Computes top-1 classification accuracy over preprocessed ImageNet
    validation images stored as .npy files."""

    def __init__(self, runner, batch_size, image_dir, num_images, verbose=False):
        super().__init__(runner, "data_maps/imagenet/val_map.txt", image_dir, verbose=verbose)
        self.batch_size = batch_size
        self.num_images = num_images

    def run(self):
        """Run inference over num_images and return top-1 accuracy in [0, 1]."""
        self.load_val_images()
        logging.info("Running accuracy check on {:} images.".format(self.num_images))
        class_predictions = []
        batch_idx = 0
        for image_idx in range(0, self.num_images, self.batch_size):
            # The final batch may be smaller than batch_size.
            actual_batch_size = self.batch_size if image_idx + self.batch_size <= self.num_images else self.num_images - image_idx
            batch_images = self.image_list[image_idx:image_idx + actual_batch_size]

            # DLA does not support batches that are less than the engine's
            # configured batch size. Pad with junk.
            while len(batch_images) < self.batch_size:
                batch_images.append(self.image_list[0])

            batch_images = np.ascontiguousarray(np.stack([np.load(os.path.join(self.image_dir, name + ".npy")) for name in batch_images]))

            start_time = time.time()
            outputs = self.runner([batch_images], self.batch_size)
            if self.verbose:
                logging.info("Batch {:d} (Size {:}) >> Inference time: {:f}".format(batch_idx, actual_batch_size, time.time() - start_time))

            # Keep only the real (unpadded) predictions from output 0.
            class_predictions.extend(outputs[0][:actual_batch_size])
            batch_idx += 1

        class_list = self.class_list[:self.num_images]
        num_matches = np.sum(np.array(class_list) == np.array(class_predictions))
        accuracy = float(num_matches) / len(class_list)
        return accuracy
apache-2.0
hall-lab/svtools
tests/lsort_tests.py
2
3857
from unittest import TestCase, main import tempfile import os import sys import difflib import svtools.lsort as lsort class Test_lsort(TestCase): def test_parser(self): parser = lsort.command_parser() args = parser.parse_args('file1 file2 file3'.split()) self.assertEqual(args.vcf_files, ['file1', 'file2', 'file3']) self.assertEqual(args.batchsize, 200) self.assertEqual(args.tempdir, tempfile.gettempdir()) args2 = parser.parse_args('-b 2 -t temp file1 file2'.split()) self.assertEqual(args2.batchsize, 2) self.assertEqual(args2.tempdir, 'temp') self.assertEqual(args2.vcf_files, ['file1', 'file2']) def test_lsort_init_defaults(self): file_list = ['file1', 'file2'] lsort_class = lsort.Lsort(file_list) self.assertEqual(lsort_class.vcf_file_names, file_list) self.assertEqual(lsort_class.batchsize, 200) self.assertEqual(lsort_class.tempdir, tempfile.gettempdir()) def test_lsort_init_full(self): file_list = ['file1', 'file2'] lsort_class = lsort.Lsort(file_list, tempdir='tempydir', batchsize=5 ) self.assertEqual(lsort_class.vcf_file_names, file_list) self.assertEqual(lsort_class.batchsize, 5) self.assertEqual(lsort_class.tempdir, 'tempydir') class LsortIntegrationTest(TestCase): def run_integration_test(self): test_directory = os.path.dirname(os.path.abspath(__file__)) self.test_data_dir = os.path.join(test_directory, 'test_data', 'lsort') # glob vcfs vcfs = list() for sample in ('NA12878', 'NA12891', 'NA12892'): vcfs.append(os.path.join(self.test_data_dir, '{0}.vcf'.format(sample))) expected_result = os.path.join(self.test_data_dir, 'lsort_expected') temp_descriptor, temp_output_path = tempfile.mkstemp(suffix='.vcf') with os.fdopen(temp_descriptor, 'w') as output_handle: sorter = lsort.Lsort(vcfs, tempdir=None, batchsize=2, output_handle=output_handle) sorter.execute() output_handle.flush() expected_lines = open(expected_result).readlines() produced_lines = open(temp_output_path).readlines() diff = difflib.unified_diff(produced_lines, expected_lines, 
fromfile=temp_output_path, tofile=expected_result) result = ''.join(diff) if result != '': for line in result: sys.stdout.write(line) self.assertFalse(result) os.remove(temp_output_path) def run_file_list_integration_test(self): test_directory = os.path.dirname(os.path.abspath(__file__)) self.test_data_dir = os.path.join(test_directory, 'test_data', 'lsort') # glob vcfs vcfs = list() for sample in ('NA12878', 'NA12891', 'NA12892'): vcfs.append(os.path.join(self.test_data_dir, '{0}.sv.vcf.gz'.format(sample))) expected_result = os.path.join(self.test_data_dir, 'lsort_expected') temp_descriptor, temp_output_path = tempfile.mkstemp(suffix='.vcf') with os.fdopen(temp_descriptor, 'w') as output_handle: sorter = lsort.Lsort(vcfs, tempdir=None, batchsize=2, include_ref=False, output_handle=output_handle) sorter.execute() output_handle.flush() expected_lines = open(expected_result).readlines() produced_lines = open(temp_output_path).readlines() diff = difflib.unified_diff(produced_lines, expected_lines, fromfile=temp_output_path, tofile=expected_result) result = ''.join(diff) if result != '': for line in result: sys.stdout.write(line) self.assertFalse(result) os.remove(temp_output_path) if __name__ == "__main__": main()
mit
alimuldal/numpy
numpy/lib/_iotools.py
72
32062
"""A collection of functions designed to help I/O with ascii files. """ from __future__ import division, absolute_import, print_function __docformat__ = "restructuredtext en" import sys import numpy as np import numpy.core.numeric as nx from numpy.compat import asbytes, bytes, asbytes_nested, basestring if sys.version_info[0] >= 3: from builtins import bool, int, float, complex, object, str unicode = str else: from __builtin__ import bool, int, float, complex, object, unicode, str if sys.version_info[0] >= 3: def _bytes_to_complex(s): return complex(s.decode('ascii')) def _bytes_to_name(s): return s.decode('ascii') else: _bytes_to_complex = complex _bytes_to_name = str def _is_string_like(obj): """ Check whether obj behaves like a string. """ try: obj + '' except (TypeError, ValueError): return False return True def _is_bytes_like(obj): """ Check whether obj behaves like a bytes object. """ try: obj + asbytes('') except (TypeError, ValueError): return False return True def _to_filehandle(fname, flag='r', return_opened=False): """ Returns the filehandle corresponding to a string or a file. If the string ends in '.gz', the file is automatically unzipped. Parameters ---------- fname : string, filehandle Name of the file whose filehandle must be returned. flag : string, optional Flag indicating the status of the file ('r' for read, 'w' for write). return_opened : boolean, optional Whether to return the opening status of the file. """ if _is_string_like(fname): if fname.endswith('.gz'): import gzip fhd = gzip.open(fname, flag) elif fname.endswith('.bz2'): import bz2 fhd = bz2.BZ2File(fname) else: fhd = file(fname, flag) opened = True elif hasattr(fname, 'seek'): fhd = fname opened = False else: raise ValueError('fname must be a string or file handle') if return_opened: return fhd, opened return fhd def has_nested_fields(ndtype): """ Returns whether one or several fields of a dtype are nested. Parameters ---------- ndtype : dtype Data-type of a structured array. 
Raises ------ AttributeError If `ndtype` does not have a `names` attribute. Examples -------- >>> dt = np.dtype([('name', 'S4'), ('x', float), ('y', float)]) >>> np.lib._iotools.has_nested_fields(dt) False """ for name in ndtype.names or (): if ndtype[name].names: return True return False def flatten_dtype(ndtype, flatten_base=False): """ Unpack a structured data-type by collapsing nested fields and/or fields with a shape. Note that the field names are lost. Parameters ---------- ndtype : dtype The datatype to collapse flatten_base : {False, True}, optional Whether to transform a field with a shape into several fields or not. Examples -------- >>> dt = np.dtype([('name', 'S4'), ('x', float), ('y', float), ... ('block', int, (2, 3))]) >>> np.lib._iotools.flatten_dtype(dt) [dtype('|S4'), dtype('float64'), dtype('float64'), dtype('int32')] >>> np.lib._iotools.flatten_dtype(dt, flatten_base=True) [dtype('|S4'), dtype('float64'), dtype('float64'), dtype('int32'), dtype('int32'), dtype('int32'), dtype('int32'), dtype('int32'), dtype('int32')] """ names = ndtype.names if names is None: if flatten_base: return [ndtype.base] * int(np.prod(ndtype.shape)) return [ndtype.base] else: types = [] for field in names: info = ndtype.fields[field] flat_dt = flatten_dtype(info[0], flatten_base) types.extend(flat_dt) return types class LineSplitter(object): """ Object to split a string at a given delimiter or at given places. Parameters ---------- delimiter : str, int, or sequence of ints, optional If a string, character used to delimit consecutive fields. If an integer or a sequence of integers, width(s) of each field. comments : str, optional Character used to mark the beginning of a comment. Default is '#'. autostrip : bool, optional Whether to strip each individual field. Default is True. """ def autostrip(self, method): """ Wrapper to strip each member of the output of `method`. 
Parameters ---------- method : function Function that takes a single argument and returns a sequence of strings. Returns ------- wrapped : function The result of wrapping `method`. `wrapped` takes a single input argument and returns a list of strings that are stripped of white-space. """ return lambda input: [_.strip() for _ in method(input)] # def __init__(self, delimiter=None, comments=asbytes('#'), autostrip=True): self.comments = comments # Delimiter is a character if isinstance(delimiter, unicode): delimiter = delimiter.encode('ascii') if (delimiter is None) or _is_bytes_like(delimiter): delimiter = delimiter or None _handyman = self._delimited_splitter # Delimiter is a list of field widths elif hasattr(delimiter, '__iter__'): _handyman = self._variablewidth_splitter idx = np.cumsum([0] + list(delimiter)) delimiter = [slice(i, j) for (i, j) in zip(idx[:-1], idx[1:])] # Delimiter is a single integer elif int(delimiter): (_handyman, delimiter) = ( self._fixedwidth_splitter, int(delimiter)) else: (_handyman, delimiter) = (self._delimited_splitter, None) self.delimiter = delimiter if autostrip: self._handyman = self.autostrip(_handyman) else: self._handyman = _handyman # def _delimited_splitter(self, line): if self.comments is not None: line = line.split(self.comments)[0] line = line.strip(asbytes(" \r\n")) if not line: return [] return line.split(self.delimiter) # def _fixedwidth_splitter(self, line): if self.comments is not None: line = line.split(self.comments)[0] line = line.strip(asbytes("\r\n")) if not line: return [] fixed = self.delimiter slices = [slice(i, i + fixed) for i in range(0, len(line), fixed)] return [line[s] for s in slices] # def _variablewidth_splitter(self, line): if self.comments is not None: line = line.split(self.comments)[0] if not line: return [] slices = self.delimiter return [line[s] for s in slices] # def __call__(self, line): return self._handyman(line) class NameValidator(object): """ Object to validate a list of strings to use as 
field names. The strings are stripped of any non alphanumeric character, and spaces are replaced by '_'. During instantiation, the user can define a list of names to exclude, as well as a list of invalid characters. Names in the exclusion list are appended a '_' character. Once an instance has been created, it can be called with a list of names, and a list of valid names will be created. The `__call__` method accepts an optional keyword "default" that sets the default name in case of ambiguity. By default this is 'f', so that names will default to `f0`, `f1`, etc. Parameters ---------- excludelist : sequence, optional A list of names to exclude. This list is appended to the default list ['return', 'file', 'print']. Excluded names are appended an underscore: for example, `file` becomes `file_` if supplied. deletechars : str, optional A string combining invalid characters that must be deleted from the names. case_sensitive : {True, False, 'upper', 'lower'}, optional * If True, field names are case-sensitive. * If False or 'upper', field names are converted to upper case. * If 'lower', field names are converted to lower case. The default value is True. replace_space : '_', optional Character(s) used in replacement of white spaces. Notes ----- Calling an instance of `NameValidator` is the same as calling its method `validate`. Examples -------- >>> validator = np.lib._iotools.NameValidator() >>> validator(['file', 'field2', 'with space', 'CaSe']) ['file_', 'field2', 'with_space', 'CaSe'] >>> validator = np.lib._iotools.NameValidator(excludelist=['excl'], deletechars='q', case_sensitive='False') >>> validator(['excl', 'field2', 'no_q', 'with space', 'CaSe']) ['excl_', 'field2', 'no_', 'with_space', 'case'] """ # defaultexcludelist = ['return', 'file', 'print'] defaultdeletechars = set("""~!@#$%^&*()-=+~\|]}[{';: /?.>,<""") # def __init__(self, excludelist=None, deletechars=None, case_sensitive=None, replace_space='_'): # Process the exclusion list .. 
if excludelist is None: excludelist = [] excludelist.extend(self.defaultexcludelist) self.excludelist = excludelist # Process the list of characters to delete if deletechars is None: delete = self.defaultdeletechars else: delete = set(deletechars) delete.add('"') self.deletechars = delete # Process the case option ..... if (case_sensitive is None) or (case_sensitive is True): self.case_converter = lambda x: x elif (case_sensitive is False) or case_sensitive.startswith('u'): self.case_converter = lambda x: x.upper() elif case_sensitive.startswith('l'): self.case_converter = lambda x: x.lower() else: msg = 'unrecognized case_sensitive value %s.' % case_sensitive raise ValueError(msg) # self.replace_space = replace_space def validate(self, names, defaultfmt="f%i", nbfields=None): """ Validate a list of strings as field names for a structured array. Parameters ---------- names : sequence of str Strings to be validated. defaultfmt : str, optional Default format string, used if validating a given string reduces its length to zero. nbfields : integer, optional Final number of validated names, used to expand or shrink the initial list of names. Returns ------- validatednames : list of str The list of validated field names. Notes ----- A `NameValidator` instance can be called directly, which is the same as calling `validate`. For examples, see `NameValidator`. """ # Initial checks .............. if (names is None): if (nbfields is None): return None names = [] if isinstance(names, basestring): names = [names, ] if nbfields is not None: nbnames = len(names) if (nbnames < nbfields): names = list(names) + [''] * (nbfields - nbnames) elif (nbnames > nbfields): names = names[:nbfields] # Set some shortcuts ........... deletechars = self.deletechars excludelist = self.excludelist case_converter = self.case_converter replace_space = self.replace_space # Initializes some variables ... 
validatednames = [] seen = dict() nbempty = 0 # for item in names: item = case_converter(item).strip() if replace_space: item = item.replace(' ', replace_space) item = ''.join([c for c in item if c not in deletechars]) if item == '': item = defaultfmt % nbempty while item in names: nbempty += 1 item = defaultfmt % nbempty nbempty += 1 elif item in excludelist: item += '_' cnt = seen.get(item, 0) if cnt > 0: validatednames.append(item + '_%d' % cnt) else: validatednames.append(item) seen[item] = cnt + 1 return tuple(validatednames) # def __call__(self, names, defaultfmt="f%i", nbfields=None): return self.validate(names, defaultfmt=defaultfmt, nbfields=nbfields) def str2bool(value): """ Tries to transform a string supposed to represent a boolean to a boolean. Parameters ---------- value : str The string that is transformed to a boolean. Returns ------- boolval : bool The boolean representation of `value`. Raises ------ ValueError If the string is not 'True' or 'False' (case independent) Examples -------- >>> np.lib._iotools.str2bool('TRUE') True >>> np.lib._iotools.str2bool('false') False """ value = value.upper() if value == asbytes('TRUE'): return True elif value == asbytes('FALSE'): return False else: raise ValueError("Invalid boolean") class ConverterError(Exception): """ Exception raised when an error occurs in a converter for string values. """ pass class ConverterLockError(ConverterError): """ Exception raised when an attempt is made to upgrade a locked converter. """ pass class ConversionWarning(UserWarning): """ Warning issued when a string converter has a problem. Notes ----- In `genfromtxt` a `ConversionWarning` is issued if raising exceptions is explicitly suppressed with the "invalid_raise" keyword. """ pass class StringConverter(object): """ Factory class for function transforming a string into another object (int, float). After initialization, an instance can be called to transform a string into another object. 
If the string is recognized as representing a missing value, a default value is returned. Attributes ---------- func : function Function used for the conversion. default : any Default value to return when the input corresponds to a missing value. type : type Type of the output. _status : int Integer representing the order of the conversion. _mapper : sequence of tuples Sequence of tuples (dtype, function, default value) to evaluate in order. _locked : bool Holds `locked` parameter. Parameters ---------- dtype_or_func : {None, dtype, function}, optional If a `dtype`, specifies the input data type, used to define a basic function and a default value for missing data. For example, when `dtype` is float, the `func` attribute is set to `float` and the default value to `np.nan`. If a function, this function is used to convert a string to another object. In this case, it is recommended to give an associated default value as input. default : any, optional Value to return by default, that is, when the string to be converted is flagged as missing. If not given, `StringConverter` tries to supply a reasonable default value. missing_values : sequence of str, optional Sequence of strings indicating a missing value. locked : bool, optional Whether the StringConverter should be locked to prevent automatic upgrade or not. Default is False. """ # _mapper = [(nx.bool_, str2bool, False), (nx.integer, int, -1)] # On 32-bit systems, we need to make sure that we explicitly include # nx.int64 since ns.integer is nx.int32. 
if nx.dtype(nx.integer).itemsize < nx.dtype(nx.int64).itemsize: _mapper.append((nx.int64, int, -1)) _mapper.extend([(nx.floating, float, nx.nan), (complex, _bytes_to_complex, nx.nan + 0j), (nx.longdouble, nx.longdouble, nx.nan), (nx.string_, bytes, asbytes('???'))]) (_defaulttype, _defaultfunc, _defaultfill) = zip(*_mapper) @classmethod def _getdtype(cls, val): """Returns the dtype of the input variable.""" return np.array(val).dtype # @classmethod def _getsubdtype(cls, val): """Returns the type of the dtype of the input variable.""" return np.array(val).dtype.type # # This is a bit annoying. We want to return the "general" type in most # cases (ie. "string" rather than "S10"), but we want to return the # specific type for datetime64 (ie. "datetime64[us]" rather than # "datetime64"). @classmethod def _dtypeortype(cls, dtype): """Returns dtype for datetime64 and type of dtype otherwise.""" if dtype.type == np.datetime64: return dtype return dtype.type # @classmethod def upgrade_mapper(cls, func, default=None): """ Upgrade the mapper of a StringConverter by adding a new function and its corresponding default. The input function (or sequence of functions) and its associated default value (if any) is inserted in penultimate position of the mapper. The corresponding type is estimated from the dtype of the default value. 
Parameters ---------- func : var Function, or sequence of functions Examples -------- >>> import dateutil.parser >>> import datetime >>> dateparser = datetustil.parser.parse >>> defaultdate = datetime.date(2000, 1, 1) >>> StringConverter.upgrade_mapper(dateparser, default=defaultdate) """ # Func is a single functions if hasattr(func, '__call__'): cls._mapper.insert(-1, (cls._getsubdtype(default), func, default)) return elif hasattr(func, '__iter__'): if isinstance(func[0], (tuple, list)): for _ in func: cls._mapper.insert(-1, _) return if default is None: default = [None] * len(func) else: default = list(default) default.append([None] * (len(func) - len(default))) for (fct, dft) in zip(func, default): cls._mapper.insert(-1, (cls._getsubdtype(dft), fct, dft)) # def __init__(self, dtype_or_func=None, default=None, missing_values=None, locked=False): # Convert unicode (for Py3) if isinstance(missing_values, unicode): missing_values = asbytes(missing_values) elif isinstance(missing_values, (list, tuple)): missing_values = asbytes_nested(missing_values) # Defines a lock for upgrade self._locked = bool(locked) # No input dtype: minimal initialization if dtype_or_func is None: self.func = str2bool self._status = 0 self.default = default or False dtype = np.dtype('bool') else: # Is the input a np.dtype ? 
try: self.func = None dtype = np.dtype(dtype_or_func) except TypeError: # dtype_or_func must be a function, then if not hasattr(dtype_or_func, '__call__'): errmsg = ("The input argument `dtype` is neither a" " function nor a dtype (got '%s' instead)") raise TypeError(errmsg % type(dtype_or_func)) # Set the function self.func = dtype_or_func # If we don't have a default, try to guess it or set it to # None if default is None: try: default = self.func(asbytes('0')) except ValueError: default = None dtype = self._getdtype(default) # Set the status according to the dtype _status = -1 for (i, (deftype, func, default_def)) in enumerate(self._mapper): if np.issubdtype(dtype.type, deftype): _status = i if default is None: self.default = default_def else: self.default = default break # if a converter for the specific dtype is available use that last_func = func for (i, (deftype, func, default_def)) in enumerate(self._mapper): if dtype.type == deftype: _status = i last_func = func if default is None: self.default = default_def else: self.default = default break func = last_func if _status == -1: # We never found a match in the _mapper... _status = 0 self.default = default self._status = _status # If the input was a dtype, set the function to the last we saw if self.func is None: self.func = func # If the status is 1 (int), change the function to # something more robust. if self.func == self._mapper[1][1]: if issubclass(dtype.type, np.uint64): self.func = np.uint64 elif issubclass(dtype.type, np.int64): self.func = np.int64 else: self.func = lambda x: int(float(x)) # Store the list of strings corresponding to missing values. 
if missing_values is None: self.missing_values = set([asbytes('')]) else: if isinstance(missing_values, bytes): missing_values = missing_values.split(asbytes(",")) self.missing_values = set(list(missing_values) + [asbytes('')]) # self._callingfunction = self._strict_call self.type = self._dtypeortype(dtype) self._checked = False self._initial_default = default # def _loose_call(self, value): try: return self.func(value) except ValueError: return self.default # def _strict_call(self, value): try: # We check if we can convert the value using the current function new_value = self.func(value) # In addition to having to check whether func can convert the # value, we also have to make sure that we don't get overflow # errors for integers. if self.func is int: try: np.array(value, dtype=self.type) except OverflowError: raise ValueError # We're still here so we can now return the new value return new_value except ValueError: if value.strip() in self.missing_values: if not self._status: self._checked = False return self.default raise ValueError("Cannot convert string '%s'" % value) # def __call__(self, value): return self._callingfunction(value) # def upgrade(self, value): """ Find the best converter for a given string, and return the result. The supplied string `value` is converted by testing different converters in order. First the `func` method of the `StringConverter` instance is tried, if this fails other available converters are tried. The order in which these other converters are tried is determined by the `_status` attribute of the instance. Parameters ---------- value : str The string to convert. Returns ------- out : any The result of converting `value` with the appropriate converter. """ self._checked = True try: return self._strict_call(value) except ValueError: # Raise an exception if we locked the converter... 
if self._locked: errmsg = "Converter is locked and cannot be upgraded" raise ConverterLockError(errmsg) _statusmax = len(self._mapper) # Complains if we try to upgrade by the maximum _status = self._status if _status == _statusmax: errmsg = "Could not find a valid conversion function" raise ConverterError(errmsg) elif _status < _statusmax - 1: _status += 1 (self.type, self.func, default) = self._mapper[_status] self._status = _status if self._initial_default is not None: self.default = self._initial_default else: self.default = default return self.upgrade(value) def iterupgrade(self, value): self._checked = True if not hasattr(value, '__iter__'): value = (value,) _strict_call = self._strict_call try: for _m in value: _strict_call(_m) except ValueError: # Raise an exception if we locked the converter... if self._locked: errmsg = "Converter is locked and cannot be upgraded" raise ConverterLockError(errmsg) _statusmax = len(self._mapper) # Complains if we try to upgrade by the maximum _status = self._status if _status == _statusmax: raise ConverterError( "Could not find a valid conversion function" ) elif _status < _statusmax - 1: _status += 1 (self.type, self.func, default) = self._mapper[_status] if self._initial_default is not None: self.default = self._initial_default else: self.default = default self._status = _status self.iterupgrade(value) def update(self, func, default=None, testing_value=None, missing_values=asbytes(''), locked=False): """ Set StringConverter attributes directly. Parameters ---------- func : function Conversion function. default : any, optional Value to return by default, that is, when the string to be converted is flagged as missing. If not given, `StringConverter` tries to supply a reasonable default value. testing_value : str, optional A string representing a standard input value of the converter. This string is used to help defining a reasonable default value. 
missing_values : sequence of str, optional Sequence of strings indicating a missing value. locked : bool, optional Whether the StringConverter should be locked to prevent automatic upgrade or not. Default is False. Notes ----- `update` takes the same parameters as the constructor of `StringConverter`, except that `func` does not accept a `dtype` whereas `dtype_or_func` in the constructor does. """ self.func = func self._locked = locked # Don't reset the default to None if we can avoid it if default is not None: self.default = default self.type = self._dtypeortype(self._getdtype(default)) else: try: tester = func(testing_value or asbytes('1')) except (TypeError, ValueError): tester = None self.type = self._dtypeortype(self._getdtype(tester)) # Add the missing values to the existing set if missing_values is not None: if _is_bytes_like(missing_values): self.missing_values.add(missing_values) elif hasattr(missing_values, '__iter__'): for val in missing_values: self.missing_values.add(val) else: self.missing_values = [] def easy_dtype(ndtype, names=None, defaultfmt="f%i", **validationargs): """ Convenience function to create a `np.dtype` object. The function processes the input `dtype` and matches it with the given names. Parameters ---------- ndtype : var Definition of the dtype. Can be any string or dictionary recognized by the `np.dtype` function, or a sequence of types. names : str or sequence, optional Sequence of strings to use as field names for a structured dtype. For convenience, `names` can be a string of a comma-separated list of names. defaultfmt : str, optional Format string used to define missing names, such as ``"f%i"`` (default) or ``"fields_%02i"``. validationargs : optional A series of optional arguments used to initialize a `NameValidator`. 
Examples -------- >>> np.lib._iotools.easy_dtype(float) dtype('float64') >>> np.lib._iotools.easy_dtype("i4, f8") dtype([('f0', '<i4'), ('f1', '<f8')]) >>> np.lib._iotools.easy_dtype("i4, f8", defaultfmt="field_%03i") dtype([('field_000', '<i4'), ('field_001', '<f8')]) >>> np.lib._iotools.easy_dtype((int, float, float), names="a,b,c") dtype([('a', '<i8'), ('b', '<f8'), ('c', '<f8')]) >>> np.lib._iotools.easy_dtype(float, names="a,b,c") dtype([('a', '<f8'), ('b', '<f8'), ('c', '<f8')]) """ try: ndtype = np.dtype(ndtype) except TypeError: validate = NameValidator(**validationargs) nbfields = len(ndtype) if names is None: names = [''] * len(ndtype) elif isinstance(names, basestring): names = names.split(",") names = validate(names, nbfields=nbfields, defaultfmt=defaultfmt) ndtype = np.dtype(dict(formats=ndtype, names=names)) else: nbtypes = len(ndtype) # Explicit names if names is not None: validate = NameValidator(**validationargs) if isinstance(names, basestring): names = names.split(",") # Simple dtype: repeat to match the nb of names if nbtypes == 0: formats = tuple([ndtype.type] * len(names)) names = validate(names, defaultfmt=defaultfmt) ndtype = np.dtype(list(zip(names, formats))) # Structured dtype: just validate the names as needed else: ndtype.names = validate(names, nbfields=nbtypes, defaultfmt=defaultfmt) # No implicit names elif (nbtypes > 0): validate = NameValidator(**validationargs) # Default initial names : should we change the format ? if ((ndtype.names == tuple("f%i" % i for i in range(nbtypes))) and (defaultfmt != "f%i")): ndtype.names = validate([''] * nbtypes, defaultfmt=defaultfmt) # Explicit initial names : just validate else: ndtype.names = validate(ndtype.names, defaultfmt=defaultfmt) return ndtype
bsd-3-clause
macs03/demo-cms
cms/bin/pilfont.py
1
1058
#!/Users/miguelcardenas/Desktop/demo-cms/cms/bin/python # # The Python Imaging Library # $Id$ # # PIL raster font compiler # # history: # 1997-08-25 fl created # 2002-03-10 fl use "from PIL import" # from __future__ import print_function VERSION = "0.4" import glob, sys # drivers from PIL import BdfFontFile from PIL import PcfFontFile if len(sys.argv) <= 1: print("PILFONT", VERSION, "-- PIL font compiler.") print() print("Usage: pilfont fontfiles...") print() print("Convert given font files to the PIL raster font format.") print("This version of pilfont supports X BDF and PCF fonts.") sys.exit(1) files = [] for f in sys.argv[1:]: files = files + glob.glob(f) for f in files: print(f + "...", end=' ') try: fp = open(f, "rb") try: p = PcfFontFile.PcfFontFile(fp) except SyntaxError: fp.seek(0) p = BdfFontFile.BdfFontFile(fp) p.save(f) except (SyntaxError, IOError): print("failed") else: print("OK")
mit
mwoehlke/gtest
scripts/upload.py
2511
51024
#!/usr/bin/env python # # Copyright 2007 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tool for uploading diffs from a version control system to the codereview app. Usage summary: upload.py [options] [-- diff_options] Diff options are passed to the diff command of the underlying system. Supported version control systems: Git Mercurial Subversion It is important for Git/Mercurial users to specify a tree/node/branch to diff against by using the '--rev' option. """ # This code is derived from appcfg.py in the App Engine SDK (open source), # and from ASPN recipe #146306. import cookielib import getpass import logging import md5 import mimetypes import optparse import os import re import socket import subprocess import sys import urllib import urllib2 import urlparse try: import readline except ImportError: pass # The logging verbosity: # 0: Errors only. # 1: Status messages. # 2: Info logs. # 3: Debug logs. verbosity = 1 # Max size of patch or base file. MAX_UPLOAD_SIZE = 900 * 1024 def GetEmail(prompt): """Prompts the user for their email address and returns it. The last used email address is saved to a file and offered up as a suggestion to the user. If the user presses enter without typing in anything the last used email address is used. If the user enters a new address, it is saved for next time we prompt. 
""" last_email_file_name = os.path.expanduser("~/.last_codereview_email_address") last_email = "" if os.path.exists(last_email_file_name): try: last_email_file = open(last_email_file_name, "r") last_email = last_email_file.readline().strip("\n") last_email_file.close() prompt += " [%s]" % last_email except IOError, e: pass email = raw_input(prompt + ": ").strip() if email: try: last_email_file = open(last_email_file_name, "w") last_email_file.write(email) last_email_file.close() except IOError, e: pass else: email = last_email return email def StatusUpdate(msg): """Print a status message to stdout. If 'verbosity' is greater than 0, print the message. Args: msg: The string to print. """ if verbosity > 0: print msg def ErrorExit(msg): """Print an error message to stderr and exit.""" print >>sys.stderr, msg sys.exit(1) class ClientLoginError(urllib2.HTTPError): """Raised to indicate there was an error authenticating with ClientLogin.""" def __init__(self, url, code, msg, headers, args): urllib2.HTTPError.__init__(self, url, code, msg, headers, None) self.args = args self.reason = args["Error"] class AbstractRpcServer(object): """Provides a common interface for a simple RPC server.""" def __init__(self, host, auth_function, host_override=None, extra_headers={}, save_cookies=False): """Creates a new HttpRpcServer. Args: host: The host to send requests to. auth_function: A function that takes no arguments and returns an (email, password) tuple when called. Will be called if authentication is required. host_override: The host header to send to the server (defaults to host). extra_headers: A dict of extra headers to append to every request. save_cookies: If True, save the authentication cookies to local disk. If False, use an in-memory cookiejar instead. Subclasses must implement this functionality. Defaults to False. 
""" self.host = host self.host_override = host_override self.auth_function = auth_function self.authenticated = False self.extra_headers = extra_headers self.save_cookies = save_cookies self.opener = self._GetOpener() if self.host_override: logging.info("Server: %s; Host: %s", self.host, self.host_override) else: logging.info("Server: %s", self.host) def _GetOpener(self): """Returns an OpenerDirector for making HTTP requests. Returns: A urllib2.OpenerDirector object. """ raise NotImplementedError() def _CreateRequest(self, url, data=None): """Creates a new urllib request.""" logging.debug("Creating request for: '%s' with payload:\n%s", url, data) req = urllib2.Request(url, data=data) if self.host_override: req.add_header("Host", self.host_override) for key, value in self.extra_headers.iteritems(): req.add_header(key, value) return req def _GetAuthToken(self, email, password): """Uses ClientLogin to authenticate the user, returning an auth token. Args: email: The user's email address password: The user's password Raises: ClientLoginError: If there was an error authenticating with ClientLogin. HTTPError: If there was some other form of HTTP error. Returns: The authentication token returned by ClientLogin. """ account_type = "GOOGLE" if self.host.endswith(".google.com"): # Needed for use inside Google. 
account_type = "HOSTED" req = self._CreateRequest( url="https://www.google.com/accounts/ClientLogin", data=urllib.urlencode({ "Email": email, "Passwd": password, "service": "ah", "source": "rietveld-codereview-upload", "accountType": account_type, }), ) try: response = self.opener.open(req) response_body = response.read() response_dict = dict(x.split("=") for x in response_body.split("\n") if x) return response_dict["Auth"] except urllib2.HTTPError, e: if e.code == 403: body = e.read() response_dict = dict(x.split("=", 1) for x in body.split("\n") if x) raise ClientLoginError(req.get_full_url(), e.code, e.msg, e.headers, response_dict) else: raise def _GetAuthCookie(self, auth_token): """Fetches authentication cookies for an authentication token. Args: auth_token: The authentication token returned by ClientLogin. Raises: HTTPError: If there was an error fetching the authentication cookies. """ # This is a dummy value to allow us to identify when we're successful. continue_location = "http://localhost/" args = {"continue": continue_location, "auth": auth_token} req = self._CreateRequest("http://%s/_ah/login?%s" % (self.host, urllib.urlencode(args))) try: response = self.opener.open(req) except urllib2.HTTPError, e: response = e if (response.code != 302 or response.info()["location"] != continue_location): raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg, response.headers, response.fp) self.authenticated = True def _Authenticate(self): """Authenticates the user. The authentication process works as follows: 1) We get a username and password from the user 2) We use ClientLogin to obtain an AUTH token for the user (see http://code.google.com/apis/accounts/AuthForInstalledApps.html). 3) We pass the auth token to /_ah/login on the server to obtain an authentication cookie. If login was successful, it tries to redirect us to the URL we provided. 
If we attempt to access the upload API without first obtaining an authentication cookie, it returns a 401 response and directs us to authenticate ourselves with ClientLogin. """ for i in range(3): credentials = self.auth_function() try: auth_token = self._GetAuthToken(credentials[0], credentials[1]) except ClientLoginError, e: if e.reason == "BadAuthentication": print >>sys.stderr, "Invalid username or password." continue if e.reason == "CaptchaRequired": print >>sys.stderr, ( "Please go to\n" "https://www.google.com/accounts/DisplayUnlockCaptcha\n" "and verify you are a human. Then try again.") break if e.reason == "NotVerified": print >>sys.stderr, "Account not verified." break if e.reason == "TermsNotAgreed": print >>sys.stderr, "User has not agreed to TOS." break if e.reason == "AccountDeleted": print >>sys.stderr, "The user account has been deleted." break if e.reason == "AccountDisabled": print >>sys.stderr, "The user account has been disabled." break if e.reason == "ServiceDisabled": print >>sys.stderr, ("The user's access to the service has been " "disabled.") break if e.reason == "ServiceUnavailable": print >>sys.stderr, "The service is not available; try again later." break raise self._GetAuthCookie(auth_token) return def Send(self, request_path, payload=None, content_type="application/octet-stream", timeout=None, **kwargs): """Sends an RPC and returns the response. Args: request_path: The path to send the request to, eg /api/appversion/create. payload: The body of the request, or None to send an empty request. content_type: The Content-Type header to use. timeout: timeout in seconds; default None i.e. no timeout. (Note: for large requests on OS X, the timeout doesn't work right.) kwargs: Any keyword arguments are converted into query string parameters. Returns: The response body, as a string. """ # TODO: Don't require authentication. Let the server say # whether it is necessary. 
    # Authenticate lazily on first use rather than at construction time.
    if not self.authenticated:
      self._Authenticate()

    old_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
      tries = 0
      while True:
        tries += 1
        args = dict(kwargs)
        url = "http://%s%s" % (self.host, request_path)
        if args:
          url += "?" + urllib.urlencode(args)
        req = self._CreateRequest(url=url, data=payload)
        req.add_header("Content-Type", content_type)
        try:
          f = self.opener.open(req)
          response = f.read()
          f.close()
          return response
        except urllib2.HTTPError, e:
          if tries > 3:
            raise
          elif e.code == 401:
            # Cookie expired or missing: re-authenticate and retry.
            self._Authenticate()
##          elif e.code >= 500 and e.code < 600:
##            # Server Error - try again.
##            continue
          else:
            raise
    finally:
      # Restore the process-wide socket timeout whatever happened above.
      socket.setdefaulttimeout(old_timeout)


class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""

  def _Authenticate(self):
    """Save the cookie jar after authentication."""
    super(HttpRpcServer, self)._Authenticate()
    if self.save_cookies:
      StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
      self.cookie_jar.save()

  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    # Handlers are added individually (instead of build_opener) so that no
    # redirect handler is installed: _GetAuthCookie relies on seeing the 302.
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
        os.close(fd)
      # Always chmod the cookie file
      os.chmod(self.cookie_file, 0600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener


# Command-line interface.  Options are grouped by theme; RealMain() parses
# them and threads the resulting 'options' object through the VCS classes.
parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
parser.add_option("-y", "--assume_yes", action="store_true",
                  dest="assume_yes", default=False,
                  help="Assume that the answer to yes/no questions is 'yes'.")
# Logging
group = parser.add_option_group("Logging options")
group.add_option("-q", "--quiet", action="store_const", const=0,
                 dest="verbose", help="Print errors only.")
group.add_option("-v", "--verbose", action="store_const", const=2,
                 dest="verbose", default=1,
                 help="Print info level logs (default).")
group.add_option("--noisy", action="store_const", const=3,
                 dest="verbose", help="Print all logs.")
# Review server
group = parser.add_option_group("Review server options")
group.add_option("-s", "--server", action="store", dest="server",
                 default="codereview.appspot.com",
                 metavar="SERVER",
                 help=("The server to upload to. The format is host[:port]. "
                       "Defaults to 'codereview.appspot.com'."))
group.add_option("-e", "--email", action="store", dest="email",
                 metavar="EMAIL", default=None,
                 help="The username to use. Will prompt if omitted.")
group.add_option("-H", "--host", action="store", dest="host",
                 metavar="HOST", default=None,
                 help="Overrides the Host header sent with all RPCs.")
group.add_option("--no_cookies", action="store_false",
                 dest="save_cookies", default=True,
                 help="Do not save authentication cookies to local disk.")
# Issue
group = parser.add_option_group("Issue options")
group.add_option("-d", "--description", action="store", dest="description",
                 metavar="DESCRIPTION", default=None,
                 help="Optional description when creating an issue.")
group.add_option("-f", "--description_file", action="store",
                 dest="description_file", metavar="DESCRIPTION_FILE",
                 default=None,
                 help="Optional path of a file that contains "
                      "the description when creating an issue.")
group.add_option("-r", "--reviewers", action="store", dest="reviewers",
                 metavar="REVIEWERS", default=None,
                 help="Add reviewers (comma separated email addresses).")
group.add_option("--cc", action="store", dest="cc",
                 metavar="CC", default=None,
                 help="Add CC (comma separated email addresses).")
# Upload options
group = parser.add_option_group("Patch options")
group.add_option("-m", "--message", action="store", dest="message",
                 metavar="MESSAGE", default=None,
                 help="A message to identify the patch. "
                      "Will prompt if omitted.")
group.add_option("-i", "--issue", type="int", action="store",
                 metavar="ISSUE", default=None,
                 help="Issue number to which to add. "
Defaults to new issue.") group.add_option("--download_base", action="store_true", dest="download_base", default=False, help="Base files will be downloaded by the server " "(side-by-side diffs may not work on files with CRs).") group.add_option("--rev", action="store", dest="revision", metavar="REV", default=None, help="Branch/tree/revision to diff against (used by DVCS).") group.add_option("--send_mail", action="store_true", dest="send_mail", default=False, help="Send notification email to reviewers.") def GetRpcServer(options): """Returns an instance of an AbstractRpcServer. Returns: A new AbstractRpcServer, on which RPC calls can be made. """ rpc_server_class = HttpRpcServer def GetUserCredentials(): """Prompts the user for a username and password.""" email = options.email if email is None: email = GetEmail("Email (login for uploading to %s)" % options.server) password = getpass.getpass("Password for %s: " % email) return (email, password) # If this is the dev_appserver, use fake authentication. host = (options.host or options.server).lower() if host == "localhost" or host.startswith("localhost:"): email = options.email if email is None: email = "test@example.com" logging.info("Using debug user %s. Override with --email" % email) server = rpc_server_class( options.server, lambda: (email, "password"), host_override=options.host, extra_headers={"Cookie": 'dev_appserver_login="%s:False"' % email}, save_cookies=options.save_cookies) # Don't try to talk to ClientLogin. server.authenticated = True return server return rpc_server_class(options.server, GetUserCredentials, host_override=options.host, save_cookies=options.save_cookies) def EncodeMultipartFormData(fields, files): """Encode form fields for multipart/form-data. Args: fields: A sequence of (name, value) elements for regular form fields. files: A sequence of (name, filename, value) elements for data to be uploaded as files. Returns: (content_type, body) ready for httplib.HTTP instance. 
  Source: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
  """
  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
  CRLF = '\r\n'
  lines = []
  for (key, value) in fields:
    lines.append('--' + BOUNDARY)
    lines.append('Content-Disposition: form-data; name="%s"' % key)
    lines.append('')
    lines.append(value)
  for (key, filename, value) in files:
    lines.append('--' + BOUNDARY)
    lines.append('Content-Disposition: form-data; name="%s"; filename="%s"' %
                 (key, filename))
    lines.append('Content-Type: %s' % GetContentType(filename))
    lines.append('')
    lines.append(value)
  lines.append('--' + BOUNDARY + '--')
  lines.append('')
  body = CRLF.join(lines)
  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
  return content_type, body


def GetContentType(filename):
  """Helper to guess the content-type from the filename."""
  return mimetypes.guess_type(filename)[0] or 'application/octet-stream'


# Use a shell for subcommands on Windows to get a PATH search.
use_shell = sys.platform.startswith("win")


def RunShellWithReturnCode(command, print_output=False,
                           universal_newlines=True):
  """Executes a command and returns the output from stdout and the return code.

  Args:
    command: Command to execute.
    print_output: If True, the output is printed to stdout.
      If False, both stdout and stderr are ignored.
    universal_newlines: Use universal_newlines flag (default: True).

  Returns:
    Tuple (output, return code)
  """
  logging.info("Running %s", command)
  p = subprocess.Popen(command, stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE,
                       shell=use_shell,
                       universal_newlines=universal_newlines)
  if print_output:
    output_array = []
    while True:
      line = p.stdout.readline()
      if not line:
        break
      print line.strip("\n")
      output_array.append(line)
    output = "".join(output_array)
  else:
    output = p.stdout.read()
  p.wait()
  # NOTE(review): stderr is only drained after stdout is exhausted; a child
  # that fills the stderr pipe buffer first could in principle block here.
  errout = p.stderr.read()
  if print_output and errout:
    print >>sys.stderr, errout
  p.stdout.close()
  p.stderr.close()
  return output, p.returncode


def RunShell(command, silent_ok=False, universal_newlines=True,
             print_output=False):
  """Runs a command; exits via ErrorExit() on failure.

  A nonzero exit status always aborts; empty output also aborts unless
  silent_ok is True.
  """
  data, retcode = RunShellWithReturnCode(command, print_output,
                                         universal_newlines)
  if retcode:
    ErrorExit("Got error status from %s:\n%s" % (command, data))
  if not silent_ok and not data:
    ErrorExit("No output from %s" % command)
  return data


class VersionControlSystem(object):
  """Abstract base class providing an interface to the VCS."""

  def __init__(self, options):
    """Constructor.

    Args:
      options: Command line options.
    """
    self.options = options

  def GenerateDiff(self, args):
    """Return the current diff as a string.

    Args:
      args: Extra arguments to pass to the diff command.
    """
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)

  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)

  def CheckForUnknownFiles(self):
    """Show an "are you sure?" prompt if there are unknown files."""
    unknown_files = self.GetUnknownFiles()
    if unknown_files:
      print "The following files are not added to version control:"
      for line in unknown_files:
        print line
      prompt = "Are you sure to continue?(y/N) "
      answer = raw_input(prompt).strip()
      if answer != "y":
        ErrorExit("User aborted")

  def GetBaseFile(self, filename):
    """Get the content of the upstream version of a file.
Returns: A tuple (base_content, new_content, is_binary, status) base_content: The contents of the base file. new_content: For text files, this is empty. For binary files, this is the contents of the new file, since the diff output won't contain information to reconstruct the current file. is_binary: True iff the file is binary. status: The status of the file. """ raise NotImplementedError( "abstract method -- subclass %s must override" % self.__class__) def GetBaseFiles(self, diff): """Helper that calls GetBase file for each file in the patch. Returns: A dictionary that maps from filename to GetBaseFile's tuple. Filenames are retrieved based on lines that start with "Index:" or "Property changes on:". """ files = {} for line in diff.splitlines(True): if line.startswith('Index:') or line.startswith('Property changes on:'): unused, filename = line.split(':', 1) # On Windows if a file has property changes its filename uses '\' # instead of '/'. filename = filename.strip().replace('\\', '/') files[filename] = self.GetBaseFile(filename) return files def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options, files): """Uploads the base files (and if necessary, the current ones as well).""" def UploadFile(filename, file_id, content, is_binary, status, is_base): """Uploads a file to the server.""" file_too_large = False if is_base: type = "base" else: type = "current" if len(content) > MAX_UPLOAD_SIZE: print ("Not uploading the %s file for %s because it's too large." 
% (type, filename)) file_too_large = True content = "" checksum = md5.new(content).hexdigest() if options.verbose > 0 and not file_too_large: print "Uploading %s file for %s" % (type, filename) url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset), file_id) form_fields = [("filename", filename), ("status", status), ("checksum", checksum), ("is_binary", str(is_binary)), ("is_current", str(not is_base)), ] if file_too_large: form_fields.append(("file_too_large", "1")) if options.email: form_fields.append(("user", options.email)) ctype, body = EncodeMultipartFormData(form_fields, [("data", filename, content)]) response_body = rpc_server.Send(url, body, content_type=ctype) if not response_body.startswith("OK"): StatusUpdate(" --> %s" % response_body) sys.exit(1) patches = dict() [patches.setdefault(v, k) for k, v in patch_list] for filename in patches.keys(): base_content, new_content, is_binary, status = files[filename] file_id_str = patches.get(filename) if file_id_str.find("nobase") != -1: base_content = None file_id_str = file_id_str[file_id_str.rfind("_") + 1:] file_id = int(file_id_str) if base_content != None: UploadFile(filename, file_id, base_content, is_binary, status, True) if new_content != None: UploadFile(filename, file_id, new_content, is_binary, status, False) def IsImage(self, filename): """Returns true if the filename has an image extension.""" mimetype = mimetypes.guess_type(filename)[0] if not mimetype: return False return mimetype.startswith("image/") class SubversionVCS(VersionControlSystem): """Implementation of the VersionControlSystem interface for Subversion.""" def __init__(self, options): super(SubversionVCS, self).__init__(options) if self.options.revision: match = re.match(r"(\d+)(:(\d+))?", self.options.revision) if not match: ErrorExit("Invalid Subversion revision %s." 
                  % self.options.revision)
      self.rev_start = match.group(1)
      self.rev_end = match.group(3)
    else:
      self.rev_start = self.rev_end = None
    # Cache output from "svn list -r REVNO dirname".
    # Keys: dirname, Values: 2-tuple (ouput for start rev and end rev).
    self.svnls_cache = {}
    # SVN base URL is required to fetch files deleted in an older revision.
    # Result is cached to not guess it over and over again in GetBaseFile().
    required = self.options.download_base or self.options.revision is not None
    self.svn_base = self._GuessBase(required)

  def GuessBase(self, required):
    """Wrapper for _GuessBase."""
    return self.svn_base

  def _GuessBase(self, required):
    """Returns the SVN base URL.

    Args:
      required: If true, exits if the url can't be guessed, otherwise None is
        returned.
    """
    info = RunShell(["svn", "info"])
    for line in info.splitlines():
      words = line.split()
      if len(words) == 2 and words[0] == "URL:":
        url = words[1]
        scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
        username, netloc = urllib.splituser(netloc)
        if username:
          logging.info("Removed username from base URL")
        # Known hosting sites get a hand-crafted *checkout* URL; everything
        # else falls back to the repository URL itself.
        if netloc.endswith("svn.python.org"):
          if netloc == "svn.python.org":
            if path.startswith("/projects/"):
              path = path[9:]
          elif netloc != "pythondev@svn.python.org":
            ErrorExit("Unrecognized Python URL: %s" % url)
          base = "http://svn.python.org/view/*checkout*%s/" % path
          logging.info("Guessed Python base = %s", base)
        elif netloc.endswith("svn.collab.net"):
          if path.startswith("/repos/"):
            path = path[6:]
          base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
          logging.info("Guessed CollabNet base = %s", base)
        elif netloc.endswith(".googlecode.com"):
          path = path + "/"
          base = urlparse.urlunparse(("http", netloc, path, params,
                                      query, fragment))
          logging.info("Guessed Google Code base = %s", base)
        else:
          path = path + "/"
          base = urlparse.urlunparse((scheme, netloc, path, params,
                                      query, fragment))
          logging.info("Guessed base = %s", base)
        return base
    if required:
      ErrorExit("Can't find URL in output from svn info")
    return None

  def GenerateDiff(self, args):
    cmd = ["svn", "diff"]
    if self.options.revision:
      cmd += ["-r", self.options.revision]
    cmd.extend(args)
    data = RunShell(cmd)
    count = 0
    for line in data.splitlines():
      if line.startswith("Index:") or line.startswith("Property changes on:"):
        count += 1
        logging.info(line)
    if not count:
      ErrorExit("No valid patches found in output from svn diff")
    return data

  def _CollapseKeywords(self, content, keyword_str):
    """Collapses SVN keywords."""
    # svn cat translates keywords but svn diff doesn't. As a result of this
    # behavior patching.PatchChunks() fails with a chunk mismatch error.
    # This part was originally written by the Review Board development team
    # who had the same problem (http://reviews.review-board.org/r/276/).
    # Mapping of keywords to known aliases
    svn_keywords = {
        # Standard keywords
        'Date': ['Date', 'LastChangedDate'],
        'Revision': ['Revision', 'LastChangedRevision', 'Rev'],
        'Author': ['Author', 'LastChangedBy'],
        'HeadURL': ['HeadURL', 'URL'],
        'Id': ['Id'],

        # Aliases
        'LastChangedDate': ['LastChangedDate', 'Date'],
        'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
        'LastChangedBy': ['LastChangedBy', 'Author'],
        'URL': ['URL', 'HeadURL'],
    }

    def repl(m):
      # Keep the keyword but blank its expansion so the text matches what
      # "svn diff" produced.
      if m.group(2):
        return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
      return "$%s$" % m.group(1)

    keywords = [keyword
                for name in keyword_str.split(" ")
                for keyword in svn_keywords.get(name, [])]
    return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords),
                  repl, content)

  def GetUnknownFiles(self):
    status = RunShell(["svn", "status", "--ignore-externals"],
                      silent_ok=True)
    unknown_files = []
    for line in status.split("\n"):
      if line and line[0] == "?":
        unknown_files.append(line)
    return unknown_files

  def ReadFile(self, filename):
    """Returns the contents of a file."""
    file = open(filename, 'rb')
    result = ""
    try:
      result = file.read()
    finally:
      file.close()
    return result

  def GetStatus(self, filename):
    """Returns the status of a file."""
    if not self.options.revision:
      status \
= RunShell(["svn", "status", "--ignore-externals", filename]) if not status: ErrorExit("svn status returned no output for %s" % filename) status_lines = status.splitlines() # If file is in a cl, the output will begin with # "\n--- Changelist 'cl_name':\n". See # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt if (len(status_lines) == 3 and not status_lines[0] and status_lines[1].startswith("--- Changelist")): status = status_lines[2] else: status = status_lines[0] # If we have a revision to diff against we need to run "svn list" # for the old and the new revision and compare the results to get # the correct status for a file. else: dirname, relfilename = os.path.split(filename) if dirname not in self.svnls_cache: cmd = ["svn", "list", "-r", self.rev_start, dirname or "."] out, returncode = RunShellWithReturnCode(cmd) if returncode: ErrorExit("Failed to get status for %s." % filename) old_files = out.splitlines() args = ["svn", "list"] if self.rev_end: args += ["-r", self.rev_end] cmd = args + [dirname or "."] out, returncode = RunShellWithReturnCode(cmd) if returncode: ErrorExit("Failed to run command %s" % cmd) self.svnls_cache[dirname] = (old_files, out.splitlines()) old_files, new_files = self.svnls_cache[dirname] if relfilename in old_files and relfilename not in new_files: status = "D " elif relfilename in old_files and relfilename in new_files: status = "M " else: status = "A " return status def GetBaseFile(self, filename): status = self.GetStatus(filename) base_content = None new_content = None # If a file is copied its status will be "A +", which signifies # "addition-with-history". See "svn st" for more information. We need to # upload the original file or else diff parsing will fail if the file was # edited. if status[0] == "A" and status[3] != "+": # We'll need to upload the new content if we're adding a binary file # since diff's output won't contain it. 
mimetype = RunShell(["svn", "propget", "svn:mime-type", filename], silent_ok=True) base_content = "" is_binary = mimetype and not mimetype.startswith("text/") if is_binary and self.IsImage(filename): new_content = self.ReadFile(filename) elif (status[0] in ("M", "D", "R") or (status[0] == "A" and status[3] == "+") or # Copied file. (status[0] == " " and status[1] == "M")): # Property change. args = [] if self.options.revision: url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start) else: # Don't change filename, it's needed later. url = filename args += ["-r", "BASE"] cmd = ["svn"] + args + ["propget", "svn:mime-type", url] mimetype, returncode = RunShellWithReturnCode(cmd) if returncode: # File does not exist in the requested revision. # Reset mimetype, it contains an error message. mimetype = "" get_base = False is_binary = mimetype and not mimetype.startswith("text/") if status[0] == " ": # Empty base content just to force an upload. base_content = "" elif is_binary: if self.IsImage(filename): get_base = True if status[0] == "M": if not self.rev_end: new_content = self.ReadFile(filename) else: url = "%s/%s@%s" % (self.svn_base, filename, self.rev_end) new_content = RunShell(["svn", "cat", url], universal_newlines=True, silent_ok=True) else: base_content = "" else: get_base = True if get_base: if is_binary: universal_newlines = False else: universal_newlines = True if self.rev_start: # "svn cat -r REV delete_file.txt" doesn't work. cat requires # the full URL with "@REV" appended instead of using "-r" option. 
url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start) base_content = RunShell(["svn", "cat", url], universal_newlines=universal_newlines, silent_ok=True) else: base_content = RunShell(["svn", "cat", filename], universal_newlines=universal_newlines, silent_ok=True) if not is_binary: args = [] if self.rev_start: url = "%s/%s@%s" % (self.svn_base, filename, self.rev_start) else: url = filename args += ["-r", "BASE"] cmd = ["svn"] + args + ["propget", "svn:keywords", url] keywords, returncode = RunShellWithReturnCode(cmd) if keywords and not returncode: base_content = self._CollapseKeywords(base_content, keywords) else: StatusUpdate("svn status returned unexpected output: %s" % status) sys.exit(1) return base_content, new_content, is_binary, status[0:5] class GitVCS(VersionControlSystem): """Implementation of the VersionControlSystem interface for Git.""" def __init__(self, options): super(GitVCS, self).__init__(options) # Map of filename -> hash of base file. self.base_hashes = {} def GenerateDiff(self, extra_args): # This is more complicated than svn's GenerateDiff because we must convert # the diff output to include an svn-style "Index:" line as well as record # the hashes of the base files, so we can upload them along with our diff. if self.options.revision: extra_args = [self.options.revision] + extra_args gitdiff = RunShell(["git", "diff", "--full-index"] + extra_args) svndiff = [] filecount = 0 filename = None for line in gitdiff.splitlines(): match = re.match(r"diff --git a/(.*) b/.*$", line) if match: filecount += 1 filename = match.group(1) svndiff.append("Index: %s\n" % filename) else: # The "index" line in a git diff looks like this (long hashes elided): # index 82c0d44..b2cee3f 100755 # We want to save the left hash, as that identifies the base file. 
match = re.match(r"index (\w+)\.\.", line) if match: self.base_hashes[filename] = match.group(1) svndiff.append(line + "\n") if not filecount: ErrorExit("No valid patches found in output from git diff") return "".join(svndiff) def GetUnknownFiles(self): status = RunShell(["git", "ls-files", "--exclude-standard", "--others"], silent_ok=True) return status.splitlines() def GetBaseFile(self, filename): hash = self.base_hashes[filename] base_content = None new_content = None is_binary = False if hash == "0" * 40: # All-zero hash indicates no base file. status = "A" base_content = "" else: status = "M" base_content, returncode = RunShellWithReturnCode(["git", "show", hash]) if returncode: ErrorExit("Got error status from 'git show %s'" % hash) return (base_content, new_content, is_binary, status) class MercurialVCS(VersionControlSystem): """Implementation of the VersionControlSystem interface for Mercurial.""" def __init__(self, options, repo_dir): super(MercurialVCS, self).__init__(options) # Absolute path to repository (we can be in a subdir) self.repo_dir = os.path.normpath(repo_dir) # Compute the subdir cwd = os.path.normpath(os.getcwd()) assert cwd.startswith(self.repo_dir) self.subdir = cwd[len(self.repo_dir):].lstrip(r"\/") if self.options.revision: self.base_rev = self.options.revision else: self.base_rev = RunShell(["hg", "parent", "-q"]).split(':')[1].strip() def _GetRelPath(self, filename): """Get relative path of a file according to the current directory, given its logical path in the repo.""" assert filename.startswith(self.subdir), filename return filename[len(self.subdir):].lstrip(r"\/") def GenerateDiff(self, extra_args): # If no file specified, restrict to the current subdir extra_args = extra_args or ["."] cmd = ["hg", "diff", "--git", "-r", self.base_rev] + extra_args data = RunShell(cmd, silent_ok=True) svndiff = [] filecount = 0 for line in data.splitlines(): m = re.match("diff --git a/(\S+) b/(\S+)", line) if m: # Modify line to make it look like 
      # as it comes from svn diff.
      # With this modification no changes on the server side are required
      # to make upload.py work with Mercurial repos.
      # NOTE: for proper handling of moved/copied files, we have to use
      # the second filename.
      filename = m.group(2)
      svndiff.append("Index: %s" % filename)
      svndiff.append("=" * 67)
      filecount += 1
      logging.info(line)
    else:
      svndiff.append(line)
    if not filecount:
      ErrorExit("No valid patches found in output from hg diff")
    return "\n".join(svndiff) + "\n"

  def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    # NOTE(review): `args` is assigned but never used below — candidate for
    # removal; left untouched here.
    args = []
    # "hg status -u" lists untracked ("?") files under the current directory.
    status = RunShell(["hg", "status", "--rev", self.base_rev, "-u", "."],
        silent_ok=True)
    unknown_files = []
    for line in status.splitlines():
      st, fn = line.split(" ", 1)
      if st == "?":
        unknown_files.append(fn)
    return unknown_files

  def GetBaseFile(self, filename):
    """Fetch base and new contents of a file plus its status for upload.

    Returns a 4-tuple (base_content, new_content, is_binary, status) where
    status is a single-letter hg status code ("A", "M", "R", ...).
    """
    # "hg status" and "hg cat" both take a path relative to the current subdir
    # rather than to the repo root, but "hg diff" has given us the full path
    # to the repo root.
    base_content = ""
    new_content = None
    is_binary = False
    oldrelpath = relpath = self._GetRelPath(filename)
    # "hg status -C" returns two lines for moved/copied files, one otherwise
    out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath])
    out = out.splitlines()
    # HACK: strip error message about missing file/directory if it isn't in
    # the working copy
    if out[0].startswith('%s: ' % relpath):
      out = out[1:]
    if len(out) > 1:
      # Moved/copied => considered as modified, use old filename to
      # retrieve base contents
      oldrelpath = out[1].strip()
      status = "M"
    else:
      status, _ = out[0].split(' ', 1)
    if status != "A":
      # File existed in the base revision: fetch its old contents.
      base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
        silent_ok=True)
      is_binary = "\0" in base_content  # Mercurial's heuristic
    if status != "R":
      # File still exists in the working copy: read the new contents.
      new_content = open(relpath, "rb").read()
      is_binary = is_binary or "\0" in new_content
    if is_binary and base_content:
      # Fetch again without converting newlines
      base_content = RunShell(["hg", "cat", "-r", self.base_rev, oldrelpath],
        silent_ok=True, universal_newlines=False)
    if not is_binary or not self.IsImage(relpath):
      # Non-binary (or binary but not an image) files are uploaded via the
      # diff alone, so the new content is not sent separately.
      new_content = None
    return base_content, new_content, is_binary, status


# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
  """Splits a patch into separate pieces for each file.

  Args:
    data: A string containing the output of svn diff.

  Returns:
    A list of 2-tuple (filename, text) where text is the svn diff output
      pertaining to filename.
  """
  patches = []
  filename = None
  diff = []
  for line in data.splitlines(True):
    new_filename = None
    if line.startswith('Index:'):
      # "Index: path/to/file" opens a new per-file section.
      unused, new_filename = line.split(':', 1)
      new_filename = new_filename.strip()
    elif line.startswith('Property changes on:'):
      unused, temp_filename = line.split(':', 1)
      # When a file is modified, paths use '/' between directories, however
      # when a property is modified '\' is used on Windows.  Make them the same
      # otherwise the file shows up twice.
  # NOTE(review): Python 2 module — `except OSError, (...)` tuple unpacking,
  # `print` statements, `raw_input` and `dict.iteritems` below are Py2-only.
  try:
    out, returncode = RunShellWithReturnCode(["hg", "root"])
    if returncode == 0:
      return MercurialVCS(options, out.strip())
  except OSError, (errno, message):
    if errno != 2:  # ENOENT -- they don't have hg installed.
      raise

  # Subversion has a .svn in all working directories.
  if os.path.isdir('.svn'):
    logging.info("Guessed VCS = Subversion")
    return SubversionVCS(options)

  # Git has a command to test if you're in a git tree.
  # Try running it, but don't die if we don't have git installed.
  try:
    out, returncode = RunShellWithReturnCode(["git", "rev-parse",
                                              "--is-inside-work-tree"])
    if returncode == 0:
      return GitVCS(options)
  except OSError, (errno, message):
    if errno != 2:  # ENOENT -- they don't have git installed.
      raise

  ErrorExit(("Could not guess version control system. "
             "Are you in a working copy directory?"))


def RealMain(argv, data=None):
  """The real main function.

  Args:
    argv: Command line arguments.
    data: Diff contents. If None (default) the diff is generated by the
      VersionControlSystem implementation returned by GuessVCS().

  Returns:
    A 2-tuple (issue id, patchset id).  The patchset id is None if the base
    files are not uploaded by this script (applies only to SVN checkouts).
  """
  logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
                              "%(lineno)s %(message)s "))
  # Force the C locale so that tool output parsed below is not localized.
  os.environ['LC_ALL'] = 'C'
  options, args = parser.parse_args(argv[1:])
  global verbosity
  verbosity = options.verbose
  if verbosity >= 3:
    logging.getLogger().setLevel(logging.DEBUG)
  elif verbosity >= 2:
    logging.getLogger().setLevel(logging.INFO)
  vcs = GuessVCS(options)
  if isinstance(vcs, SubversionVCS):
    # base field is only allowed for Subversion.
    # Note: Fetching base files may become deprecated in future releases.
    base = vcs.GuessBase(options.download_base)
  else:
    base = None
  if not base and options.download_base:
    # NOTE(review): this branch is a no-op — the guard already requires
    # options.download_base to be truthy, yet it is re-assigned True.
    # Upstream upload.py assigns False here ("Disabled..."); verify intent.
    options.download_base = True
    logging.info("Enabled upload of base file")
  if not options.assume_yes:
    vcs.CheckForUnknownFiles()
  if data is None:
    data = vcs.GenerateDiff(args)
  files = vcs.GetBaseFiles(data)
  if verbosity >= 1:
    print "Upload server:", options.server, "(change with -s/--server)"
  if options.issue:
    prompt = "Message describing this patch set: "
  else:
    prompt = "New issue subject: "
  message = options.message or raw_input(prompt).strip()
  if not message:
    ErrorExit("A non-empty message is required")
  rpc_server = GetRpcServer(options)
  form_fields = [("subject", message)]
  if base:
    form_fields.append(("base", base))
  if options.issue:
    form_fields.append(("issue", str(options.issue)))
  if options.email:
    form_fields.append(("user", options.email))
  if options.reviewers:
    # Very loose sanity check: addresses must have exactly one dot after '@'.
    for reviewer in options.reviewers.split(','):
      if "@" in reviewer and not reviewer.split("@")[1].count(".") == 1:
        ErrorExit("Invalid email address: %s" % reviewer)
    form_fields.append(("reviewers", options.reviewers))
  if options.cc:
    for cc in options.cc.split(','):
      if "@" in cc and not cc.split("@")[1].count(".") == 1:
        ErrorExit("Invalid email address: %s" % cc)
    form_fields.append(("cc", options.cc))
  description = options.description
  if options.description_file:
    if options.description:
      ErrorExit("Can't specify description and description_file")
    # NOTE(review): `file` shadows the Py2 builtin of the same name.
    file = open(options.description_file, 'r')
    description = file.read()
    file.close()
  if description:
    form_fields.append(("description", description))
  # Send a hash of all the base file so the server can determine if a copy
  # already exists in an earlier patchset.
  base_hashes = ""
  for file, info in files.iteritems():
    if not info[0] is None:
      checksum = md5.new(info[0]).hexdigest()
      if base_hashes:
        base_hashes += "|"
      base_hashes += checksum + ":" + file
  form_fields.append(("base_hashes", base_hashes))
  # If we're uploading base files, don't send the email before the uploads, so
  # that it contains the file status.
  if options.send_mail and options.download_base:
    form_fields.append(("send_mail", "1"))
  if not options.download_base:
    form_fields.append(("content_upload", "1"))
  if len(data) > MAX_UPLOAD_SIZE:
    print "Patch is large, so uploading file patches separately."
    uploaded_diff_file = []
    form_fields.append(("separate_patches", "1"))
  else:
    uploaded_diff_file = [("data", "data.diff", data)]
  ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)
  response_body = rpc_server.Send("/upload", body, content_type=ctype)
  patchset = None
  if not options.download_base or not uploaded_diff_file:
    # Server replies: line 0 = message, line 1 = patchset id, rest = patches.
    lines = response_body.splitlines()
    if len(lines) >= 2:
      msg = lines[0]
      patchset = lines[1].strip()
      patches = [x.split(" ", 1) for x in lines[2:]]
    else:
      msg = response_body
  else:
    msg = response_body
  StatusUpdate(msg)
  if not response_body.startswith("Issue created.") and \
     not response_body.startswith("Issue updated."):
    sys.exit(0)
  # Issue id is the final path component of the URL in the status message.
  issue = msg[msg.rfind("/")+1:]
  if not uploaded_diff_file:
    result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
    if not options.download_base:
      patches = result
  if not options.download_base:
    vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)
  if options.send_mail:
    rpc_server.Send("/" + issue + "/mail", payload="")
  return issue, patchset


def main():
  try:
    RealMain(sys.argv)
  except KeyboardInterrupt:
    print
    StatusUpdate("Interrupted.")
    sys.exit(1)


if __name__ == "__main__":
  main()
bsd-3-clause
etos/django
django/core/management/commands/test.py
25
2292
import sys

from django.conf import settings
from django.core.management.base import BaseCommand
from django.test.utils import get_runner


class Command(BaseCommand):
    help = 'Discover and run tests in the specified modules or the current directory.'

    # DiscoverRunner runs the checks after databases are set up.
    requires_system_checks = False

    def __init__(self):
        # Remembered by run_from_argv() so add_arguments() can ask the chosen
        # runner class for its extra command line options.
        self.test_runner = None
        super().__init__()

    def run_from_argv(self, argv):
        """
        Pre-parse the command line to extract the value of the --testrunner
        option. This allows a test runner to define additional command line
        arguments.
        """
        option = '--testrunner='
        for arg in argv[2:]:
            if arg.startswith(option):
                self.test_runner = arg[len(option):]
                break
        super().run_from_argv(argv)

    def add_arguments(self, parser):
        parser.add_argument(
            'args', metavar='test_label', nargs='*',
            help='Module paths to test; can be modulename, modulename.TestCase or modulename.TestCase.test_method'
        )
        parser.add_argument(
            '--noinput', '--no-input', action='store_false', dest='interactive',
            help='Tells Django to NOT prompt the user for input of any kind.',
        )
        parser.add_argument(
            '--failfast', action='store_true', dest='failfast',
            help='Tells Django to stop running the test suite after first failed test.',
        )
        parser.add_argument(
            '--testrunner', action='store', dest='testrunner',
            help='Tells Django to use specified test runner class instead of '
                 'the one specified by the TEST_RUNNER setting.',
        )
        # Let the selected runner class contribute its own CLI options.
        test_runner_class = get_runner(settings, self.test_runner)
        if hasattr(test_runner_class, 'add_arguments'):
            test_runner_class.add_arguments(parser)

    def handle(self, *test_labels, **options):
        """Run the test suite and exit with status 1 if any test failed."""
        # `settings` and `get_runner` are already imported at module level;
        # the previous duplicate in-function imports were redundant.
        TestRunner = get_runner(settings, options['testrunner'])
        test_runner = TestRunner(**options)
        failures = test_runner.run_tests(test_labels)
        if failures:
            sys.exit(1)
bsd-3-clause
ruud-v-a/servo
python/mach/mach/mixin/process.py
124
6762
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# This module provides mixins to perform process execution.

from __future__ import absolute_import, unicode_literals

import logging
import os
import subprocess
import sys

from mozprocess.processhandler import ProcessHandlerMixin

from .logging import LoggingMixin


# Perform detection of operating system environment. This is used by command
# execution. We only do this once to save redundancy. Yes, this can fail module
# loading. That is arguably OK.
if 'SHELL' in os.environ:
    _current_shell = os.environ['SHELL']
elif 'MOZILLABUILD' in os.environ:
    _current_shell = os.environ['MOZILLABUILD'] + '/msys/bin/sh.exe'
elif 'COMSPEC' in os.environ:
    _current_shell = os.environ['COMSPEC']
else:
    raise Exception('Could not detect environment shell!')

# True when running inside the MinGW/msys UNIX-like environment on Windows.
_in_msys = False

if os.environ.get('MSYSTEM', None) == 'MINGW32':
    _in_msys = True

    if not _current_shell.lower().endswith('.exe'):
        _current_shell += '.exe'


class ProcessExecutionMixin(LoggingMixin):
    """Mix-in that provides process execution functionality."""

    def run_process(self, args=None, cwd=None, append_env=None,
        explicit_env=None, log_name=None, log_level=logging.INFO,
        line_handler=None, require_unix_environment=False,
        ensure_exit_code=0, ignore_children=False, pass_thru=False):
        """Runs a single process to completion.

        Takes a list of arguments to run where the first item is the
        executable. Runs the command in the specified directory and
        with optional environment variables.

        append_env -- Dict of environment variables to append to the current
            set of environment variables.
        explicit_env -- Dict of environment variables to set for the new
            process. Any existing environment variables will be ignored.

        require_unix_environment if True will ensure the command is executed
        within a UNIX environment. Basically, if we are on Windows, it will
        execute the command via an appropriate UNIX-like shell.

        ignore_children is proxied to mozprocess's ignore_children.

        ensure_exit_code is used to ensure the exit code of a process matches
        what is expected. If it is an integer, we raise an Exception if the
        exit code does not match this value. If it is True, we ensure the exit
        code is 0. If it is False, we don't perform any exit code validation.

        pass_thru is a special execution mode where the child process inherits
        this process's standard file handles (stdin, stdout, stderr) as well as
        additional file descriptors. It should be used for interactive
        processes where buffering from mozprocess could be an issue. pass_thru
        does not use mozprocess. Therefore, arguments like log_name,
        line_handler, and ignore_children have no effect.
        """
        args = self._normalize_command(args, require_unix_environment)

        self.log(logging.INFO, 'new_process', {'args': args}, ' '.join(args))

        def handleLine(line):
            # Converts str to unicode on Python 2 and bytes to str on Python 3.
            if isinstance(line, bytes):
                line = line.decode(sys.stdout.encoding or 'utf-8', 'replace')

            if line_handler:
                line_handler(line)

            if not log_name:
                return

            self.log(log_level, log_name, {'line': line.rstrip()}, '{line}')

        # explicit_env replaces the environment wholesale; otherwise start
        # from os.environ and layer append_env on top.
        use_env = {}
        if explicit_env:
            use_env = explicit_env
        else:
            use_env.update(os.environ)

            if append_env:
                use_env.update(append_env)

        self.log(logging.DEBUG, 'process', {'env': use_env}, 'Environment: {env}')

        # There is a bug in subprocess where it doesn't like unicode types in
        # environment variables. Here, ensure all unicode are converted to
        # binary. utf-8 is our globally assumed default. If the caller doesn't
        # want UTF-8, they shouldn't pass in a unicode instance.
        # NOTE(review): `unicode` is Python 2 only — this branch cannot run on
        # Python 3 (where env strings are already str).
        normalized_env = {}
        for k, v in use_env.items():
            if isinstance(k, unicode):
                k = k.encode('utf-8', 'strict')

            if isinstance(v, unicode):
                v = v.encode('utf-8', 'strict')

            normalized_env[k] = v

        use_env = normalized_env

        if pass_thru:
            proc = subprocess.Popen(args, cwd=cwd, env=use_env)
            status = None
            # Leave it to the subprocess to handle Ctrl+C. If it terminates as
            # a result of Ctrl+C, proc.wait() will return a status code, and,
            # we get out of the loop. If it doesn't, like e.g. gdb, we continue
            # waiting.
            while status is None:
                try:
                    status = proc.wait()
                except KeyboardInterrupt:
                    pass
        else:
            p = ProcessHandlerMixin(args, cwd=cwd, env=use_env,
                processOutputLine=[handleLine], universal_newlines=True,
                ignore_children=ignore_children)
            p.run()
            p.processOutput()
            status = p.wait()

        if ensure_exit_code is False:
            return status

        if ensure_exit_code is True:
            ensure_exit_code = 0

        if status != ensure_exit_code:
            raise Exception('Process executed with non-0 exit code: %s' % args)

        return status

    def _normalize_command(self, args, require_unix_environment):
        """Adjust command arguments to run in the necessary environment.

        This exists mainly to facilitate execution of programs requiring a
        *NIX shell when running on Windows. The caller specifies whether a
        shell environment is required. If it is and we are running on Windows
        but aren't running in the UNIX-like msys environment, then we rewrite
        the command to execute via a shell.
        """
        assert isinstance(args, list) and len(args)

        if not require_unix_environment or not _in_msys:
            return args

        # Always munge Windows-style into Unix style for the command.
        prog = args[0].replace('\\', '/')

        # PyMake removes the C: prefix. But, things seem to work here
        # without it. Not sure what that's about.

        # We run everything through the msys shell. We need to use
        # '-c' and pass all the arguments as one argument because that is
        # how sh works.
        cline = subprocess.list2cmdline([prog] + args[1:])
        return [_current_shell, '-c', cline]
mpl-2.0
alexthered/kienhoc-platform
common/djangoapps/session_inactivity_timeout/middleware.py
228
1981
""" Middleware to auto-expire inactive sessions after N seconds, which is configurable in settings. To enable this feature, set in a settings.py: SESSION_INACTIVITY_TIMEOUT_IN_SECS = 300 This was taken from StackOverflow (http://stackoverflow.com/questions/14830669/how-to-expire-django-session-in-5minutes) """ from datetime import datetime, timedelta from django.conf import settings from django.contrib import auth LAST_TOUCH_KEYNAME = 'SessionInactivityTimeout:last_touch' class SessionInactivityTimeout(object): """ Middleware class to keep track of activity on a given session """ def process_request(self, request): """ Standard entry point for processing requests in Django """ if not hasattr(request, "user") or not request.user.is_authenticated(): #Can't log out if not logged in return timeout_in_seconds = getattr(settings, "SESSION_INACTIVITY_TIMEOUT_IN_SECONDS", None) # Do we have this feature enabled? if timeout_in_seconds: # what time is it now? utc_now = datetime.utcnow() # Get the last time user made a request to server, which is stored in session data last_touch = request.session.get(LAST_TOUCH_KEYNAME) # have we stored a 'last visited' in session? NOTE: first time access after login # this key will not be present in the session data if last_touch: # compute the delta since last time user came to the server time_since_last_activity = utc_now - last_touch # did we exceed the timeout limit? if time_since_last_activity > timedelta(seconds=timeout_in_seconds): # yes? Then log the user out del request.session[LAST_TOUCH_KEYNAME] auth.logout(request) return request.session[LAST_TOUCH_KEYNAME] = utc_now
agpl-3.0
races1986/SafeLanguage
CEM/pywikibot/exceptions.py
5
3668
# -*- coding: utf-8 -*-
"""
Exception classes used throughout the framework.
"""
#
# (C) Pywikipedia bot team, 2008
#
# Distributed under the terms of the MIT license.
#
__version__ = '$Id: 9f2eb10c50ec43d917bd9b4af68f85dd9afdb0c9 $'

# Application specific imports
# NOTE(review): `config` is not referenced in this module; kept because other
# importers may rely on it being loaded here.
import config

# TODO: not certain that all of them will be needed.


class Error(Exception):
    """Wikipedia error"""


class PageRelatedError(Error):
    """Abstract Exception, used when the Exception concerns a particular
    Page, and when a generic message can be written once for all"""
    # Preformated UNICODE message where the page title will be inserted
    # Override this in subclasses.
    # u"Oh noes! Page %s is too funky, we should not delete it ;("
    message = None


class NoUsername(Error):
    """Username is not in user-config.py"""


class NoPage(PageRelatedError):
    """Page does not exist"""
    message = u"Page %s doesn't exist."


class NoSuchSite(Error):
    """Site does not exist"""


class IsRedirectPage(PageRelatedError):
    """Page is a redirect page"""
    message = u"Page %s is a redirect page."


class IsNotRedirectPage(PageRelatedError):
    """Page is not a redirect page"""
    message = u"Page %s is not a redirect page."


class CircularRedirect(Error):
    """Page is a circular redirect

    Exception argument is the redirect target; this may be the same title
    as this page or a different title (in which case the target page directly
    or indirectly redirects back to this one)

    """


class InvalidTitle(Error):
    """Invalid page title"""


class LockedPage(PageRelatedError):
    """Page is locked"""
    message = u"Page %s is locked."


class SectionError(Error):
    """The section specified by # does not exist"""


class PageNotSaved(Error):
    """Saving the page has failed"""


class EditConflict(PageNotSaved):
    """There has been an edit conflict while uploading the page"""


class SpamfilterError(PageNotSaved):
    """Saving the page has failed because the MediaWiki spam filter detected a
    blacklisted URL."""
    def __init__(self, arg):
        super(SpamfilterError, self).__init__(
            u'MediaWiki spam filter has been triggered')
        # The blacklisted URL that triggered the filter.
        self.url = arg
        self.args = arg,


class LongPageError(PageNotSaved):
    """Saving the page has failed because it is too long."""
    def __init__(self, arg, arg2):
        # Actual length of the page text.
        self.length = arg
        # Maximum length allowed by the wiki.
        # BUGFIX: was `self.limit = arg2,` — the stray trailing comma stored
        # a 1-tuple instead of the numeric limit (cf. `self.length = arg`).
        self.limit = arg2


class MaxTriesExceededError(PageNotSaved):
    """Saving the page has failed because the maximum number of attempts has
    been reached"""


class ServerError(Error):
    """Got unexpected server response"""


class Server504Error(Error):
    """Server timed out with http 504 code"""


class BadTitle(Error):
    """Server responded with BadTitle."""


# UserBlocked exceptions should in general not be caught. If the bot has
# been blocked, the bot operator should address the reason for the block
# before continuing.
class UserBlocked(Error):
    """Your username or IP has been blocked"""


class PageNotFound(Error):
    """Page not found in list"""


class CaptchaError(Error):
    """Captcha is asked and config.solve_captcha == False."""


class NoHash(Error):
    """ The APIs don't return any Hash for the image searched.
    Really Strange, better to raise an error. """


class UploadWarning(Error):
    """Upload failed with a warning message (passed as the argument)."""


class AutoblockUser(Error):
    """
    The class AutoblockUserError is an exception that is raised whenever
    an action is requested on a virtual autoblock user that's not available
    for him (i.e. roughly everything except unblock).
    """


class UserActionRefuse(Error):
    pass
epl-1.0
AnthonyMRios/pymetamap
pymetamap/SubprocessBackendLite.py
1
5539
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import tempfile

from .MetaMapLite import MetaMapLite
from .ConceptLite import CorpusLite


class SubprocessBackendLite(MetaMapLite):
    def __init__(self, metamap_home):
        """ Interface to MetaMap using subprocess. This creates a
        command line call to a specified metamap process.
        """
        MetaMapLite.__init__(self, metamap_home=metamap_home)

    def extract_concepts(self, sentences=None, ids=None,
                         filename=None,
                         restrict_to_sts=None,
                         restrict_to_sources=None):
        """ extract_concepts takes a list of sentences and ids(optional)
        then returns a list of Concept objects extracted via
        MetaMapLite.

        Supported Options:
            Restrict to Semantic Types --restrict_to_sts
            Restrict to Sources --restrict_to_sources

        For information about the available options visit
        http://metamap.nlm.nih.gov/.

        Note: If an error is encountered the process will be closed
        and whatever was processed, if anything, will be returned along
        with the error found.
        """
        if (sentences is not None and filename is not None) or \
                (sentences is None and filename is None):
            raise ValueError("You must either pass a list of sentences "
                             "OR a filename.")

        if sentences is not None:
            input_file = tempfile.NamedTemporaryFile(mode="wb", delete=False,
                                                     suffix='.mmi')
        else:
            input_file = open(filename, 'r')

        # Unlike MetaMap, MetaMapLite does not accept an output filename:
        # it writes its result next to the input file, replacing the
        # extension with ".mmi".
        #
        # BUGFIX: `output` is now initialized up front. Previously an early
        # failure inside the bare try/except left it unbound and
        # CorpusLite.load() raised a NameError instead of returning the
        # best-effort (empty) result.
        output = ''
        error = None
        try:
            if sentences is not None:
                if ids is not None:
                    for identifier, sentence in zip(ids, sentences):
                        input_file.write(
                            '{0!r}|{1}\n'.format(identifier, sentence).encode('utf8'))
                else:
                    for sentence in sentences:
                        input_file.write('{0!r}\n'.format(sentence).encode('utf8'))
                input_file.flush()
                input_file.close()

            command = ["bash", os.path.join(self.metamap_home, "metamaplite.sh")]
            if restrict_to_sts:
                if isinstance(restrict_to_sts, str):
                    restrict_to_sts = [restrict_to_sts]
                if len(restrict_to_sts) > 0:
                    command.append('--restrict_to_sts={}'.format(
                        str(','.join(restrict_to_sts))))
            if restrict_to_sources:
                if isinstance(restrict_to_sources, str):
                    restrict_to_sources = [restrict_to_sources]
                if len(restrict_to_sources) > 0:
                    command.append('--restrict_to_sources')
                    command.append(str(','.join(restrict_to_sources)))
            if ids is not None:
                command.append('--inputformat=sldiwi')
            command.append(input_file.name)
            command.append('--overwrite')

            # BUGFIX: the output path is computed once here. The original
            # recomputed it four times, including a dangling `+ "mmi"`
            # statement (a unary plus on a str) that raised TypeError.
            output_file_name = os.path.splitext(input_file.name)[0] + ".mmi"

            metamap_process = subprocess.Popen(command, stdout=subprocess.PIPE)
            # Poll the process, scanning its stdout for errors; terminate on
            # the first ERROR line and remember it for the caller.
            while metamap_process.poll() is None:
                stdout = str(metamap_process.stdout.readline())
                if 'ERROR' in stdout:
                    metamap_process.terminate()
                    error = stdout.rstrip()

            with open(output_file_name) as fd:
                output = fd.read()
        except Exception:
            # Best-effort by design (see docstring): swallow failures and
            # return whatever was parsed so far. Narrowed from a bare
            # `except:` so KeyboardInterrupt/SystemExit still propagate.
            pass

        concepts = CorpusLite.load(output.splitlines())
        return concepts, error
apache-2.0
naiquevin/lookupy
lookupy/tests.py
1
14908
""" lookupy.tests ~~~~~~~~~~~~~ This module contains tests for the lookupy module written using nose to be run using:: $ nosetests -v """ import re from nose.tools import assert_list_equal, assert_equal, assert_raises from .lookupy import filter_items, lookup, include_keys, Q, QuerySet, \ Collection, LookupyError from .dunderkey import dunderkey, dunder_partition, dunder_init, dunder_last, \ dunder_get, undunder_keys, dunder_truncate entries_fixtures = [{'request': {'url': 'http://example.com', 'headers': [{'name': 'Connection', 'value': 'Keep-Alive'}]}, 'response': {'status': 404, 'headers': [{'name': 'Date', 'value': 'Thu, 13 Jun 2013 06:43:14 GMT'}, {'name': 'Content-Type', 'value': 'text/html'}]}}, {'request': {'url': 'http://example.org', 'headers': [{'name': 'Connection', 'value': 'Keep-Alive'}]}, 'response': {'status': 200, 'headers': [{'name': 'Date', 'value': 'Thu, 13 Jun 2013 06:43:14 GMT'}, {'name': 'Content-Type', 'value': 'text/html'}]}}, {'request': {'url': 'http://example.com/myphoto.jpg', 'headers': [{'name': 'Connection', 'value': 'Keep-Alive'}]}, 'response': {'status': 200, 'headers': [{'name': 'Date', 'value': 'Thu, 13 Jun 2013 06:43:14 GMT'}, {'name': 'Content-Type', 'value': 'image/jpg'}]}}] def fe(entries, *args, **kwargs): return list(filter_items(entries, *args, **kwargs)) def ik(entries, fields): return list(include_keys(entries, fields)) ## Tests def test_Collection(): c = Collection(entries_fixtures) assert_list_equal(list(c), entries_fixtures) assert_list_equal(list(c), entries_fixtures) def test_Q(): entries = entries_fixtures q1 = Q(response__status__exact=404, request__url__contains='.com') assert q1.evaluate(entries[0]) # test with negation q2 = ~Q(response__status__exact=404) assert q2.evaluate(entries[1]) # test multiple application of negation assert not (~q2).evaluate(entries[1]) q3 = Q(response__status=200) assert not (q1 & q3).evaluate(entries[0]) assert (q1 | q3).evaluate(entries[0]) assert (~(q1 & q3)).evaluate(entries[0]) 
assert_list_equal(list(((Q(request__url__endswith='.jpg') | Q(response__status=404)).evaluate(e) for e in entries)), [True, False, True]) assert_list_equal(list(((~Q(request__url__endswith='.jpg') | Q(response__status=404)).evaluate(e) for e in entries)), [True, True, False]) def test_lookup(): entry1, entry2, entry3 = entries_fixtures # exact -- works for strings and int assert lookup('request__url__exact', 'http://example.com', entry1) assert not lookup('request_url__exact', 'http://example.org', entry1) assert lookup('response__status__exact', 404, entry1) assert not lookup('response__status__exact', 404, entry2) assert lookup('response_unknown__exact', None, entry1) # neq -- works for strings and ints assert not lookup('request__url__neq', 'http://example.com', entry1) assert lookup('request_url__neq', 'http://example.org', entry1) assert not lookup('response__status__neq', 404, entry1) assert lookup('response__status__neq', 404, entry2) assert not lookup('response_unknown__neq', None, entry1) # contains -- works for strings, else raises error assert lookup('request__url__contains', '.com', entry1) assert not lookup('request__url__contains', 'www', entry1) assert_raises(LookupyError, lookup, 'response__status__contains', 2, entry2) assert_raises(LookupyError, lookup, 'response__unknown__contains', None, entry2) # icontains -- works for strings, else raises error assert lookup('request__url__icontains', 'EXAMPLE', entry1) assert not lookup('request__url__icontains', 'www', entry1) assert_raises(LookupyError, lookup, 'response__status__icontains', 2, entry2) assert_raises(LookupyError, lookup, 'response__unknown__icontains', None, entry2) # in -- works for strings and lists, else raises error assert lookup('request__url__in', ['http://example.com', 'http://blog.example.com'], entry1) assert lookup('response__status__in', [400, 200], entry2) assert not lookup('response__status__in', [], entry2) assert lookup('request__url__in', 'http://example.com/?q=hello', 
entry1) assert_raises(LookupyError, lookup, 'response__status__in', 404, entry1) # startswith -- works for strings, else raises error assert lookup('request__url__startswith', 'http://', entry1) assert not lookup('request__url__startswith', 'HTTP://', entry1) assert_raises(LookupyError, lookup, 'response__status__startswith', 4, entry1) # istartswith -- works for strings, else raises error assert lookup('request__url__istartswith', 'http://', entry1) assert lookup('request__url__istartswith', 'HTTP://', entry1) assert_raises(LookupyError, lookup, 'response__status__istartswith', 4, entry1) # endswith -- works for strings, else raises error assert lookup('request__url__endswith', '.jpg', entry3) assert not lookup('request__url__endswith', '.JPG', entry3) assert_raises(LookupyError, lookup, 'response__status__endswith', 0, entry3) # iendswith -- works for strings, else raises error assert lookup('request__url__iendswith', '.jpg', entry3) assert lookup('request__url__iendswith', '.JPG', entry3) assert_raises(LookupyError, lookup, 'response__status__iendswith', 0, entry3) # gt -- works for strings and int assert lookup('response__status__gt', 200, entry1) assert not lookup('response__status__gt', 404, entry1) assert lookup('request__url__gt', 'ftp://example.com', entry1) assert not lookup('request__url__gt', 'http://example.com', entry1) # gte -- works for strings and int assert lookup('response__status__gte', 200, entry1) assert lookup('response__status__gte', 404, entry1) assert lookup('request__url__gte', 'ftp://example.com', entry1) assert lookup('request__url__gte', 'http://example.com', entry1) # lt -- works for strings and int assert lookup('response__status__lt', 301, entry2) assert not lookup('response__status__lt', 200, entry2) assert lookup('request__url__lt', 'ws://example.com', entry2) assert not lookup('request__url__lt', 'http://example.org', entry2) # lte -- works for strings and int assert lookup('response__status__lte', 301, entry2) assert 
lookup('response__status__lte', 200, entry2) assert lookup('request__url__lte', 'ws://example.com', entry2) assert lookup('request__url__lte', 'http://example.org', entry2) # regex -- works for compiled patterns and strings pattern = r'^http:\/\/.+g$' assert lookup('request__url__regex', pattern, entry2) assert lookup('request__url__regex', pattern, entry3) assert not lookup('request__url__regex', pattern, entry1) compiled_pattern = re.compile(pattern) assert lookup('request__url__regex', compiled_pattern, entry2) assert lookup('request__url__regex', compiled_pattern, entry3) assert not lookup('request__url__regex', compiled_pattern, entry1) # filter -- works for Q objects, else raises error assert lookup('response__headers__filter', Q(name__exact='Content-Type', value__exact='image/jpg'), entry3) assert not lookup('response__headers__filter', Q(name__exact='Content-Type', value__exact='text/html'), entry3) assert_raises(LookupyError, lookup, 'response__headers__filter', 0, entry3) assert_raises(LookupyError, lookup, 'response__headers__filter', "hello", entry3) assert_raises(LookupyError, lookup, 'response__headers__filter', None, entry3) assert_raises(LookupyError, lookup, 'response__headers__filter', {'a': 'b'}, entry3) assert_raises(LookupyError, lookup, 'response__status__filter', Q(name__exact='Content-Type', value__exact='image/jpg'), entry3) # nothing -- works for strings and int assert lookup('request__url', 'http://example.com', entry1) assert not lookup('request_url', 'http://example.org', entry1) assert lookup('response__status', 404, entry1) assert not lookup('response__status', 404, entry2) assert lookup('response_unknown', None, entry1) def test_filter_items(): entries = entries_fixtures # when no lookup kwargs passed, all entries are returned assert_list_equal(fe(entries), entries) # simple 1st level lookups assert_list_equal(fe(entries, request__url='http://example.com'), entries[0:1]) assert_list_equal(fe(entries, response__status=200), 
entries[1:]) assert len(fe(entries, response__status=405)) == 0 # testing compund lookups assert len(fe(entries, Q(request__url__exact='http://example.org'))) == 1 assert len(fe(entries, Q(request__url__exact='http://example.org', response__status=200) | Q(request__url__endswith='.com', response__status=404))) == 2 assert len(fe(entries, ~Q(request__url__exact='http://example.org', response__status__gte=500) | Q(request__url__endswith='.com', response__status=404))) == 3 assert len(fe(entries, ~Q(request__url__exact='http://example.org', response__status__gte=500) | Q(request__url__endswith='.com', response__status=404), response__status__exact=200)) == 2 def test_include_keys(): entries = entries_fixtures assert_list_equal(ik(entries, ['request']), [{'request': {'url': 'http://example.com', 'headers': [{'name': 'Connection', 'value': 'Keep-Alive'}]}}, {'request': {'url': 'http://example.org', 'headers': [{'name': 'Connection', 'value': 'Keep-Alive'}]}}, {'request': {'url': 'http://example.com/myphoto.jpg', 'headers': [{'name': 'Connection', 'value': 'Keep-Alive'}]}}]) assert_list_equal(ik(entries, ['response__status']), [{'response__status': 404}, {'response__status': 200}, {'response__status': 200}]) # when an empty list is passed as fields assert_list_equal(ik(entries, []), [{},{},{}]) # when a non-existent key is passed in fields assert_list_equal(ik(entries, ['response__status', 'cookies']), [{'response__status': 404, 'cookies': None}, {'response__status': 200, 'cookies': None}, {'response__status': 200, 'cookies': None}]) def test_Collection_QuerySet(): data = [{'framework': 'Django', 'language': 'Python', 'type': 'full-stack'}, {'framework': 'Flask', 'language': 'Python', 'type': 'micro'}, {'framework': 'Rails', 'language': 'Ruby', 'type': 'full-stack'}, {'framework': 'Sinatra', 'language': 'Ruby', 'type': 'micro'}, {'framework': 'Zend', 'language': 'PHP', 'type': 'full-stack'}, {'framework': 'Slim', 'language': 'PHP', 'type': 'micro'}] c = Collection(data) 
r1 = c.filter(framework__startswith='S') assert isinstance(r1, QuerySet) assert len(list(r1)) == 2 r2 = c.filter(Q(language__exact='Python') | Q(language__exact='Ruby')) assert len(list(r2)) == 4 r3 = c.filter(language='PHP') assert_list_equal(list(r3.select('framework', 'type')), [{'framework': 'Zend', 'type': 'full-stack'}, {'framework': 'Slim', 'type': 'micro'}]) r4 = c.filter(Q(language__exact='Python') | Q(language__exact='Ruby')) assert_list_equal(list(r4.select('framework')), [{'framework': 'Django'}, {'framework': 'Flask'}, {'framework': 'Rails'}, {'framework': 'Sinatra'}]) # :todo: test with flatten=True r5 = c.filter(framework__startswith='S').select('framework', 'somekey') assert_list_equal(list(r5), [{'framework': 'Sinatra', 'somekey': None}, {'framework': 'Slim', 'somekey': None}]) ## nesdict tests def test_dunderkey(): assert dunderkey('a', 'b', 'c') == 'a__b__c' assert dunderkey('a') == 'a' assert dunderkey('name', 'school_name') == 'name__school_name' def test_dunder_partition(): assert dunder_partition('a__b') == ('a', 'b') assert dunder_partition('a__b__c') == ('a__b', 'c') assert dunder_partition('a') == ('a', None) def test_dunder_init(): assert dunder_init('a__b') == 'a' assert dunder_init('a__b__c') == 'a__b' assert dunder_init('a') == 'a' def test_dunder_last(): assert dunder_last('a__b') == 'b' assert dunder_last('a__b__c') == 'c' assert dunder_last('a') == None def test_dunder_get(): d = dict([('a', 'A'), ('p', {'q': 'Q'}), ('x', {'y': {'z': 'Z'}})]) assert dunder_get(d, 'a') == 'A' assert dunder_get(d, 'p__q') == 'Q' assert dunder_get(d, 'x__y__z') == 'Z' def test_undunder_keys(): entry = {'request__url': 'http://example.com', 'request__headers': [{'name': 'Connection', 'value': 'Keep-Alive',}], 'response__status': 404, 'response__headers': [{'name': 'Date', 'value': 'Thu, 13 Jun 2013 06:43:14 GMT'}]} assert_equal(undunder_keys(entry), {'request': {'url': 'http://example.com', 'headers': [{'name': 'Connection', 'value': 'Keep-Alive',}]}, 
'response': {'status': 404, 'headers': [{'name': 'Date', 'value': 'Thu, 13 Jun 2013 06:43:14 GMT'}]}}) def test_dunder_truncate(): entry = {'request__url': 'http://example.com', 'request__headers': [{'name': 'Connection', 'value': 'Keep-Alive',}], 'response__status': 404, 'response__headers': [{'name': 'Date', 'value': 'Thu, 13 Jun 2013 06:43:14 GMT'}]} assert_equal(dunder_truncate(entry), {'url': 'http://example.com', 'request__headers': [{'name': 'Connection', 'value': 'Keep-Alive',}], 'status': 404, 'response__headers': [{'name': 'Date', 'value': 'Thu, 13 Jun 2013 06:43:14 GMT'}]})
mit
legscoin/legscoin
share/qt/make_spinner.py
4415
1035
#!/usr/bin/env python # W.J. van der Laan, 2011 # Make spinning .mng animation from a .png # Requires imagemagick 6.7+ from __future__ import division from os import path from PIL import Image from subprocess import Popen SRC='img/reload_scaled.png' DST='../../src/qt/res/movies/update_spinner.mng' TMPDIR='/tmp' TMPNAME='tmp-%03i.png' NUMFRAMES=35 FRAMERATE=10.0 CONVERT='convert' CLOCKWISE=True DSIZE=(16,16) im_src = Image.open(SRC) if CLOCKWISE: im_src = im_src.transpose(Image.FLIP_LEFT_RIGHT) def frame_to_filename(frame): return path.join(TMPDIR, TMPNAME % frame) frame_files = [] for frame in xrange(NUMFRAMES): rotation = (frame + 0.5) / NUMFRAMES * 360.0 if CLOCKWISE: rotation = -rotation im_new = im_src.rotate(rotation, Image.BICUBIC) im_new.thumbnail(DSIZE, Image.ANTIALIAS) outfile = frame_to_filename(frame) im_new.save(outfile, 'png') frame_files.append(outfile) p = Popen([CONVERT, "-delay", str(FRAMERATE), "-dispose", "2"] + frame_files + [DST]) p.communicate()
mit
LeartS/odoo
addons/point_of_sale/wizard/pos_box.py
381
2211
from openerp.osv import osv from openerp.tools.translate import _ from openerp.addons.account.wizard.pos_box import CashBox class PosBox(CashBox): _register = False def run(self, cr, uid, ids, context=None): if not context: context = dict() active_model = context.get('active_model', False) or False active_ids = context.get('active_ids', []) or [] if active_model == 'pos.session': records = self.pool[active_model].browse(cr, uid, active_ids, context=context) bank_statements = [record.cash_register_id for record in records if record.cash_register_id] if not bank_statements: raise osv.except_osv(_('Error!'), _("There is no cash register for this PoS Session")) return self._run(cr, uid, ids, bank_statements, context=context) else: return super(PosBox, self).run(cr, uid, ids, context=context) class PosBoxIn(PosBox): _inherit = 'cash.box.in' def _compute_values_for_statement_line(self, cr, uid, box, record, context=None): if context is None: context = {} values = super(PosBoxIn, self)._compute_values_for_statement_line(cr, uid, box, record, context=context) active_model = context.get('active_model', False) or False active_ids = context.get('active_ids', []) or [] if active_model == 'pos.session': session = self.pool[active_model].browse(cr, uid, active_ids, context=context)[0] values['ref'] = session.name return values class PosBoxOut(PosBox): _inherit = 'cash.box.out' def _compute_values_for_statement_line(self, cr, uid, box, record, context=None): values = super(PosBoxOut, self)._compute_values_for_statement_line(cr, uid, box, record, context=context) active_model = context.get('active_model', False) or False active_ids = context.get('active_ids', []) or [] if active_model == 'pos.session': session = self.pool[active_model].browse(cr, uid, active_ids, context=context)[0] values['ref'] = session.name return values
agpl-3.0
basicthinker/Sexain-MemController
gem5-stable/src/mem/slicc/generate/html.py
91
3325
# Copyright (c) 1999-2008 Mark D. Hill and David A. Wood # Copyright (c) 2009 The Hewlett-Packard Development Company # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
from m5.util.code_formatter import code_formatter def createSymbol(symbol, title): code = code_formatter() code(''' <HTML><BODY><BIG> $title: ${{formatShorthand(symbol.short)}} - ${{symbol.desc}} </BIG></BODY></HTML> ''') return code def formatShorthand(short): munged_shorthand = "" mode_is_normal = True # -- Walk over the string, processing superscript directives gen = enumerate(short) for i,c in gen: if c == '!': # -- Reached logical end of shorthand name break elif c == '_': munged_shorthand += " " elif c == '^': # -- Process super/subscript formatting mode_is_normal = not mode_is_normal if mode_is_normal: # -- Back to normal mode munged_shorthand += "</SUP>" else: # -- Going to superscript mode munged_shorthand += "<SUP>" elif c == '\\': # -- Process Symbol character set if i + 1 < len(short): # -- Proceed to next char. Yes I know that changing # the loop var is ugly! i,c = gen.next() munged_shorthand += "<B><FONT size=+1>" munged_shorthand += c munged_shorthand += "</FONT></B>" else: # -- FIXME: Add line number info later panic("Encountered a `\\` without anything following it!") else: # -- Pass on un-munged munged_shorthand += c # -- Do any other munging if not mode_is_normal: # -- Back to normal mode munged_shorthand += "</SUP>" return munged_shorthand
apache-2.0
joopert/home-assistant
homeassistant/components/automation/device.py
2
1042
"""Offer device oriented automation.""" import voluptuous as vol from homeassistant.components.device_automation import ( TRIGGER_BASE_SCHEMA, async_get_device_automation_platform, ) from homeassistant.const import CONF_DOMAIN # mypy: allow-untyped-defs, no-check-untyped-defs TRIGGER_SCHEMA = TRIGGER_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA) async def async_validate_trigger_config(hass, config): """Validate config.""" platform = await async_get_device_automation_platform( hass, config[CONF_DOMAIN], "trigger" ) if hasattr(platform, "async_validate_trigger_config"): return await getattr(platform, "async_validate_trigger_config")(hass, config) return platform.TRIGGER_SCHEMA(config) async def async_attach_trigger(hass, config, action, automation_info): """Listen for trigger.""" platform = await async_get_device_automation_platform( hass, config[CONF_DOMAIN], "trigger" ) return await platform.async_attach_trigger(hass, config, action, automation_info)
apache-2.0
moreati/django
tests/auth_tests/test_management.py
116
27025
from __future__ import unicode_literals import locale import sys from datetime import date from django.apps import apps from django.contrib.auth import management, models from django.contrib.auth.checks import check_user_model from django.contrib.auth.management import create_permissions from django.contrib.auth.management.commands import ( changepassword, createsuperuser, ) from django.contrib.auth.models import Group, User from django.contrib.auth.tests.custom_user import CustomUser from django.contrib.contenttypes.models import ContentType from django.core import checks, exceptions from django.core.management import call_command from django.core.management.base import CommandError from django.test import ( SimpleTestCase, TestCase, override_settings, override_system_checks, ) from django.utils import six from django.utils.encoding import force_str from django.utils.translation import ugettext_lazy as _ from .models import ( CustomUserBadRequiredFields, CustomUserNonListRequiredFields, CustomUserNonUniqueUsername, CustomUserWithFK, Email, ) def mock_inputs(inputs): """ Decorator to temporarily replace input/getpass to allow interactive createsuperuser. """ def inner(test_func): def wrapped(*args): class mock_getpass: @staticmethod def getpass(prompt=b'Password: ', stream=None): if six.PY2: # getpass on Windows only supports prompt as bytestring (#19807) assert isinstance(prompt, six.binary_type) if callable(inputs['password']): return inputs['password']() return inputs['password'] def mock_input(prompt): # prompt should be encoded in Python 2. This line will raise an # Exception if prompt contains unencoded non-ASCII on Python 2. 
prompt = str(prompt) assert str('__proxy__') not in prompt response = '' for key, val in inputs.items(): if force_str(key) in prompt.lower(): response = val break return response old_getpass = createsuperuser.getpass old_input = createsuperuser.input createsuperuser.getpass = mock_getpass createsuperuser.input = mock_input try: test_func(*args) finally: createsuperuser.getpass = old_getpass createsuperuser.input = old_input return wrapped return inner class MockTTY(object): """ A fake stdin object that pretends to be a TTY to be used in conjunction with mock_inputs. """ def isatty(self): return True class GetDefaultUsernameTestCase(TestCase): def setUp(self): self.old_get_system_username = management.get_system_username def tearDown(self): management.get_system_username = self.old_get_system_username def test_actual_implementation(self): self.assertIsInstance(management.get_system_username(), six.text_type) def test_simple(self): management.get_system_username = lambda: 'joe' self.assertEqual(management.get_default_username(), 'joe') def test_existing(self): models.User.objects.create(username='joe') management.get_system_username = lambda: 'joe' self.assertEqual(management.get_default_username(), '') self.assertEqual( management.get_default_username(check_db=False), 'joe') def test_i18n(self): # 'Julia' with accented 'u': management.get_system_username = lambda: 'J\xfalia' self.assertEqual(management.get_default_username(), 'julia') @override_settings(AUTH_PASSWORD_VALIDATORS=[ {'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}, ]) class ChangepasswordManagementCommandTestCase(TestCase): def setUp(self): self.user = models.User.objects.create_user(username='joe', password='qwerty') self.stdout = six.StringIO() self.stderr = six.StringIO() def tearDown(self): self.stdout.close() self.stderr.close() def test_that_changepassword_command_changes_joes_password(self): "Executing the changepassword management command should change joe's 
password" self.assertTrue(self.user.check_password('qwerty')) command = changepassword.Command() command._get_pass = lambda *args: 'not qwerty' command.execute(username="joe", stdout=self.stdout) command_output = self.stdout.getvalue().strip() self.assertEqual( command_output, "Changing password for user 'joe'\nPassword changed successfully for user 'joe'" ) self.assertTrue(models.User.objects.get(username="joe").check_password("not qwerty")) def test_that_max_tries_exits_1(self): """ A CommandError should be thrown by handle() if the user enters in mismatched passwords three times. """ command = changepassword.Command() command._get_pass = lambda *args: str(args) or 'foo' with self.assertRaises(CommandError): command.execute(username="joe", stdout=self.stdout, stderr=self.stderr) def test_password_validation(self): """ A CommandError should be raised if the user enters in passwords which fail validation three times. """ command = changepassword.Command() command._get_pass = lambda *args: '1234567890' abort_msg = "Aborting password change for user 'joe' after 3 attempts" with self.assertRaisesMessage(CommandError, abort_msg): command.execute(username="joe", stdout=self.stdout, stderr=self.stderr) self.assertIn('This password is entirely numeric.', self.stdout.getvalue()) def test_that_changepassword_command_works_with_nonascii_output(self): """ #21627 -- Executing the changepassword management command should allow non-ASCII characters from the User object representation. 
""" # 'Julia' with accented 'u': models.User.objects.create_user(username='J\xfalia', password='qwerty') command = changepassword.Command() command._get_pass = lambda *args: 'not qwerty' command.execute(username="J\xfalia", stdout=self.stdout) @override_settings( SILENCED_SYSTEM_CHECKS=['fields.W342'], # ForeignKey(unique=True) AUTH_PASSWORD_VALIDATORS=[{'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'}], ) class CreatesuperuserManagementCommandTestCase(TestCase): def test_basic_usage(self): "Check the operation of the createsuperuser management command" # We can use the management command to create a superuser new_io = six.StringIO() call_command( "createsuperuser", interactive=False, username="joe", email="joe@somewhere.org", stdout=new_io ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') u = User.objects.get(username="joe") self.assertEqual(u.email, 'joe@somewhere.org') # created password should be unusable self.assertFalse(u.has_usable_password()) @mock_inputs({'password': "nopasswd"}) def test_nolocale(self): """ Check that createsuperuser does not break when no locale is set. See ticket #16017. 
""" old_getdefaultlocale = locale.getdefaultlocale try: # Temporarily remove locale information locale.getdefaultlocale = lambda: (None, None) # Call the command in this new environment call_command( "createsuperuser", interactive=True, username="nolocale@somewhere.org", email="nolocale@somewhere.org", verbosity=0, stdin=MockTTY(), ) except TypeError: self.fail("createsuperuser fails if the OS provides no information about the current locale") finally: # Re-apply locale information locale.getdefaultlocale = old_getdefaultlocale # If we were successful, a user should have been created u = User.objects.get(username="nolocale@somewhere.org") self.assertEqual(u.email, 'nolocale@somewhere.org') @mock_inputs({ 'password': "nopasswd", 'u\u017eivatel': 'foo', # username (cz) 'email': 'nolocale@somewhere.org'}) def test_non_ascii_verbose_name(self): username_field = User._meta.get_field('username') old_verbose_name = username_field.verbose_name username_field.verbose_name = _('u\u017eivatel') new_io = six.StringIO() try: call_command( "createsuperuser", interactive=True, stdout=new_io, stdin=MockTTY(), ) finally: username_field.verbose_name = old_verbose_name command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') def test_verbosity_zero(self): # We can suppress output on the management command new_io = six.StringIO() call_command( "createsuperuser", interactive=False, username="joe2", email="joe2@somewhere.org", verbosity=0, stdout=new_io ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, '') u = User.objects.get(username="joe2") self.assertEqual(u.email, 'joe2@somewhere.org') self.assertFalse(u.has_usable_password()) def test_email_in_username(self): new_io = six.StringIO() call_command( "createsuperuser", interactive=False, username="joe+admin@somewhere.org", email="joe@somewhere.org", stdout=new_io ) u = User._default_manager.get(username="joe+admin@somewhere.org") 
self.assertEqual(u.email, 'joe@somewhere.org') self.assertFalse(u.has_usable_password()) @override_settings(AUTH_USER_MODEL='auth.CustomUser') def test_swappable_user(self): "A superuser can be created when a custom User model is in use" # We can use the management command to create a superuser # We skip validation because the temporary substitution of the # swappable User model messes with validation. new_io = six.StringIO() call_command( "createsuperuser", interactive=False, email="joe@somewhere.org", date_of_birth="1976-04-01", stdout=new_io, ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') u = CustomUser._default_manager.get(email="joe@somewhere.org") self.assertEqual(u.date_of_birth, date(1976, 4, 1)) # created password should be unusable self.assertFalse(u.has_usable_password()) @override_settings(AUTH_USER_MODEL='auth.CustomUser') def test_swappable_user_missing_required_field(self): "A Custom superuser won't be created when a required field isn't provided" # We can use the management command to create a superuser # We skip validation because the temporary substitution of the # swappable User model messes with validation. 
new_io = six.StringIO() with self.assertRaises(CommandError): call_command( "createsuperuser", interactive=False, username="joe@somewhere.org", stdout=new_io, stderr=new_io, ) self.assertEqual(CustomUser._default_manager.count(), 0) @override_settings( AUTH_USER_MODEL='auth.CustomUserNonUniqueUsername', AUTHENTICATION_BACKENDS=['my.custom.backend'], ) def test_swappable_user_username_non_unique(self): @mock_inputs({ 'username': 'joe', 'password': 'nopasswd', }) def createsuperuser(): new_io = six.StringIO() call_command( "createsuperuser", interactive=True, email="joe@somewhere.org", stdout=new_io, stdin=MockTTY(), ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') for i in range(2): createsuperuser() users = CustomUserNonUniqueUsername.objects.filter(username="joe") self.assertEqual(users.count(), 2) def test_skip_if_not_in_TTY(self): """ If the command is not called from a TTY, it should be skipped and a message should be displayed (#7423). """ class FakeStdin(object): """A fake stdin object that has isatty() return False.""" def isatty(self): return False out = six.StringIO() call_command( "createsuperuser", stdin=FakeStdin(), stdout=out, interactive=True, ) self.assertEqual(User._default_manager.count(), 0) self.assertIn("Superuser creation skipped", out.getvalue()) def test_passing_stdin(self): """ You can pass a stdin object as an option and it should be available on self.stdin. If no such option is passed, it defaults to sys.stdin. 
""" sentinel = object() command = createsuperuser.Command() command.check = lambda: [] command.execute( stdin=sentinel, stdout=six.StringIO(), stderr=six.StringIO(), interactive=False, verbosity=0, username='janet', email='janet@example.com', ) self.assertIs(command.stdin, sentinel) command = createsuperuser.Command() command.check = lambda: [] command.execute( stdout=six.StringIO(), stderr=six.StringIO(), interactive=False, verbosity=0, username='joe', email='joe@example.com', ) self.assertIs(command.stdin, sys.stdin) @override_settings(AUTH_USER_MODEL='auth.CustomUserWithFK') def test_fields_with_fk(self): new_io = six.StringIO() group = Group.objects.create(name='mygroup') email = Email.objects.create(email='mymail@gmail.com') call_command( 'createsuperuser', interactive=False, username=email.pk, email=email.email, group=group.pk, stdout=new_io, ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') u = CustomUserWithFK._default_manager.get(email=email) self.assertEqual(u.username, email) self.assertEqual(u.group, group) non_existent_email = 'mymail2@gmail.com' with self.assertRaisesMessage(CommandError, 'email instance with email %r does not exist.' 
% non_existent_email): call_command( 'createsuperuser', interactive=False, username=email.pk, email=non_existent_email, stdout=new_io, ) @override_settings(AUTH_USER_MODEL='auth.CustomUserWithFK') def test_fields_with_fk_interactive(self): new_io = six.StringIO() group = Group.objects.create(name='mygroup') email = Email.objects.create(email='mymail@gmail.com') @mock_inputs({ 'password': 'nopasswd', 'username (email.id)': email.pk, 'email (email.email)': email.email, 'group (group.id)': group.pk, }) def test(self): call_command( 'createsuperuser', interactive=True, stdout=new_io, stdin=MockTTY(), ) command_output = new_io.getvalue().strip() self.assertEqual(command_output, 'Superuser created successfully.') u = CustomUserWithFK._default_manager.get(email=email) self.assertEqual(u.username, email) self.assertEqual(u.group, group) test(self) def test_password_validation(self): """ Creation should fail if the password fails validation. """ new_io = six.StringIO() # Returns '1234567890' the first two times it is called, then # 'password' subsequently. def bad_then_good_password(index=[0]): index[0] += 1 if index[0] <= 2: return '1234567890' return 'password' @mock_inputs({ 'password': bad_then_good_password, 'username': 'joe1234567890', }) def test(self): call_command( "createsuperuser", interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual( new_io.getvalue().strip(), "This password is entirely numeric.\n" "Superuser created successfully." ) test(self) def test_validation_mismatched_passwords(self): """ Creation should fail if the user enters mismatched passwords. """ new_io = six.StringIO() # The first two passwords do not match, but the second two do match and # are valid. 
entered_passwords = ["password", "not password", "password2", "password2"] def mismatched_passwords_then_matched(): return entered_passwords.pop(0) @mock_inputs({ 'password': mismatched_passwords_then_matched, 'username': 'joe1234567890', }) def test(self): call_command( "createsuperuser", interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual( new_io.getvalue().strip(), "Error: Your passwords didn't match.\n" "Superuser created successfully." ) test(self) def test_validation_blank_password_entered(self): """ Creation should fail if the user enters blank passwords. """ new_io = six.StringIO() # The first two passwords are empty strings, but the second two are # valid. entered_passwords = ["", "", "password2", "password2"] def blank_passwords_then_valid(): return entered_passwords.pop(0) @mock_inputs({ 'password': blank_passwords_then_valid, 'username': 'joe1234567890', }) def test(self): call_command( "createsuperuser", interactive=True, stdin=MockTTY(), stdout=new_io, stderr=new_io, ) self.assertEqual( new_io.getvalue().strip(), "Error: Blank passwords aren't allowed.\n" "Superuser created successfully." ) test(self) class CustomUserModelValidationTestCase(SimpleTestCase): @override_settings(AUTH_USER_MODEL='auth.CustomUserNonListRequiredFields') @override_system_checks([check_user_model]) def test_required_fields_is_list(self): "REQUIRED_FIELDS should be a list." errors = checks.run_checks() expected = [ checks.Error( "'REQUIRED_FIELDS' must be a list or tuple.", hint=None, obj=CustomUserNonListRequiredFields, id='auth.E001', ), ] self.assertEqual(errors, expected) @override_settings(AUTH_USER_MODEL='auth.CustomUserBadRequiredFields') @override_system_checks([check_user_model]) def test_username_not_in_required_fields(self): "USERNAME_FIELD should not appear in REQUIRED_FIELDS." 
errors = checks.run_checks() expected = [ checks.Error( ("The field named as the 'USERNAME_FIELD' for a custom user model " "must not be included in 'REQUIRED_FIELDS'."), hint=None, obj=CustomUserBadRequiredFields, id='auth.E002', ), ] self.assertEqual(errors, expected) @override_settings(AUTH_USER_MODEL='auth.CustomUserNonUniqueUsername') @override_system_checks([check_user_model]) def test_username_non_unique(self): "A non-unique USERNAME_FIELD should raise a model validation error." errors = checks.run_checks() expected = [ checks.Error( ("'CustomUserNonUniqueUsername.username' must be " "unique because it is named as the 'USERNAME_FIELD'."), hint=None, obj=CustomUserNonUniqueUsername, id='auth.E003', ), ] self.assertEqual(errors, expected) @override_settings(AUTH_USER_MODEL='auth.CustomUserNonUniqueUsername', AUTHENTICATION_BACKENDS=[ 'my.custom.backend', ]) @override_system_checks([check_user_model]) def test_username_non_unique_with_custom_backend(self): """ A non-unique USERNAME_FIELD should raise an error only if we use the default authentication backend. Otherwise, an warning should be raised. 
""" errors = checks.run_checks() expected = [ checks.Warning( ("'CustomUserNonUniqueUsername.username' is named as " "the 'USERNAME_FIELD', but it is not unique."), hint=('Ensure that your authentication backend(s) can handle ' 'non-unique usernames.'), obj=CustomUserNonUniqueUsername, id='auth.W004', ) ] self.assertEqual(errors, expected) class PermissionTestCase(TestCase): def setUp(self): self._original_permissions = models.Permission._meta.permissions[:] self._original_default_permissions = models.Permission._meta.default_permissions self._original_verbose_name = models.Permission._meta.verbose_name def tearDown(self): models.Permission._meta.permissions = self._original_permissions models.Permission._meta.default_permissions = self._original_default_permissions models.Permission._meta.verbose_name = self._original_verbose_name ContentType.objects.clear_cache() def test_duplicated_permissions(self): """ Test that we show proper error message if we are trying to create duplicate permissions. 
""" auth_app_config = apps.get_app_config('auth') # check duplicated default permission models.Permission._meta.permissions = [ ('change_permission', 'Can edit permission (duplicate)')] six.assertRaisesRegex(self, CommandError, "The permission codename 'change_permission' clashes with a " "builtin permission for model 'auth.Permission'.", create_permissions, auth_app_config, verbosity=0) # check duplicated custom permissions models.Permission._meta.permissions = [ ('my_custom_permission', 'Some permission'), ('other_one', 'Some other permission'), ('my_custom_permission', 'Some permission with duplicate permission code'), ] six.assertRaisesRegex(self, CommandError, "The permission codename 'my_custom_permission' is duplicated for model " "'auth.Permission'.", create_permissions, auth_app_config, verbosity=0) # should not raise anything models.Permission._meta.permissions = [ ('my_custom_permission', 'Some permission'), ('other_one', 'Some other permission'), ] create_permissions(auth_app_config, verbosity=0) def test_default_permissions(self): auth_app_config = apps.get_app_config('auth') permission_content_type = ContentType.objects.get_by_natural_key('auth', 'permission') models.Permission._meta.permissions = [ ('my_custom_permission', 'Some permission'), ] create_permissions(auth_app_config, verbosity=0) # add/change/delete permission by default + custom permission self.assertEqual(models.Permission.objects.filter( content_type=permission_content_type, ).count(), 4) models.Permission.objects.filter(content_type=permission_content_type).delete() models.Permission._meta.default_permissions = [] create_permissions(auth_app_config, verbosity=0) # custom permission only since default permissions is empty self.assertEqual(models.Permission.objects.filter( content_type=permission_content_type, ).count(), 1) def test_verbose_name_length(self): auth_app_config = apps.get_app_config('auth') permission_content_type = ContentType.objects.get_by_natural_key('auth', 
'permission') models.Permission.objects.filter(content_type=permission_content_type).delete() models.Permission._meta.verbose_name = "some ridiculously long verbose name that is out of control" * 5 six.assertRaisesRegex(self, exceptions.ValidationError, "The verbose_name of auth.permission is longer than 244 characters", create_permissions, auth_app_config, verbosity=0) def test_custom_permission_name_length(self): auth_app_config = apps.get_app_config('auth') ContentType.objects.get_by_natural_key('auth', 'permission') custom_perm_name = 'a' * 256 models.Permission._meta.permissions = [ ('my_custom_permission', custom_perm_name), ] try: msg = ( "The permission name %s of auth.permission is longer than " "255 characters" % custom_perm_name ) with self.assertRaisesMessage(exceptions.ValidationError, msg): create_permissions(auth_app_config, verbosity=0) finally: models.Permission._meta.permissions = []
bsd-3-clause
brentdax/swift
utils/cmpcodesize/tests/test_list_function_sizes.py
60
1154
# test_list_function_sizes.py - list_function_sizes unit tests -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors

import unittest

from cmpcodesize.compare import list_function_sizes


class ListFunctionSizesTestCase(unittest.TestCase):
    """Unit tests for cmpcodesize.compare.list_function_sizes."""

    def test_when_size_array_is_none_raises(self):
        # Passing None is a programming error and must surface as TypeError.
        with self.assertRaises(TypeError):
            list(list_function_sizes(None))

    def test_when_size_array_is_empty_returns_none(self):
        # An empty input yields an empty sequence of formatted lines.
        self.assertEqual([], list(list_function_sizes([])))

    def test_lists_each_entry(self):
        # Each (symbol, size) pair is rendered as a right-padded size
        # followed by the symbol name.
        sizes = [
            ('foo', 1),
            ('bar', 10),
            ('baz', 100),
        ]
        expected = [
            ' 1 foo',
            ' 10 bar',
            ' 100 baz',
        ]
        self.assertEqual(expected, list(list_function_sizes(sizes)))


if __name__ == '__main__':
    unittest.main()
apache-2.0
thenewguy/wagtail
wagtail/wagtailforms/models.py
6
6555
import json
import re

from six import text_type
from unidecode import unidecode

from django.db import models
from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
from django.utils.text import slugify
from django.utils.encoding import python_2_unicode_compatible

from wagtail.wagtailcore.models import Page, Orderable, UserPagePermissionsProxy, get_page_types
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailadmin import tasks

from .forms import FormBuilder


# Field types selectable in the admin; each key is consumed by FormBuilder
# to construct the matching Django form field.
FORM_FIELD_CHOICES = (
    ('singleline', _('Single line text')),
    ('multiline', _('Multi-line text')),
    ('email', _('Email')),
    ('number', _('Number')),
    ('url', _('URL')),
    ('checkbox', _('Checkbox')),
    ('checkboxes', _('Checkboxes')),
    ('dropdown', _('Drop down')),
    ('radio', _('Radio buttons')),
    ('date', _('Date')),
    ('datetime', _('Date/time')),
)

# Matches a template path ending in ".html"; group(1) is the path without
# the extension, used to derive the landing-page template name.
HTML_EXTENSION_RE = re.compile(r"(.*)\.html")


@python_2_unicode_compatible
class FormSubmission(models.Model):
    """Data for a Form submission."""

    # Submitted field values, serialised as a JSON object.
    form_data = models.TextField()
    # The form page this submission was made against.
    page = models.ForeignKey(Page)
    # Set automatically when the submission row is created.
    submit_time = models.DateTimeField(auto_now_add=True)

    def get_data(self):
        """Deserialise the stored JSON back into a dict."""
        return json.loads(self.form_data)

    def __str__(self):
        return self.form_data


class AbstractFormField(Orderable):
    """Database Fields required for building a Django Form field."""

    label = models.CharField(
        max_length=255,
        help_text=_('The label of the form field')
    )
    field_type = models.CharField(max_length=16, choices=FORM_FIELD_CHOICES)
    required = models.BooleanField(default=True)
    choices = models.CharField(
        max_length=512,
        blank=True,
        help_text=_('Comma seperated list of choices. Only applicable in checkboxes, radio and dropdown.')
    )
    default_value = models.CharField(
        max_length=255,
        blank=True,
        help_text=_('Default value. Comma seperated values supported for checkboxes.')
    )
    help_text = models.CharField(max_length=255, blank=True)

    @property
    def clean_name(self):
        """Slugified, ASCII-only version of the label, used as the form
        field's name."""
        # unidecode will return an ascii string while slugify wants a
        # unicode string on the other hand, slugify returns a safe-string
        # which will be converted to a normal str
        return str(slugify(text_type(unidecode(self.label))))

    panels = [
        FieldPanel('label'),
        FieldPanel('help_text'),
        FieldPanel('required'),
        FieldPanel('field_type', classname="formbuilder-type"),
        FieldPanel('choices', classname="formbuilder-choices"),
        FieldPanel('default_value', classname="formbuilder-default"),
    ]

    class Meta:
        abstract = True
        ordering = ['sort_order']


# Module-level cache for get_form_types(); computed once per process.
_FORM_CONTENT_TYPES = None


def get_form_types():
    """Return (cached) ContentTypes of all page types subclassing AbstractForm."""
    global _FORM_CONTENT_TYPES
    if _FORM_CONTENT_TYPES is None:
        _FORM_CONTENT_TYPES = [
            ct for ct in get_page_types()
            if issubclass(ct.model_class(), AbstractForm)
        ]
    return _FORM_CONTENT_TYPES


def get_forms_for_user(user):
    """Return a queryset of form pages that this user is allowed to access the submissions for"""
    editable_pages = UserPagePermissionsProxy(user).editable_pages()
    return editable_pages.filter(content_type__in=get_form_types())


class AbstractForm(Page):
    """A Form Page. Pages implementing a form should inherit from it"""

    form_builder = FormBuilder

    is_abstract = True  # Don't display me in "Add"

    def __init__(self, *args, **kwargs):
        super(AbstractForm, self).__init__(*args, **kwargs)
        if not hasattr(self, 'landing_page_template'):
            # Derive "<template>_landing.html" from the page template name
            # unless a subclass supplied one explicitly.
            template_wo_ext = re.match(HTML_EXTENSION_RE, self.template).group(1)
            self.landing_page_template = template_wo_ext + '_landing.html'

    class Meta:
        abstract = True

    def get_form_parameters(self):
        """Hook for subclasses: extra kwargs passed to the form constructor."""
        return {}

    def process_form_submission(self, form):
        """Persist a valid form as a FormSubmission (JSON-encoded)."""
        # remove csrf_token from form.data
        form_data = dict(
            i for i in form.data.items()
            if i[0] != 'csrfmiddlewaretoken'
        )

        FormSubmission.objects.create(
            form_data=json.dumps(form_data),
            page=self,
        )

    def serve(self, request):
        """Render the form on GET; validate, store and show the landing
        page on POST (re-rendering the bound form when invalid)."""
        fb = self.form_builder(self.form_fields.all())
        form_class = fb.get_form_class()
        form_params = self.get_form_parameters()

        if request.method == 'POST':
            form = form_class(request.POST, **form_params)

            if form.is_valid():
                self.process_form_submission(form)

                # If we have a form_processing_backend call its process method
                if hasattr(self, 'form_processing_backend'):
                    form_processor = self.form_processing_backend()
                    form_processor.process(self, form)

                # render the landing_page
                # TODO: It is much better to redirect to it
                return render(request, self.landing_page_template, {
                    'self': self,
                })
        else:
            form = form_class(**form_params)

        # Reached on GET, or on POST with an invalid (bound) form.
        return render(request, self.template, {
            'self': self,
            'form': form,
        })

    preview_modes = [
        ('form', 'Form'),
        ('landing', 'Landing page'),
    ]

    def serve_preview(self, request, mode):
        """Support previewing the landing page in addition to the form."""
        if mode == 'landing':
            return render(request, self.landing_page_template, {
                'self': self,
            })
        else:
            return super(AbstractForm, self).serve_preview(request, mode)


class AbstractEmailForm(AbstractForm):
    """A Form Page that sends email. Pages implementing a form to be sent
    to an email address should inherit from it"""

    is_abstract = True  # Don't display me in "Add"

    to_address = models.CharField(max_length=255, blank=True, help_text=_("Optional - form submissions will be emailed to this address"))
    from_address = models.CharField(max_length=255, blank=True)
    subject = models.CharField(max_length=255, blank=True)

    def process_form_submission(self, form):
        """Store the submission, then email it if a to_address is set."""
        super(AbstractEmailForm, self).process_form_submission(form)

        if self.to_address:
            # NOTE(review): form.data.get(x[0]) may be None for fields absent
            # from the POST (e.g. unchecked checkboxes), which would raise
            # TypeError on concatenation — confirm upstream validation
            # guarantees a value for every field.
            content = '\n'.join([x[1].label + ': ' + form.data.get(x[0]) for x in form.fields.items()])
            tasks.send_email_task.delay(self.subject, content, [self.to_address], self.from_address,)

    class Meta:
        abstract = True
bsd-3-clause
GoogleCloudPlatform/deploymentmanager-autogen
javatests/com/google/cloud/deploymentmanager/autogen/testdata/singlevm/full_features2/golden/common/common.py
7
7308
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generic simple functions used for python based template generation."""

import re
import sys
import traceback

import default
import yaml

# RFC 1035 label: lowercase letter first, then letters/digits/hyphens,
# ending in a letter or digit; total length 3-63.
RFC1035_RE = re.compile(r'^[a-z][-a-z0-9]{1,61}[a-z0-9]{1}$')


class Error(Exception):
  """Common exception wrapper for template exceptions."""
  pass


def AddDiskResourcesIfNeeded(context):
  """Checks context if disk resources need to be added."""
  if default.DISK_RESOURCES in context.properties:
    return context.properties[default.DISK_RESOURCES]
  else:
    return []


def AutoName(base, resource, *args):
  """Helper method to generate names automatically based on default.

  Raises:
    Error: if the generated name is not a valid RFC 1035 label.
  """
  auto_name = '%s-%s' % (base, '-'.join(list(args) + [default.AKA[resource]]))
  if not RFC1035_RE.match(auto_name):
    raise Error('"%s" name for type %s does not match RFC1035 regex (%s)' %
                (auto_name, resource, RFC1035_RE.pattern))
  return auto_name


def AutoRef(base, resource, *args):
  """Helper method that builds a reference for an auto-named resource."""
  return Ref(AutoName(base, resource, *args))


def OrderedItems(dict_obj):
  """Convenient method to yield sorted iteritems of a dictionary."""
  # sorted() works on both Python 2 and 3; the previous keys()/sort() pair
  # fails on Python 3, where keys() returns an unsortable view.
  for k in sorted(dict_obj):
    yield (k, dict_obj[k])


def ShortenZoneName(zone):
  """Given a string that looks like a zone name, creates a shorter version."""
  geo, coord, number, letter = re.findall(r'(\w+)-(\w+)(\d)-(\w)', zone)[0]
  # Two-letter geos are kept as-is; longer ones are abbreviated via the
  # default.LOC lookup table.
  geo = geo.lower() if len(geo) == 2 else default.LOC[geo.lower()]
  coord = default.LOC[coord.lower()]
  letter = letter.lower()
  return geo + '-' + coord + number + letter


def ZoneToRegion(zone):
  """Derives the region from a zone name.

  Raises:
    Error: if the zone does not consist of exactly three dash-separated parts.
  """
  parts = zone.split('-')
  if len(parts) != 3:
    raise Error('Cannot derive region from zone "%s"' % zone)
  return '-'.join(parts[:2])


def FormatException(message):
  """Adds more information to the exception."""
  # sys.exc_type was removed in Python 3; sys.exc_info()[0] is the portable
  # way to obtain the in-flight exception type.
  message = ('Exception Type: %s\n'
             'Details: %s\n'
             'Message: %s\n') % (sys.exc_info()[0], traceback.format_exc(),
                                 message)
  return message


def Ref(name):
  """Builds a deployment-manager selfLink reference to a named resource."""
  return '$(ref.%s.selfLink)' % name


def RefGroup(name):
  """Builds a deployment-manager instanceGroup reference for a resource."""
  return '$(ref.%s.instanceGroup)' % name


def GlobalComputeLink(project, collection, value):
  """Returns a full global compute URL, passing through existing links."""
  if IsComputeLink(value):
    return value
  return ''.join([default.COMPUTE_URL_BASE, 'projects/', project, '/global/',
                  collection, '/', value])


def LocalComputeLink(project, zone, key, value):
  """Returns a full zonal compute URL, passing through existing links."""
  if IsComputeLink(value):
    return value
  return ''.join([default.COMPUTE_URL_BASE, 'projects/', project, '/zones/',
                  zone, '/', key, '/', value])


def MakeLocalComputeLink(context, key):
  """Builds a zonal link for the property named `key` (pluralised)."""
  return LocalComputeLink(context.env['project'],
                          context.properties.get('zone', None),
                          key + 's',
                          context.properties[key])


def MakeNetworkComputeLink(context, value):
  """Builds a global link into the networks collection."""
  return GlobalComputeLink(context.env['project'], 'networks', value)


def MakeSubnetworkComputeLink(context, value):
  """Builds a regional subnetwork link derived from the context's zone."""
  region = ZoneToRegion(context.properties.get('zone', None))
  if IsComputeLink(value):
    return value
  return ''.join([
      default.COMPUTE_URL_BASE, 'projects/', context.env['project'],
      '/regions/', region, '/subnetworks/', value
  ])


def MakeAcceleratorTypeLink(context, accelerator_type):
  """Builds a zonal acceleratorTypes link."""
  project = context.env['project']
  zone = context.properties.get('zone', None)
  return ''.join([default.COMPUTE_URL_BASE, 'projects/', project, '/zones/',
                  zone, '/acceleratorTypes/', accelerator_type])


def MakeFQHN(context, name):
  """Builds the fully-qualified internal host name for an instance."""
  return '%s.c.%s.internal' % (name, context.env['project'])


# TODO(victorg): Consider moving this method to a different file
def MakeC2DImageLink(name, dev_mode=False):
  """Builds an image link in the C2D images project (or locally in dev mode)."""
  if IsGlobalProjectShortcut(name) or name.startswith('http'):
    return name
  else:
    if dev_mode:
      return 'global/images/%s' % name
    else:
      return GlobalComputeLink(default.C2D_IMAGES, 'images', name)


def IsGlobalProjectShortcut(name):
  """True for 'projects/...' or 'global/...' shorthand references."""
  return name.startswith('projects/') or name.startswith('global/')


def IsComputeLink(name):
  """True when the value is already a compute URL or a $(ref...) link."""
  return (name.startswith(default.COMPUTE_URL_BASE) or
          name.startswith(default.REFERENCE_PREFIX))


def GetNamesAndTypes(resources_dict):
  """Returns (name, type) pairs for each resource dict."""
  return [(d['name'], d['type']) for d in resources_dict]


def SummarizeResources(res_dict):
  """Summarizes the name of resources per resource type."""
  result = {}
  for res in res_dict:
    result.setdefault(res['type'], []).append(res['name'])
  return result


def ListPropertyValuesOfType(res_dict, prop, res_type):
  """Lists all the values for a property of a certain type."""
  return [r['properties'][prop] for r in res_dict if r['type'] == res_type]


def MakeResource(resource_list, output_list=None):
  """Wrapper for a DM template basic spec."""
  content = {'resources': resource_list}
  if output_list:
    content['outputs'] = output_list
  return yaml.dump(content)


def TakeZoneOut(properties):
  """Given a properties dictionary, removes the zone specific information."""

  def _CleanZoneUrl(value):
    # Keep only the last URL segment for compute links.
    value = value.split('/')[-1] if IsComputeLink(value) else value
    return value

  for name in default.VM_ZONE_PROPERTIES:
    if name in properties:
      properties[name] = _CleanZoneUrl(properties[name])
  if default.ZONE in properties:
    properties.pop(default.ZONE)
  if default.BOOTDISK in properties:
    properties[default.BOOTDISK] = _CleanZoneUrl(properties[default.BOOTDISK])
  if default.DISKS in properties:
    for disk in properties[default.DISKS]:
      # Don't touch references to other disks
      if default.DISK_SOURCE in disk:
        continue
      if default.INITIALIZEP in disk:
        disk_init = disk[default.INITIALIZEP]
        if default.DISKTYPE in disk_init:
          disk_init[default.DISKTYPE] = _CleanZoneUrl(disk_init[default.DISKTYPE])


def GenerateEmbeddableYaml(yaml_string):
  """Returns a single-line YAML rendering safe to embed in another document."""
  # Because YAML is a space delimited format, we need to be careful about
  # embedding one YAML document in another. This function takes in a string in
  # YAML format and produces an equivalent YAML representation that can be
  # inserted into arbitrary points of another YAML document. It does so by
  # printing the YAML string in a single line format. Consistent ordering of
  # the string is also guaranteed by using yaml.dump.
  # safe_load avoids arbitrary Python object construction from the input.
  yaml_object = yaml.safe_load(yaml_string)
  dumped_yaml = yaml.dump(yaml_object, default_flow_style=True)
  return dumped_yaml


def FormatErrorsDec(func):
  """Decorator to format exceptions if they get raised."""

  def FormatErrorsWrap(context):
    try:
      return func(context)
    except Exception as e:
      # str(e) is portable; Exception.message no longer exists in Python 3.
      raise Error(FormatException(str(e)))

  return FormatErrorsWrap
apache-2.0
tempbottle/dlr
Src/Hosts/Silverlight/Tests/tests/manual/test_s_fractulator/verification.py
2
1111
from SL_util import *
import time

sl = SilverlightDLRTest(2)


def verify_1st():
    """Check the page state after evaluating a valid fraction expression."""
    sl.verify_exact('frac([123,1]) + frac([45,65])', HtmlPage.Document.formattedExpression.innerHTML)
    sl.verify_exact('None', HtmlPage.Document.result.innerHTML)
    sl.verify_exact('frac([123,1]) + frac([45,65])', HtmlPage.Document.evalExpression.innerHTML)
    sl.verify_exact('None', HtmlPage.Document.evalException.innerHTML)
    sl.log_response()


def verify_2nd():
    """Check the page state after a division-by-zero expression."""
    sl.verify_exact('123 ÷ 456 ÷ 0', HtmlPage.Document.formattedExpression.innerHTML)
    sl.verify_exact('n/a', HtmlPage.Document.result.innerHTML)
    sl.verify_exact('frac([123,1]) / frac([456,1]) / frac([0,1])', HtmlPage.Document.evalExpression.innerHTML)
    sl.verify_exact('Attempted to divide by zero.', HtmlPage.Document.evalException.innerHTML)
    sl.log_response()


def _wait_for(done_signal):
    """Poll once per second until the page reports the given signal."""
    while get_signal() != done_signal:
        time.sleep(1)


sl.log_scenario('Try a valid expression')
_wait_for('1st Done')
verify_1st()

sl.log_scenario('Try devided by zero')
_wait_for('2nd Done')
verify_2nd()

sl.log_done()
apache-2.0
tkelman/utf8rewind
tools/converter/blns.py
1
8064
import datetime
import os.path
import re
import sys


def codepointToHexadecimalWideUtf16(codepoint, wroteHex=False):
    """Render one code point as C wide-string (UTF-16 wchar_t) source text.

    Returns (text, wrote_hex). wrote_hex is True when the emitted text ends
    in a \\xHH.. escape; the caller must then break the string literal
    before a following hex digit, which would otherwise extend the escape.
    """
    result = ''
    if codepoint <= 0x7F:
        # Printable ASCII, with the usual C escapes for control characters.
        conversion = {
            0x00: "\\0",
            0x07: "\\a",
            0x08: "\\b",
            0x09: "\\t",
            0x0A: "\\n",
            0x0B: "\\v",
            0x0C: "\\f",
            0x0D: "\\r",

            # must be escaped
            0x22: "\\\"",
            0x5C: "\\\\",
        }
        if codepoint in conversion:
            result += conversion[codepoint]
            return result, False
        elif codepoint < 0x20:
            result += '\\x' + format(codepoint, 'X')
            return result, True
        else:
            # FIX: hex digits are 0-9, A-F, a-f (0x61-0x66); the old upper
            # bound 0x76 ('v') broke the literal unnecessarily for g-v.
            isHex = (0x41 <= codepoint <= 0x46) or (0x61 <= codepoint <= 0x66) or (0x30 <= codepoint <= 0x39)
            if wroteHex and isHex:
                # Close and reopen the literal so the digit is not absorbed
                # into the preceding \xHH escape.
                result += "\" L\""
            result += "%c" % codepoint
            return result, False
    elif codepoint <= 0xFFFF:
        result += '\\x' + format(codepoint, 'X')
        return result, True
    elif codepoint <= 0x10FFFF:
        # Supplementary plane: encode as a UTF-16 surrogate pair.
        decoded = codepoint - 0x10000
        surrogate_high = 0xD800 + (decoded >> 10)
        surrogate_low = 0xDC00 + (decoded & 0x03FF)
        result += '\\x' + format(surrogate_high, '4X')
        result += '\\x' + format(surrogate_low, '4X')
        return result, True
    else:
        # Outside Unicode: emit the replacement character.
        result += '\\xFFFD'
        return result, True


def codepointToHexadecimalWideUtf32(codepoint, wroteHex=False):
    """Render one code point as C wide-string (UTF-32 wchar_t) source text.

    Same contract as codepointToHexadecimalWideUtf16, but code points above
    the BMP are emitted directly instead of as surrogate pairs.
    """
    result = ''
    if codepoint <= 0x7F:
        conversion = {
            0x00: "\\0",
            0x07: "\\a",
            0x08: "\\b",
            0x09: "\\t",
            0x0A: "\\n",
            0x0B: "\\v",
            0x0C: "\\f",
            0x0D: "\\r",

            # must be escaped
            0x22: "\\\"",
            0x5C: "\\\\",
        }
        if codepoint in conversion:
            result += conversion[codepoint]
            return result, False
        elif codepoint < 0x20:
            result += '\\x' + format(codepoint, 'X')
            return result, True
        else:
            # FIX: same hex-digit range correction as the UTF-16 variant.
            isHex = (0x41 <= codepoint <= 0x46) or (0x61 <= codepoint <= 0x66) or (0x30 <= codepoint <= 0x39)
            if wroteHex and isHex:
                result += "\" L\""
            result += "%c" % codepoint
            return result, False
    elif codepoint <= 0x10FFFF:
        result += '\\x' + format(codepoint, 'X')
        return result, True
    else:
        result += '\\xFFFD'
        return result, True


class Test:
    """One naughty-string line, with its byte span in the input file."""

    def __init__(self, line, bytes, offset):
        self.line_utf16 = line.encode('utf-16le')
        self.line_utf32 = line.encode('utf-32le')
        self.bytes = bytes
        self.offset = offset
        self.converted_utf16 = ''
        self.converted_utf32 = ''

    def Convert(self):
        """Build both wide-string renderings; True if they are identical."""
        hex_utf16 = False
        self.converted_utf16 = ''
        hex_utf32 = False
        self.converted_utf32 = ''
        for c in (self.line_utf16[pos:pos + 2] for pos in range(0, len(self.line_utf16), 2)):
            codepoint = (c[1] << 8) | c[0]
            result_utf16, hex_utf16 = codepointToHexadecimalWideUtf16(codepoint, hex_utf16)
            self.converted_utf16 += result_utf16
        for c in (self.line_utf32[pos:pos + 4] for pos in range(0, len(self.line_utf32), 4)):
            # BUG FIX: the most significant byte must be shifted by 24; the
            # old expression used (c[3] << 8), duplicating the << 8 term.
            codepoint = (c[3] << 24) | (c[2] << 16) | (c[1] << 8) | c[0]
            result_utf32, hex_utf32 = codepointToHexadecimalWideUtf32(codepoint, hex_utf32)
            self.converted_utf32 += result_utf32
        return self.converted_utf16 == self.converted_utf32

    def Render(self, header, type='utf16'):
        """Write one EXPECT_STREQ line comparing the wide literal against
        the raw bytes read back from the data file."""
        header.writeLine('EXPECT_STREQ(L"' + self.__dict__['converted_' + type] + '", helpers::wide(ReadSection(' + str(self.offset) + ', ' + str(len(self.bytes)) + ')).c_str());')


class Section:
    """A named group of tests, one per section header in blns.txt."""

    def __init__(self, name):
        # Sanitize the section title into a valid C++ identifier.
        self.name = re.sub('[^A-Za-z0-9_]', '', name.title())
        self.tests = []
        self.differs = False

    def Process(self):
        """Convert all tests; record whether UTF-16 and UTF-32 ever differ."""
        self.differs = False
        for t in self.tests:
            if not t.Convert():
                self.differs = True

    def Render(self, header):
        """Emit one TEST_F per section, forked on wchar_t width if needed."""
        print('Writing tests for "' + self.name + '"...')
        if self.differs:
            print('\tUTF-16 differs from UTF-32, writing both.')
        header.newLine()
        header.newLine()
        header.writeLine('TEST_F(NaughtyStrings, ' + self.name + ')')
        header.writeLine('{')
        if self.differs:
            header.writeLine('#if UTF8_WCHAR_UTF16')
            header.indent()
            for t in self.tests:
                t.Render(header, 'utf16')
            header.outdent()
            header.writeLine('#elif UTF8_WCHAR_UTF32')
            header.indent()
            for t in self.tests:
                t.Render(header, 'utf32')
            header.outdent()
            header.writeLine('#endif')
        else:
            header.indent()
            for t in self.tests:
                t.Render(header)
            header.outdent()
        header.write("}")


class Processor:
    """Small state machine that parses blns.txt and renders the C++ tests."""

    def __init__(self):
        self.current = None
        self.sections = []
        # Maps the current parser state to its handler; each handler returns
        # the next state.
        self.state_map = {
            'section': self.ProcessSection,
            'comments': self.ProcessComments,
            'test': self.ProcessTest,
            'exit': self.ProcessExit
        }
        self.state = 'section'

    def Parse(self, filepath):
        """Read the file byte-by-byte so each test knows its exact byte span."""
        print('Parsing "' + os.path.realpath(filepath) + '"...')
        with open(filepath, 'rb') as f:
            self.state = 'section'
            bytes_read = bytearray()
            offset = 0
            offset_start = offset
            while True:
                current = f.read(1)
                offset += 1
                if not current:
                    break
                if current == b'\n':
                    line = str(bytes_read, encoding='utf-8')
                    self.state = self.state_map[self.state](line, bytes_read, offset_start)
                    bytes_read = bytearray()
                    offset_start = offset
                else:
                    bytes_read.append(ord(current))
        print('Processing sections...')
        for s in self.sections:
            s.Process()

    def Render(self, filepath):
        """Write the generated test fixture and all section tests."""
        # Deferred import so the conversion helpers above remain importable
        # without the generator-only libs.header dependency.
        import libs.header

        header = libs.header.Header(filepath)
        header.generatedNotice()
        header.newLine()
        header.writeLine('#include "tests-base.hpp"')
        header.newLine()
        header.writeLine('#include "../helpers/helpers-strings.hpp"')
        header.newLine()
        header.writeLine('#define NAUGHTY_STRINGS_LENGTH 10370')
        header.newLine()
        header.writeLine('class NaughtyStrings')
        header.writeLine(' : public ::testing::Test')
        header.writeLine('{')
        header.newLine()
        header.writeLine('protected:')
        header.newLine()
        header.writeLine(' void SetUp()')
        header.writeLine(' {')
        header.writeLine(' file.open("testdata/big-list-of-naughty-strings-master/blns.txt", std::ios_base::in);')
        header.writeLine(' ASSERT_TRUE(file.is_open());')
        header.writeLine(' }')
        header.newLine()
        header.writeLine(' void TearDown()')
        header.writeLine(' {')
        header.writeLine(' file.close();')
        header.writeLine(' }')
        header.newLine()
        header.writeLine(' std::string ReadSection(size_t position, size_t length)')
        header.writeLine(' {')
        header.writeLine(' std::string result;')
        header.newLine()
        header.writeLine(' file.seekg(position, std::ios::beg);')
        header.writeLine(' if (file.eof())')
        header.writeLine(' {')
        header.writeLine(' return result;')
        header.writeLine(' }')
        header.newLine()
        header.writeLine(' result.resize(length + 1);')
        header.writeLine(' file.read(&result[0], length);')
        header.newLine()
        header.writeLine(' return result;')
        header.writeLine(' }')
        header.newLine()
        header.writeLine(' std::fstream file;')
        header.newLine()
        header.write('};')
        for s in self.sections:
            s.Render(header)

    def ProcessSection(self, line, bytes, offset):
        """Expect a '# Title' header; start a new Section."""
        match = re.match('#[\t ]+(.+)', line)
        if not match:
            return 'exit'
        self.current = Section(match.group(1))
        self.sections.append(self.current)
        return 'comments'

    def ProcessComments(self, line, bytes, offset):
        """Skip comment lines; a non-comment line is the first test."""
        if len(line) > 0 and not re.match('#.*', line):
            return self.ProcessTest(line, bytes, offset)
        return 'comments'

    def ProcessTest(self, line, bytes, offset):
        """Collect test lines until a blank line ends the section."""
        if len(line) == 0:
            return 'section'
        test = Test(line, bytes, offset)
        self.current.tests.append(test)
        return 'test'

    def ProcessExit(self, line, bytes, offset):
        print('Error parsing file.')
        sys.exit(1)


if __name__ == '__main__':
    current_directory = os.path.dirname(os.path.realpath(sys.argv[0])) + '/'

    processor = Processor()
    processor.Parse(current_directory + '../../testdata/big-list-of-naughty-strings-master/blns.txt')
    processor.Render(current_directory + '../../source/tests/integration-naughty-strings.cpp')

    print('Done.')
mit
cpollard1001/FreeCAD_sf_master
src/Mod/Spreadsheet/App/Spreadsheet_legacy.py
25
43880
#*************************************************************************** #* * #* Copyright (c) 2013 - Yorik van Havre <yorik@uncreated.net> * #* * #* This program is free software; you can redistribute it and/or modify * #* it under the terms of the GNU Lesser General Public License (LGPL) * #* as published by the Free Software Foundation; either version 2 of * #* the License, or (at your option) any later version. * #* for detail see the LICENCE text file. * #* * #* This program is distributed in the hope that it will be useful, * #* but WITHOUT ANY WARRANTY; without even the implied warranty of * #* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * #* GNU Library General Public License for more details. * #* * #* You should have received a copy of the GNU Library General Public * #* License along with this program; if not, write to the Free Software * #* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * #* USA * #* * #*************************************************************************** """This is the deprecated spreadsheet module. 
It is not used anymore in FreeCAD, but is still there for archiving purposes.""" import re, math, FreeCAD, FreeCADGui from PySide import QtCore,QtGui DEBUG = True # set to True to show debug messages if open.__module__ == '__builtin__': pyopen = open # because we'll redefine open below class MathParser: "A math expression parser" # code adapted from http://www.nerdparadise.com/tech/python/parsemath/ def __init__(self, string, vars={}): self.string = string self.index = 0 self.vars = { 'pi' : math.pi, 'e' : math.e } for var in vars.keys(): if self.vars.get(var) != None: raise RuntimeError("Cannot redefine the value of " + var) self.vars[var] = vars[var] def getValue(self): value = self.parseExpression() self.skipWhitespace() if self.hasNext(): raise SyntaxError( "Unexpected character found: '" + self.peek() + "' at index " + str(self.index)) return value def peek(self): return self.string[self.index:self.index + 1] def hasNext(self): return self.index < len(self.string) def skipWhitespace(self): while self.hasNext(): if self.peek() in ' \t\n\r': self.index += 1 else: return def parseExpression(self): return self.parseAddition() def parseAddition(self): values = [self.parseMultiplication()] while True: self.skipWhitespace() char = self.peek() if char == '+': self.index += 1 values.append(self.parseMultiplication()) elif char == '-': self.index += 1 values.append(-1 * self.parseMultiplication()) else: break return sum(values) def parseMultiplication(self): values = [self.parseParenthesis()] while True: self.skipWhitespace() char = self.peek() if char == '*': self.index += 1 values.append(self.parseParenthesis()) elif char == '/': div_index = self.index self.index += 1 denominator = self.parseParenthesis() if denominator == 0: raise ZeroDivisionError( "Division by 0 kills baby whales (occured at index " + str(div_index) + ")") values.append(1.0 / denominator) else: break value = 1.0 for factor in values: value *= factor return value def parseParenthesis(self): 
self.skipWhitespace() char = self.peek() if char == '(': self.index += 1 value = self.parseExpression() self.skipWhitespace() if self.peek() != ')': raise SyntaxError( "No closing parenthesis found at character " + str(self.index)) self.index += 1 return value else: return self.parseNegative() def parseNegative(self): self.skipWhitespace() char = self.peek() if char == '-': self.index += 1 return -1 * self.parseParenthesis() else: return self.parseValue() def parseValue(self): self.skipWhitespace() char = self.peek() if char in '0123456789.': return self.parseNumber() else: return self.parseVariable() def parseVariable(self): self.skipWhitespace() var = '' while self.hasNext(): char = self.peek() if char.lower() in '_abcdefghijklmnopqrstuvwxyz0123456789': var += char self.index += 1 else: break value = self.vars.get(var, None) if value == None: raise ValueError( "Unrecognized variable: '" + var + "'") return float(value) def parseNumber(self): self.skipWhitespace() strValue = '' decimal_found = False char = '' while self.hasNext(): char = self.peek() if char == '.': if decimal_found: raise SyntaxError( "Found an extra period in a number at character " + str(self.index) + ". Are you European?") decimal_found = True strValue += '.' elif char in '0123456789': strValue += char else: break self.index += 1 if len(strValue) == 0: if char == '': raise SyntaxError("Unexpected end found") else: raise SyntaxError( "I was expecting to find a number at character " + str(self.index) + " but instead I found a '" + char + "'. What's up with that?") return float(strValue) class Spreadsheet: """An object representing a spreadsheet. Can be used as a FreeCAD object or as a standalone python object. 
Cells of the spreadsheet can be got/set as arguments, as: myspreadsheet = Spreadsheet() myspreadsheet.a1 = 54 print(myspreadsheet.a1) myspreadsheet.a2 = "My text" myspreadsheet.b1 = "=a1*3" print(myspreadsheet.b1) The cell names are case-insensitive (a1 = A1) """ def __init__(self,obj=None): if obj: obj.Proxy = self obj.addProperty("App::PropertyLinkList","Controllers","Base","Cell controllers of this object") self.Object = obj.Name self._cells = {} # this stores cell contents self._relations = {} # this stores relations - currently not used self.cols = [] # this stores filled columns self.rows = [] # this stores filed rows self.Type = "Spreadsheet" def __repr__(self): return "Spreadsheet object containing " + str(len(self._cells)) + " cells" def __setattr__(self, key, value): if self.isKey(key): key = key.lower() if DEBUG: print "Setting key ",key," to value ",value if (value == "") or (value == None): # remove cell if key in self._cells.keys(): del self._cells[key] else: # add cell self._cells[key] = value if value: if self.isFunction(value): self._updateDependencies(key,value) c,r = self.splitKey(key) if not c in self.cols: self.cols.append(c) self.cols.sort() if not r in self.rows: self.rows.append(r) self.rows.sort() self._updateControllers() else: self.__dict__.__setitem__(key,value) def __getattr__(self, key): if key.lower() in self._cells: key = key.lower() if self.isFunction(self._cells[key]): try: e = self.evaluate(key) except: print "Spreadsheet: Error evaluating formula" return None else: return e else: return self._cells[key] else: return self.__dict__.__getitem__(key) def __setitem__(self, key, value): __setattr__(self, key, value) def __getitem__(self, key): return __getattr__(self, key) def __getstate__(self): self._cells["Type"] = self.Type if hasattr(self,"Object"): self._cells["Object"] = self.Object return self._cells def __setstate__(self,state): if state: self._cells = state # extracting Type if "Type" in self._cells.keys(): self.Type = 
self._cells["Type"] del self._cells["Type"] if "Object" in self._cells.keys(): self.Object = self._cells["Object"] del self._cells["Object"] # updating relation tables self.rows = [] self.cols = [] self._relations = {} for key in self._cells.keys(): c,r = self.splitKey(key) if not r in self.rows: self.rows.append(r) self.rows.sort() if not c in self.cols: self.cols.append(c) self.cols.sort() if self.isFunction(key): self._updateDependencies(key) def _updateDependencies(self,key,value=None): "search for ancestors in the value and updates the table" ancestors = [] if not value: value = self._cells[key] for v in re.findall(r"[\w']+",value): if self.isKey(v): ancestors.append(v) for a in ancestors: if a in self._relations: if not key in self._relations[a]: self._relations[a].append(key) else: self._relations[a] = [key] def _updateControllers(self): "triggers the property controllers" if hasattr(self,"Object"): obj = FreeCAD.ActiveDocument.getObject(self.Object) if obj: import Draft if Draft.getType(obj) == "Spreadsheet": if hasattr(obj,"Controllers"): for co in obj.Controllers: if Draft.getType(co) == "SpreadsheetPropertyController": co.Proxy.execute(co) def execute(self,obj): pass def isFunction(self,key): "isFunction(cell): returns True if the given cell or value is a function" if str(key).lower() in self._cells: key = key.lower() if str(self._cells[key])[0] == "=": return True elif str(key)[0] == "=": return True else: return False def isNumeric(self,key): "isNumeric(cell): returns True if the given cell returns a number" key = key.lower() if self.isFunction(key): res = self.evaluate(key) else: res = self._cells[key] if isinstance(res,float) or isinstance(res,int): return True else: return False def isKey(self,value): "isKey(val): returns True if the given value is a valid cell number" allowMoreThanOneLetter = False al = False nu = False for v in value: if not v.isalnum(): return False elif not al: if v.isalpha(): al = True else: return False else: if not nu: # 
forbidden to set items at row 0 if v == "0": return False if v.isalpha(): if not allowMoreThanOneLetter: return False elif nu: return False elif v.isdigit(): nu = True if not nu: return False return True def splitKey(self,key): "splitKey(cell): splits a key between column and row" c = '' r = '' for ch in key: if ch.isalpha(): c += ch else: r += ch return c,r def getFunction(self,key): "getFunction(cell): returns the function contained in the given cell, instead of the value" key = key.lower() if key in self._cells: return self._cells[key] else: return None def getSize(self): "getSize(): returns a tuple with number of columns and rows of this spreadsheet" return (len(self.columns),len(self.rows)) def getCells(self,index): "getCells(index): returns the cells from the given column of row number" cells = {} for k in self._cells.keys(): c,r = self.splitKey(k) if index in [c,r]: cells[k] = self._cells[k] return cells def evaluate(self,key): "evaluate(key): evaluates the given formula" key = key.lower() elts = re.split(r'(\W+)',self._cells[key][1:]) result = "" for e in elts: if self.isKey(e): if self.isFunction(e): if self.isNumeric(e): result += str(self.evaluate(e)) else: print "Spreadsheet: Error evaluating formula" return elif self.isNumeric(e): result += str(self._cells[e.lower()]) else: result += e if DEBUG: print "Evaluating ",result try: p = MathParser(result) result = p.getValue() except Exception as (ex): raise # #msg = ex.message #raise Exception(msg) #would discard the type return result def recompute(self,obj): "Fills the controlled cells and properties" if obj: if hasattr(obj,"Controllers"): import Draft for co in obj.Controllers: if Draft.getType(co) == "SpreadsheetController": co.Proxy.setCells(co,obj) elif Draft.getType(co) == "SpreadsheetPropertyController": co.Proxy.compute(co) def getControlledCells(self,obj): "returns a list of cells managed by controllers" cells = [] if hasattr(obj,"Controllers"): import Draft for co in obj.Controllers: if 
Draft.getType(co) == "SpreadsheetController": cells.extend(co.Proxy.getCells(co,obj)) return cells def getControllingCells(self,obj): "returns a list of controlling cells managed by controllers" cells = [] if hasattr(obj,"Controllers"): import Draft for co in obj.Controllers: if Draft.getType(co) == "SpreadsheetPropertyController": if co.Cell: cells.append(co.Cell.lower()) return cells class ViewProviderSpreadsheet(object): def __init__(self, vobj): vobj.Proxy = self def getIcon(self): import Spreadsheet_rc return ":/icons/Spreadsheet.svg" def attach(self,vobj): self.Object = vobj.Object def setEdit(self,vobj,mode=0): if hasattr(self,"editor"): pass else: self.editor = SpreadsheetView(vobj.Object) addSpreadsheetView(self.editor) return True def unsetEdit(self,vobj,mode=0): return False def doubleClicked(self,vobj): self.setEdit(vobj) def claimChildren(self): if hasattr(self,"Object"): if hasattr(self.Object,"Controllers"): return self.Object.Controllers def __getstate__(self): return None def __setstate__(self,state): return None class SpreadsheetController: "A spreadsheet cell controller object" def __init__(self,obj): obj.Proxy = self self.Type = "SpreadsheetController" obj.addProperty("App::PropertyEnumeration","FilterType","Filter","The type of filter to apply to the scene objects") obj.addProperty("App::PropertyString","Filter","Filter","The filter to apply to the scene objects") obj.addProperty("App::PropertyEnumeration","DataType","Data","The type of data to extract from the objects") obj.addProperty("App::PropertyString","Data","Data","The data to extract from the objects") obj.addProperty("App::PropertyString","BaseCell","Base","The starting cell of this controller") obj.addProperty("App::PropertyEnumeration","Direction","Base","The cells direction of this controller") obj.FilterType = ["Object Type","Object Name"] obj.DataType = ["Get Property","Count"] obj.Direction = ["Horizontal","Vertical"] def execute(self,obj): pass def __getstate__(self): return 
self.Type def __setstate__(self,state): if state: self.Type = state def onChanged(self,obj,prop): if prop == "DataType": if obj.DataType == "Count": obj.setEditorMode('Data',1) else: obj.setEditorMode('Data',0) def getDataSet(self,obj): "returns a list of objects to be considered by this controller" result = [] if hasattr(obj,"FilterType"): import Draft baseset = FreeCAD.ActiveDocument.Objects if obj.FilterType == "Object Type": for o in baseset: if not ("Spreadsheet" in Draft.getType(o)): t = Draft.getType(o) if t == "Part": t = obj.TypeId if obj.Filter: if obj.Filter in t: result.append(o) else: result.append(o) elif obj.FilterType == "Object Name": for o in baseset: if not ("Spreadsheet" in Draft.getType(o)): if obj.Filter: if obj.Filter in o.Label: result.append(o) else: result.append(o) return result def getCells(self,obj,spreadsheet): "returns a list of cells controlled by this controller" cells = [] if obj.BaseCell: if obj.DataType == "Count": return obj.BaseCell for i in range(len(self.getDataSet(obj))): # get the correct cell key c,r = spreadsheet.Proxy.splitKey(obj.BaseCell) if obj.Direction == "Horizontal": c = c.lower() c = "abcdefghijklmnopqrstuvwxyz".index(c) c += i c = "abcdefghijklmnopqrstuvwxyz"[c] else: r = int(r) + i cells.append(c+str(r)) return cells def setCells(self,obj,spreadsheet): "Fills the controlled cells of the given spreadsheet" if obj.BaseCell: dataset = self.getDataSet(obj) if obj.DataType == "Count": if spreadsheet.Proxy.isKey(obj.BaseCell): try: setattr(spreadsheet.Proxy,obj.BaseCell,len(dataset)) except: print "Spreadsheet: Error counting objects" elif obj.Data: for i in range(len(dataset)): # get the correct cell key c,r = spreadsheet.Proxy.splitKey(obj.BaseCell) if obj.Direction == "Horizontal": c = c.lower() c = "abcdefghijklmnopqrstuvwxyz".index(c) c += i c = "abcdefghijklmnopqrstuvwxyz"[c] else: r = int(r) + i cell = c+str(r) if DEBUG: print "auto setting cell ",cell if spreadsheet.Proxy.isKey(cell): # get the contents args 
= obj.Data.split(".") value = dataset[i] for arg in args: print arg if hasattr(value,arg): value = getattr(value,arg) try: if isinstance(value,float) or isinstance(value,int): pass else: value = str(value) value = ''.join([ c for c in value if c not in ('<','>',':')]) setattr(spreadsheet.Proxy,cell,value) if DEBUG: print "setting cell ",cell," to value ",value except: print "Spreadsheet: Error retrieving property "+obj.Data+" from object "+dataset[i].Name class ViewProviderSpreadsheetController: "A view provider for the spreadsheet cell controller" def __init__(self,vobj): vobj.Proxy = self def getIcon(self): import Spreadsheet_rc return ":/icons/SpreadsheetController.svg" class SpreadsheetPropertyController: "A spreadsheet property controller object" def __init__(self,obj): obj.Proxy = self self.Type = "SpreadsheetPropertyController" obj.addProperty("App::PropertyEnumeration","TargetType","Base","The type of item to control") obj.addProperty("App::PropertyLink","TargetObject","Base","The object that must be controlled") obj.addProperty("App::PropertyString","TargetProperty","Base","The property or constraint of the target object to control") obj.addProperty("App::PropertyString","Cell","Base","The cell that contains the value to apply to the property") obj.TargetType = ["Property","Constraint"] def execute(self,obj): pass def compute(self,obj): if obj.Cell and obj.TargetObject and obj.TargetProperty and obj.InList: sp = obj.InList[0] import Draft if Draft.getType(sp) == "Spreadsheet": try: value = getattr(sp.Proxy,obj.Cell) except: if DEBUG: print "No value for cell ",obj.Cell," in spreadsheet." 
return if obj.TargetType == "Property": b = obj.TargetObject props = obj.TargetProperty.split(".") for p in props: if hasattr(b,p): if p != props[-1]: b = getattr(b,p) else: return try: setattr(b,p,value) FreeCAD.ActiveDocument.recompute() if DEBUG: print "setting property ",obj.TargetProperty, " of object ",obj.TargetObject.Name, " to ",value except: if DEBUG: print "unable to set property ",obj.TargetProperty, " of object ",obj.TargetObject.Name, " to ",value else: if Draft.getType(obj.TargetObject) == "Sketch": if obj.TargetProperty.isdigit(): # try setting by constraint id try: c = int(obj.TargetProperty) obj.TargetObject.setDatum(c,float(value)) FreeCAD.ActiveDocument.recompute() if DEBUG: print "setting constraint ",obj.TargetProperty, " of object ",obj.TargetObject.Name, " to ",value except: if DEBUG: print "unable to set constraint ",obj.TargetProperty, " of object ",obj.TargetObject.Name, " to ",value else: # try setting by constraint name try: obj.TargetObject.setDatum(obj.TargetProperty,float(value)) FreeCAD.ActiveDocument.recompute() if DEBUG: print "setting constraint ",obj.TargetProperty, " of object ",obj.TargetObject.Name, " to ",value except: if DEBUG: print "unable to set constraint ",obj.TargetProperty, " of object ",obj.TargetObject.Name, " to ",value def __getstate__(self): return self.Type def __setstate__(self,state): if state: self.Type = state def onChanged(self,obj,prop): pass class ViewProviderSpreadsheetPropertyController: "A view provider for the spreadsheet property controller" def __init__(self,vobj): vobj.Proxy = self def getIcon(self): import Spreadsheet_rc return ":/icons/SpreadsheetPropertyController.svg" class SpreadsheetView(QtGui.QWidget): "A spreadsheet viewer for FreeCAD" def __init__(self,spreadsheet=None): from DraftTools import translate QtGui.QWidget.__init__(self) self.setWindowTitle(str(translate("Spreadsheet","Spreadsheet"))) self.setObjectName("Spreadsheet viewer") self.verticalLayout = QtGui.QVBoxLayout(self) 
self.doNotChange = False # add editor line self.horizontalLayout = QtGui.QHBoxLayout() self.label = QtGui.QLabel(self) self.label.setMinimumSize(QtCore.QSize(82, 0)) self.label.setText(translate("Spreadsheet","Cell")+" A1 :") self.lineEdit = QtGui.QLineEdit(self) self.applyButton = QtGui.QPushButton(self) self.applyButton.setText(translate("Spreadsheet","Apply")) self.applyButton.setIcon(QtGui.QIcon(":/icons/edit_OK.svg")) self.applyButton.setToolTip(translate("Spreadsheet","Apply the changes to the current cell")) self.wipeButton = QtGui.QPushButton(self) self.wipeButton.setText(translate("Spreadsheet","Delete")) self.wipeButton.setIcon(QtGui.QIcon(":/icons/process-stop.svg")) self.wipeButton.setToolTip(translate("Spreadsheet","Deletes the contents of the current cell")) self.computeButton = QtGui.QPushButton(self) self.computeButton.setText(translate("Spreadsheet","Compute")) self.computeButton.setIcon(QtGui.QIcon(":/icons/view-refresh.svg")) self.computeButton.setToolTip(translate("Spreadsheet","Updates the values handled by controllers")) self.horizontalLayout.addWidget(self.label) self.horizontalLayout.addWidget(self.lineEdit) self.horizontalLayout.addWidget(self.applyButton) self.horizontalLayout.addWidget(self.wipeButton) self.horizontalLayout.addWidget(self.computeButton) self.verticalLayout.addLayout(self.horizontalLayout) # add table self.table = QtGui.QTableWidget(30,26,self) for i in range(26): ch = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"[i] self.table.setHorizontalHeaderItem(i, QtGui.QTableWidgetItem(ch)) self.verticalLayout.addWidget(self.table) self.table.setCurrentCell(0,0) self.spreadsheet = spreadsheet self.update() QtCore.QObject.connect(self.table, QtCore.SIGNAL("cellChanged(int,int)"), self.changeCell) QtCore.QObject.connect(self.table, QtCore.SIGNAL("currentCellChanged(int,int,int,int)"), self.setEditLine) QtCore.QObject.connect(self.lineEdit, QtCore.SIGNAL("returnPressed()"), self.getEditLine) QtCore.QObject.connect(self.applyButton, 
QtCore.SIGNAL("clicked()"), self.getEditLine) QtCore.QObject.connect(self.wipeButton, QtCore.SIGNAL("clicked()"), self.wipeCell) QtCore.QObject.connect(self.computeButton, QtCore.SIGNAL("clicked()"), self.recompute) def closeEvent(self, event): #if DEBUG: print "Closing spreadsheet view" if self.spreadsheet: # before deleting this view, we remove the reference to it in the object if hasattr(self.spreadsheet,"ViewObject"): if self.spreadsheet.ViewObject: if hasattr(self.spreadsheet.ViewObject.Proxy,"editor"): del self.spreadsheet.ViewObject.Proxy.editor if FreeCADGui: if FreeCADGui.ActiveDocument: FreeCADGui.ActiveDocument.resetEdit() def update(self): "updates the cells with the contents of the spreadsheet" if self.spreadsheet: controlled = self.spreadsheet.Proxy.getControlledCells(self.spreadsheet) controlling = self.spreadsheet.Proxy.getControllingCells(self.spreadsheet) for cell in self.spreadsheet.Proxy._cells.keys(): if not cell in ["Type","Object"]: c,r = self.spreadsheet.Proxy.splitKey(cell) c = "abcdefghijklmnopqrstuvwxyz".index(c) r = int(str(r))-1 content = getattr(self.spreadsheet.Proxy,cell) if self.spreadsheet.Proxy.isFunction(cell): self.doNotChange = True if content == None: content = "" if DEBUG: print "Updating ",cell," to ",content if self.table.item(r,c): self.table.item(r,c).setText(str(content)) else: self.table.setItem(r,c,QtGui.QTableWidgetItem(str(content))) if cell in controlled: brush = QtGui.QBrush(QtGui.QColor(255, 0, 0)) brush.setStyle(QtCore.Qt.Dense6Pattern) if self.table.item(r,c): self.table.item(r,c).setBackground(brush) elif cell in controlling: brush = QtGui.QBrush(QtGui.QColor(0, 0, 255)) brush.setStyle(QtCore.Qt.Dense6Pattern) if self.table.item(r,c): self.table.item(r,c).setBackground(brush) else: brush = QtGui.QBrush() if self.table.item(r,c): self.table.item(r,c).setBackground(brush) def changeCell(self,r,c,value=None): "changes the contens of a cell" if self.doNotChange: if DEBUG: print "DoNotChange flag is set" 
self.doNotChange = False elif self.spreadsheet: key = "abcdefghijklmnopqrstuvwxyz"[c]+str(r+1) if value == None: value = self.table.item(r,c).text() if value == "": if DEBUG: print "Wiping "+key if self.table.item(r,c): self.table.item(r,c).setText("") if key in self.spreadsheet.Proxy._cells.keys(): del self.spreadsheet.Proxy._cells[key] else: if DEBUG: print "Changing "+key+" to "+value # store the entry as best as possible try: v = int(value) except: try: v = float(value) except: try: v = v = str(value) except: v = value setattr(self.spreadsheet.Proxy,key,v) self.update() # TODO do not update the whole spreadsheet when only one cell has changed: # use the _relations table and recursively update only cells based on this one self.setEditLine(r,c) def setEditLine(self,r,c,orr=None,orc=None): "copies the contents of the active cell to the edit line" if self.spreadsheet: c = "abcdefghijklmnopqrstuvwxyz"[c] r = r+1 if DEBUG: print "Active cell "+c+str(r) from DraftTools import translate self.label.setText(str(translate("Spreadsheet","Cell"))+" "+c.upper()+str(r)+" :") content = self.spreadsheet.Proxy.getFunction(c+str(r)) if content == None: content = "" self.lineEdit.setText(str(content)) def getEditLine(self): "called when something has been entered in the edit line" txt = str(self.lineEdit.text()) if DEBUG: print "Text edited ",txt r = self.table.currentRow() c = self.table.currentColumn() self.changeCell(r,c,txt) def wipeCell(self): if DEBUG: print "Wiping cell" self.lineEdit.setText("") self.getEditLine() def recompute(self): if self.spreadsheet: self.spreadsheet.Proxy.recompute(self.spreadsheet) self.update() class _Command_Spreadsheet_Create: "the Spreadsheet_Create FreeCAD command" def GetResources(self): return {'Pixmap' : 'Spreadsheet', 'MenuText': QtCore.QT_TRANSLATE_NOOP("Spreadsheet_Create","Spreadsheet"), 'ToolTip': QtCore.QT_TRANSLATE_NOOP("Spreadsheet_Create","Adds a spreadsheet object to the active document")} def Activated(self): from DraftTools 
import translate FreeCAD.ActiveDocument.openTransaction(str(translate("Spreadsheet","Create Spreadsheet"))) FreeCADGui.doCommand("import Spreadsheet") FreeCADGui.doCommand("s = Spreadsheet.makeSpreadsheet()") FreeCADGui.doCommand("FreeCAD.ActiveDocument.recompute()") FreeCADGui.doCommand("FreeCADGui.ActiveDocument.setEdit(s.Name,0)") FreeCAD.ActiveDocument.commitTransaction() class _Command_Spreadsheet_Controller: "the Spreadsheet_Controller FreeCAD command" def GetResources(self): return {'Pixmap' : 'SpreadsheetController', 'MenuText': QtCore.QT_TRANSLATE_NOOP("Spreadsheet_Controller","Add controller"), 'ToolTip': QtCore.QT_TRANSLATE_NOOP("Spreadsheet_Controller","Adds a cell controller to a selected spreadsheet")} def IsActive(self): if FreeCADGui.Selection.getSelection(): return True else: return False def Activated(self): import Draft if Draft.getType(FreeCADGui.Selection.getSelection()[0]) == "Spreadsheet": from DraftTools import translate n = FreeCADGui.Selection.getSelection()[0].Name FreeCAD.ActiveDocument.openTransaction(str(translate("Spreadsheet","Add controller"))) FreeCADGui.doCommand("import Spreadsheet") FreeCADGui.doCommand("Spreadsheet.makeSpreadsheetController(FreeCAD.ActiveDocument."+n+")") FreeCAD.ActiveDocument.commitTransaction() FreeCAD.ActiveDocument.recompute() class _Command_Spreadsheet_PropertyController: "the Spreadsheet_Controller FreeCAD command" def GetResources(self): return {'Pixmap' : 'SpreadsheetPropertyController', 'MenuText': QtCore.QT_TRANSLATE_NOOP("Spreadsheet_PropertyController","Add property controller"), 'ToolTip': QtCore.QT_TRANSLATE_NOOP("Spreadsheet_PropertyController","Adds a property controller to a selected spreadsheet")} def IsActive(self): if FreeCADGui.Selection.getSelection(): return True else: return False def Activated(self): import Draft from DraftTools import translate sel = FreeCADGui.Selection.getSelection() if (len(sel) == 1) and Draft.getType(sel[0]) == "Spreadsheet": n = 
FreeCADGui.Selection.getSelection()[0].Name FreeCAD.ActiveDocument.openTransaction(str(translate("Spreadsheet","Add property controller"))) FreeCADGui.doCommand("import Spreadsheet") FreeCADGui.doCommand("Spreadsheet.makeSpreadsheetPropertyController(FreeCAD.ActiveDocument."+n+")") FreeCAD.ActiveDocument.commitTransaction() FreeCAD.ActiveDocument.recompute() elif (len(sel) == 2): if (Draft.getType(sel[0]) == "Spreadsheet") and (Draft.getType(sel[1]) == "SpreadsheetPropertyController"): s = sel[0].Name o = sel[1].Name elif (Draft.getType(sel[1]) == "Spreadsheet") and (Draft.getType(sel[0]) == "SpreadsheetPropertyController"): s = sel[1].Name o = sel[0].Name else: return FreeCAD.ActiveDocument.openTransaction(str(translate("Spreadsheet","Add property controller"))) FreeCADGui.doCommand("import Spreadsheet") FreeCADGui.doCommand("Spreadsheet.makeSpreadsheetPropertyController(FreeCAD.ActiveDocument."+s+",FreeCAD.ActiveDocument."+o+")") FreeCAD.ActiveDocument.commitTransaction() FreeCAD.ActiveDocument.recompute() def makeSpreadsheet(): "makeSpreadsheet(): adds a spreadsheet object to the active document" obj = FreeCAD.ActiveDocument.addObject("App::FeaturePython","Spreadsheet") Spreadsheet(obj) if FreeCAD.GuiUp: ViewProviderSpreadsheet(obj.ViewObject) return obj def makeSpreadsheetController(spreadsheet,cell=None,direction=None): """makeSpreadsheetController(spreadsheet,[cell,direction]): adds a controller to the given spreadsheet. 
Call can be a starting cell such as "A5", and direction can be "Horizontal" or "Vertical".""" obj = FreeCAD.ActiveDocument.addObject("App::FeaturePython","CellController") SpreadsheetController(obj) if FreeCAD.GuiUp: ViewProviderSpreadsheetController(obj.ViewObject) conts = spreadsheet.Controllers conts.append(obj) spreadsheet.Controllers = conts if cell: obj.BaseCell = cell if direction: obj.Direction = direction return obj def makeSpreadsheetPropertyController(spreadsheet,object=None,prop=None,cell=None): """makeSpreadsheetPropertyController(spreadsheet,[object,prop,cell]): adds a property controller, targetting the given object if any, to the given spreadsheet. You can give a property (such as "Length" or "Proxy.Length") and a cell address (such as "B6").""" obj = FreeCAD.ActiveDocument.addObject("App::FeaturePython","PropertyController") SpreadsheetPropertyController(obj) if FreeCAD.GuiUp: ViewProviderSpreadsheetPropertyController(obj.ViewObject) conts = spreadsheet.Controllers conts.append(obj) spreadsheet.Controllers = conts if cell: obj.Cell = cell if prop: obj.Property = prop return obj def addSpreadsheetView(view): "addSpreadsheetView(view): adds the given spreadsheet view to the FreeCAD MDI area" if FreeCAD.GuiUp: import Spreadsheet_rc mw = FreeCADGui.getMainWindow() mdi = mw.findChild(QtGui.QMdiArea) sw = mdi.addSubWindow(view) sw.setWindowIcon(QtGui.QIcon(":/icons/Spreadsheet.svg")) sw.show() mdi.setActiveSubWindow(sw) def open(filename): "called when freecad opens a csv file" import os docname = os.path.splitext(os.path.basename(filename))[0] doc = FreeCAD.newDocument(docname) FreeCAD.ActiveDocument = doc read(filename) doc.recompute() return doc def insert(filename,docname): "called when freecad wants to import a csv file" try: doc = FreeCAD.getDocument(docname) except NameError: doc = FreeCAD.newDocument(docname) FreeCAD.ActiveDocument = doc read(filename) doc.recompute() return doc def read(filename): "creates a spreadsheet with the contents of a 
csv file" sp = makeSpreadsheet() import csv with pyopen(filename, 'rb') as csvfile: csvfile = csv.reader(csvfile) rn = 1 for row in csvfile: cn = 0 for c in row[:26]: cl = "abcdefghijklmnopqrstuvwxyz"[cn] #print "setting ",cl+str(rn)," ",c try: c = int(c) except ValueError: try: c = float(c) except ValueError: c = str(c) setattr(sp.Proxy,cl+str(rn),c) cn += 1 rn += 1 print "successfully imported ",filename def export(exportList,filename): "called when freecad exports a csv file" import csv, Draft if not exportList: print "Spreadsheet: Nothing to export" return obj = exportList[0] if Draft.getType(obj) != "Spreadsheet": print "Spreadhseet: The selected object is not a spreadsheet" return if not obj.Proxy._cells: print "Spreadsheet: The selected spreadsheet contains no cell" return numcols = ("abcdefghijklmnopqrstuvwxyz".index(str(obj.Proxy.cols[-1])))+1 numrows = int(obj.Proxy.rows[-1]) with pyopen(filename, 'wb') as csvfile: csvfile = csv.writer(csvfile) for i in range(numrows): r = [] for j in range(numcols): key = "abcdefghijklmnopqrstuvwxyz"[j]+str(i+1) if key in obj.Proxy._cells.keys(): r.append(str(obj.Proxy.getFunction(key))) else: r.append("") csvfile.writerow(r) print "successfully exported ",filename #FreeCADGui.addCommand('Spreadsheet_Create',_Command_Spreadsheet_Create()) #FreeCADGui.addCommand('Spreadsheet_Controller',_Command_Spreadsheet_Controller()) #FreeCADGui.addCommand('Spreadsheet_PropertyController',_Command_Spreadsheet_PropertyController())
lgpl-2.1
ForensicArtifacts/artifacts
tests/reader_test.py
3
12848
# -*- coding: utf-8 -*- """Tests for the artifact definitions readers.""" from __future__ import unicode_literals import io import unittest import yaml from artifacts import definitions from artifacts import errors from artifacts import reader from tests import test_lib class YamlArtifactsReaderTest(test_lib.BaseTestCase): """YAML artifacts reader tests.""" _DEFINITION_INVALID_LABELS = """\ name: BadLabel doc: badlabel. sources: - type: ARTIFACT_GROUP attributes: names: - 'SystemEventLogEvtx' labels: Logs supported_os: [Windows] """ _DEFINITION_INVALID_SUPPORTED_OS_1 = """\ name: BadSupportedOS doc: supported_os should be an array of strings. sources: - type: ARTIFACT_GROUP attributes: names: - 'SystemEventLogEvtx' labels: [Logs] supported_os: Windows """ _DEFINITION_INVALID_SUPPORTED_OS_2 = """\ name: BadTopSupportedOS doc: Top supported_os should match supported_os from sources. sources: - type: ARTIFACT_GROUP attributes: names: - 'SystemEventLogEvtx' supported_os: [Windows] labels: [Logs] """ _DEFINITION_INVALID_URLS = """\ name: BadUrls doc: badurls. sources: - type: ARTIFACT_GROUP attributes: names: - 'SystemEventLogEvtx' supported_os: [Windows] urls: 'http://example.com' """ _DEFINITION_WITH_EXTRA_KEY = """\ name: WithExtraKey doc: definition with extra_key sources: - type: ARTIFACT_GROUP attributes: names: - 'SystemEventLogEvtx' extra_key: 'wrong' labels: [Logs] supported_os: [Windows] """ _DEFINITION_WITH_RETURN_TYPES = """\ name: WithReturnTypes doc: definition with return_types sources: - type: ARTIFACT_GROUP attributes: names: [WindowsRunKeys, WindowsServices] returned_types: [PersistenceFile] """ _DEFINITION_WITHOUT_DOC = """\ name: NoDoc sources: - type: ARTIFACT_GROUP attributes: names: - 'SystemEventLogEvtx' """ _DEFINITION_WITHOUT_NAME = """\ name: NoNames doc: Missing names attr. sources: - type: ARTIFACT_GROUP attributes: - 'SystemEventLogEvtx' """ _DEFINITION_WITHOUT_SOURCES = """\ name: BadSources doc: must have one sources. 
labels: [Logs] supported_os: [Windows] """ def testReadFileObject(self): """Tests the ReadFileObject function.""" test_file = self._GetTestFilePath(['definitions.yaml']) self._SkipIfPathNotExists(test_file) artifact_reader = reader.YamlArtifactsReader() with open(test_file, 'rb') as file_object: artifact_definitions = list(artifact_reader.ReadFileObject(file_object)) self.assertEqual(len(artifact_definitions), 7) # Artifact with file source type. artifact_definition = artifact_definitions[0] self.assertEqual(artifact_definition.name, 'SecurityEventLogEvtx') expected_description = ( 'Windows Security Event log for Vista or later systems.') self.assertEqual(artifact_definition.description, expected_description) self.assertEqual(len(artifact_definition.sources), 1) source_type = artifact_definition.sources[0] self.assertIsNotNone(source_type) self.assertEqual( source_type.type_indicator, definitions.TYPE_INDICATOR_FILE) expected_paths = [ '%%environ_systemroot%%\\System32\\winevt\\Logs\\Security.evtx' ] self.assertEqual(sorted(source_type.paths), sorted(expected_paths)) self.assertEqual(len(artifact_definition.conditions), 1) expected_condition = 'os_major_version >= 6' self.assertEqual(artifact_definition.conditions[0], expected_condition) self.assertEqual(len(artifact_definition.labels), 1) self.assertEqual(artifact_definition.labels[0], 'Logs') self.assertEqual(len(artifact_definition.supported_os), 1) self.assertEqual(artifact_definition.supported_os[0], 'Windows') self.assertEqual(len(artifact_definition.urls), 1) expected_url = ( 'http://www.forensicswiki.org/wiki/Windows_XML_Event_Log_(EVTX)') self.assertEqual(artifact_definition.urls[0], expected_url) # Artifact with Windows Registry key source type. 
artifact_definition = artifact_definitions[1] self.assertEqual( artifact_definition.name, 'AllUsersProfileEnvironmentVariable') self.assertEqual(len(artifact_definition.sources), 1) source_type = artifact_definition.sources[0] self.assertIsNotNone(source_type) self.assertEqual( source_type.type_indicator, definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_KEY) expected_key1 = ( 'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows NT\\CurrentVersion\\' 'ProfileList\\ProfilesDirectory') expected_key2 = ( 'HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows NT\\CurrentVersion\\' 'ProfileList\\AllUsersProfile') expected_keys = [expected_key1, expected_key2] self.assertEqual(sorted(source_type.keys), sorted(expected_keys)) # Artifact with Windows Registry value source type. artifact_definition = artifact_definitions[2] self.assertEqual(artifact_definition.name, 'CurrentControlSet') self.assertEqual(len(artifact_definition.sources), 1) source_type = artifact_definition.sources[0] self.assertIsNotNone(source_type) self.assertEqual( source_type.type_indicator, definitions.TYPE_INDICATOR_WINDOWS_REGISTRY_VALUE) self.assertEqual(len(source_type.key_value_pairs), 1) key_value_pair = source_type.key_value_pairs[0] expected_key = 'HKEY_LOCAL_MACHINE\\SYSTEM\\Select' self.assertEqual(key_value_pair['key'], expected_key) self.assertEqual(key_value_pair['value'], 'Current') # Artifact with WMI query source type. 
artifact_definition = artifact_definitions[3] self.assertEqual(artifact_definition.name, 'WMIProfileUsersHomeDir') expected_provides = sorted(['users.homedir']) self.assertEqual(sorted(artifact_definition.provides), expected_provides) self.assertEqual(len(artifact_definition.sources), 1) source_type = artifact_definition.sources[0] self.assertIsNotNone(source_type) self.assertEqual( source_type.type_indicator, definitions.TYPE_INDICATOR_WMI_QUERY) expected_query = ( 'SELECT * FROM Win32_UserProfile WHERE SID=\'%%users.sid%%\'') self.assertEqual(source_type.query, expected_query) # Artifact with artifact definition source type. artifact_definition = artifact_definitions[4] self.assertEqual(artifact_definition.name, 'EventLogs') self.assertEqual(len(artifact_definition.sources), 1) source_type = artifact_definition.sources[0] self.assertIsNotNone(source_type) self.assertEqual( source_type.type_indicator, definitions.TYPE_INDICATOR_ARTIFACT_GROUP) # Artifact with command definition source type. artifact_definition = artifact_definitions[5] self.assertEqual(artifact_definition.name, 'RedhatPackagesList') self.assertEqual(len(artifact_definition.sources), 1) source_type = artifact_definition.sources[0] self.assertIsNotNone(source_type) self.assertEqual( source_type.type_indicator, definitions.TYPE_INDICATOR_COMMAND) # Artifact with COMMAND definition collector definition. 
artifact_definition = artifact_definitions[5] self.assertEqual(artifact_definition.name, 'RedhatPackagesList') self.assertEqual(len(artifact_definition.sources), 1) collector_definition = artifact_definition.sources[0] self.assertIsNotNone(collector_definition) self.assertEqual( collector_definition.type_indicator, definitions.TYPE_INDICATOR_COMMAND) def testReadFileObjectInvalidLabels(self): """Tests the ReadFileObject function on an invalid labels.""" artifact_reader = reader.YamlArtifactsReader() file_object = io.StringIO(initial_value=self._DEFINITION_INVALID_LABELS) with self.assertRaises(errors.FormatError): _ = list(artifact_reader.ReadFileObject(file_object)) def testReadFileObjectInvalidSupportedOS(self): """Tests the ReadFileObject function on an invalid supported_os.""" artifact_reader = reader.YamlArtifactsReader() file_object = io.StringIO( initial_value=self._DEFINITION_INVALID_SUPPORTED_OS_1) with self.assertRaises(errors.FormatError): _ = list(artifact_reader.ReadFileObject(file_object)) file_object = io.StringIO( initial_value=self._DEFINITION_INVALID_SUPPORTED_OS_2) with self.assertRaises(errors.FormatError): _ = list(artifact_reader.ReadFileObject(file_object)) def testReadFileObjectInvalidURLs(self): """Tests the ReadFileObject function on an invalid urls.""" artifact_reader = reader.YamlArtifactsReader() file_object = io.StringIO(initial_value=self._DEFINITION_INVALID_URLS) with self.assertRaises(errors.FormatError): _ = list(artifact_reader.ReadFileObject(file_object)) def testReadFileObjectWithExtraKey(self): """Tests the ReadFileObject function on a definition with extra key.""" artifact_reader = reader.YamlArtifactsReader() file_object = io.StringIO(initial_value=self._DEFINITION_WITH_EXTRA_KEY) with self.assertRaises(errors.FormatError): _ = list(artifact_reader.ReadFileObject(file_object)) def testReadFileObjectWithReturnTypes(self): """Tests the ReadFileObject function on a definition with return types.""" artifact_reader = 
reader.YamlArtifactsReader() file_object = io.StringIO(initial_value=self._DEFINITION_WITH_RETURN_TYPES) with self.assertRaises(errors.FormatError): _ = list(artifact_reader.ReadFileObject(file_object)) def testReadFileObjectWithoutDoc(self): """Tests the ReadFileObject function on a definition without doc.""" artifact_reader = reader.YamlArtifactsReader() file_object = io.StringIO(initial_value=self._DEFINITION_WITHOUT_DOC) with self.assertRaises(errors.FormatError): _ = list(artifact_reader.ReadFileObject(file_object)) def testReadFileObjectWithoutName(self): """Tests the ReadFileObject function on a definition without name.""" artifact_reader = reader.YamlArtifactsReader() file_object = io.StringIO(initial_value=self._DEFINITION_WITHOUT_NAME) with self.assertRaises(errors.FormatError): _ = list(artifact_reader.ReadFileObject(file_object)) def testReadFileObjectWithoutSources(self): """Tests the ReadFileObject function on a definition without sources.""" artifact_reader = reader.YamlArtifactsReader() file_object = io.StringIO(initial_value=self._DEFINITION_WITHOUT_SOURCES) with self.assertRaises(errors.FormatError): _ = list(artifact_reader.ReadFileObject(file_object)) def testReadYamlFile(self): """Tests the ReadFile function.""" test_file = self._GetTestFilePath(['definitions.yaml']) self._SkipIfPathNotExists(test_file) artifact_reader = reader.YamlArtifactsReader() artifact_definitions = list(artifact_reader.ReadFile(test_file)) self.assertEqual(len(artifact_definitions), 7) def testReadDirectory(self): """Tests the ReadDirectory function.""" artifact_reader = reader.YamlArtifactsReader() test_file = self._GetTestFilePath(['.']) artifact_definitions = list(artifact_reader.ReadDirectory(test_file)) self.assertEqual(len(artifact_definitions), 7) def testArtifactAsDict(self): """Tests the AsDict function.""" test_file = self._GetTestFilePath(['definitions.yaml']) self._SkipIfPathNotExists(test_file) artifact_reader = reader.YamlArtifactsReader() with 
open(test_file, 'r') as file_object: for artifact_definition in yaml.safe_load_all(file_object): artifact_object = artifact_reader.ReadArtifactDefinitionValues( artifact_definition) self.assertEqual(artifact_definition, artifact_object.AsDict()) def testDefinitionsAsDict(self): """Tests the AsDict function.""" artifact_reader = reader.YamlArtifactsReader() artifact_definitions = list(artifact_reader.ReadDirectory('data')) last_artifact_definition = None for artifact in artifact_definitions: try: artifact_definition = artifact.AsDict() except errors.FormatError: error_location = 'At start' if last_artifact_definition: error_location = 'After: {0}'.format(last_artifact_definition.name) self.fail('{0} failed to convert to dict'.format(error_location)) last_artifact_definition = artifact_definition class JsonArtifactsReaderTest(test_lib.BaseTestCase): """JSON artifacts reader tests.""" def testReadJsonFile(self): """Tests the ReadFile function.""" test_file = self._GetTestFilePath(['definitions.json']) self._SkipIfPathNotExists(test_file) artifact_reader = reader.JsonArtifactsReader() artifact_definitions = list(artifact_reader.ReadFile(test_file)) self.assertEqual(len(artifact_definitions), 7) if __name__ == '__main__': unittest.main()
apache-2.0
oczkers/gdown
gdown/modules/mediafire.py
1
3909
# -*- coding: utf-8 -*-
"""
gdown.modules.mediafire
~~~~~~~~~~~~~~~~~~~~~~~

This module contains handlers for mediafire.
"""

import re
import time
from hashlib import sha1

from dateutil import parser
from requests.exceptions import ConnectionError  # TODO?: import connection errors into gdown
from simplejson.scanner import JSONDecodeError

from ..module import browser, acc_info_template
from ..exceptions import ModuleError


def accInfo(username, passwd, proxy=False):
    """Log in to mediafire and return account status / expiry information.

    :param username: account e-mail address.
    :param passwd: account password.
    :param proxy: optional proxy forwarded to the browser session.
    :returns: an ``acc_info_template()`` dict with ``status`` set to one of
        'deleted', 'blocked', 'free' or 'premium'; premium accounts also get
        ``expire_date`` filled in.
    :raises ModuleError: on an unrecognised login error.
    """
    acc_info = acc_info_template()
    r = browser(proxy=proxy)

    # The mediafire API signs login requests with sha1(email+password+app_id).
    application_id = 42511  # mediafireapi official client
    signature = sha1()
    signature.update(username.encode('ascii'))
    signature.update(passwd.encode('ascii'))
    signature.update(str(application_id).encode('ascii'))
    signature = signature.hexdigest()

    data = {'application_id': application_id,
            'signature': signature,
            'email': username,
            'password': passwd,
            'response_format': 'json'}

    # FIX: retry in a loop instead of recursing.  The original recursive
    # retries could exhaust the interpreter stack during a long outage.
    while True:
        try:
            rc = r.post('https://www.mediafire.com/api/1.5/user/get_session_token.php', data=data).json()
        except ConnectionError:
            print('connection error')
            time.sleep(1)
            continue

        result = rc['response']['result']  # TODO: validate this
        if result != 'Error':
            break

        message = rc['response']['message']
        if message in ('The Credentials you entered are invalid',
                       'One or more parameters for this request are invalid'):
            acc_info['status'] = 'deleted'
            return acc_info
        elif message == 'Account Suspended':
            acc_info['status'] = 'blocked'
            return acc_info
        elif 'Internal server error' in message:
            # Transient server-side failure - wait and retry the login.
            time.sleep(5)
            continue
        else:
            print(rc)
            raise ModuleError('Unknown error during login.')

    token = rc['response']['session_token']
    # ekey = rc['response']['ekey']
    # pkey = rc['response']['pkey']

    data = {'session_token': token,
            'response_format': 'json'}
    try:
        rc = r.post('http://www.mediafire.com/api/1.5/user/get_info.php', data=data).json()
    except JSONDecodeError:
        # Non-JSON body usually means the IP got rate limited - retry once.
        print('ip banned?')
        time.sleep(5)
        rc = r.post('http://www.mediafire.com/api/1.5/user/get_info.php', data=data).json()

    if 'user_info' not in rc['response']:  # DEBUG
        if rc['response'].get('message') == 'Internal server error (1002)':
            time.sleep(5)
            rc = r.post('http://www.mediafire.com/api/1.5/user/get_info.php', data=data).json()
        else:
            print(rc)

    # result = rc['response']['result']  # TODO?: validate this
    premium = rc['response']['user_info']['premium'] == 'yes'
    # print(rc)
    if premium:
        acc_info['status'] = 'premium'
        data = {'session_token': token,
                'response_format': 'json'}
        rc = r.post('https://www.mediafire.com/api/1.5/billing/get_invoice.php', data=data).json()
        # invoice details are delayed (after you click on payment details on page?)
        # print(rc)
        # result = rc['response']['result']  # TODO?: validate this
        acc_info['expire_date'] = parser.parse(rc['response']['invoice']['recurring_startdate'])
        # TODO: transfer https://www.mediafire.com/developers/core_api/1.3/user/#get_limits
    else:
        acc_info['status'] = 'free'
    return acc_info


def getUrl(link, premium_key, username=None, passwd=None):
    """Returns direct file url."""
    # FIX: raw string for the pattern (the original non-raw '\.'/'\?' are
    # invalid escape sequences on py3) and the 'mediafire.com' dot escaped.
    match = re.match(r'http://[w.]{,4}mediafire\.com/\?(.+)', link)
    if match is None:
        # FIX: raise a module error instead of an opaque AttributeError
        # from calling .group() on None.
        raise ModuleError('Unrecognised mediafire link: %s' % link)
    fileid = match.group(1)
    r = browser()
    values = {'premium_key': premium_key,
              'files': fileid}
    content = r.post('http://www.mediafire.com/basicapi/premiumapi.php', values).text
    link = re.search('<url>(.+)</url>', content).group(1)
    return r.get(link).url
gpl-3.0
greg-hellings/cinch
cinch/bin/entry_point.py
1
3680
#!/usr/bin/env python from __future__ import print_function from argparse import ArgumentParser, REMAINDER from os import getcwd, path from wrappers import call_ansible, call_linchpin import sys def cinch_generic(playbook): # Parse the command line arguments parser = ArgumentParser(description='A wrapper around Cinch for the most ' 'common use case') # The inventory file that the user provides which will get passed along to # Ansible for its consumption parser.add_argument('inventory') # All remaining arguments are passed through, untouched, to Ansible parser.add_argument('args', nargs=REMAINDER) args = parser.parse_known_args() if len(args.inventory) > 0: if args.inventory[0] == '/': inventory = args.inventory else: inventory = path.join(getcwd(), args.inventory) else: raise Exception('Inventory path needs to be non-empty') exit_code = call_ansible(inventory, playbook, args.args) sys.exit(exit_code) def cinch(): """ Entry point for the "cinch" CLI that merely wraps the ansible-playbook command and pre-fills its path to the site.yml file for Cinch. The cinch tool requires a single argument - the Ansible inventory file - and accepts an arbitrary number of extra arguments that are passed through to the ansible-playbook executable. :return: Exit code 0 if the execution is completed successfully, or 255 if an unknown error occurs. If ansible-playbook exits with an error code, this executable will exit with the same code. """ cinch_generic('site.yml') def teardown(): """ Entry point for the "teardown" CLI that wraps ansible-playbook commands and pre-fills its path to the teardown.yml file. :return: Exit code 0 if the execution is completed successfully, or 255 if an unknown error occurs. If ansible-playbook exits with an error code, this executable will exit with the same code. 
""" cinch_generic('teardown.yml') def cinchpin(): """ Entry point for the "cinchpin" CLI that wraps the linchpin command and loads the linch-pin PinFile to provision resources and then uses the generated inventory file to pass to cinch. The cinchpin tool requires a single argument - a valid linchpin subcommand - and accepts an arbitrary number of extra arguments that are passed through to the linchpin executable. If a linch-pin PinFile is not found in the current working directory, a path to a linch-pin working directory may be optionally provided. :return: Exit code 0 if the execution is completed successfully, or 255 if an unknown error occurs. If linchpin exits with an error code, this executable will exit with the same code. """ # Parse the command line arguments parser = ArgumentParser(description='A wrapper around linchpin for the ' 'most common use case') # The linch-pin working directory containing a PinFile that the user # provides which will get passed along to linchpin for its consumption parser.add_argument('-w', '--workdir', default=getcwd(), help='''path to linch-pin working directory containing a PinFile''') # All remaining arguments are passed through, untouched, to linchpin parser.add_argument('arg', help='argument to pass to the linchpin command') args = parser.parse_args() exit_code = call_linchpin(args.workdir, args.arg) sys.exit(exit_code) if __name__ == '__main__': print('You should not invoke this file directly.') sys.exit(1)
gpl-3.0
TridevGuha/django
tests/field_deconstruction/tests.py
3
18388
from __future__ import unicode_literals

from django.apps import apps
from django.db import models
from django.test import SimpleTestCase, override_settings
from django.test.utils import isolate_lru_cache
from django.utils import six


class FieldDeconstructionTests(SimpleTestCase):
    """
    Tests the deconstruct() method on all core fields.
    """
    # Each test follows the same pattern: build a field, call deconstruct(),
    # and check the returned (name, path, args, kwargs) 4-tuple - i.e. that
    # only non-default options survive into the migration-serialisable form.

    def test_name(self):
        """
        Tests the outputting of the correct name if assigned one.
        """
        # First try using a "normal" field
        field = models.CharField(max_length=65)
        name, path, args, kwargs = field.deconstruct()
        self.assertIsNone(name)
        field.set_attributes_from_name("is_awesome_test")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(name, "is_awesome_test")
        # The deconstructed name must be text, not bytes, on both py2/py3.
        self.assertIsInstance(name, six.text_type)
        # Now try with a ForeignKey
        field = models.ForeignKey("some_fake.ModelName", models.CASCADE)
        name, path, args, kwargs = field.deconstruct()
        self.assertIsNone(name)
        field.set_attributes_from_name("author")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(name, "author")

    def test_auto_field(self):
        field = models.AutoField(primary_key=True)
        field.set_attributes_from_name("id")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.AutoField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"primary_key": True})

    def test_big_integer_field(self):
        field = models.BigIntegerField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.BigIntegerField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})

    def test_boolean_field(self):
        field = models.BooleanField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.BooleanField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})
        field = models.BooleanField(default=True)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.BooleanField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"default": True})

    def test_char_field(self):
        field = models.CharField(max_length=65)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.CharField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"max_length": 65})
        field = models.CharField(max_length=65, null=True, blank=True)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.CharField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"max_length": 65, "null": True, "blank": True})

    def test_char_field_choices(self):
        # choices given as a tuple must deconstruct to a list.
        field = models.CharField(max_length=1, choices=(("A", "One"), ("B", "Two")))
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.CharField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"choices": [("A", "One"), ("B", "Two")], "max_length": 1})

    def test_csi_field(self):
        field = models.CommaSeparatedIntegerField(max_length=100)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.CommaSeparatedIntegerField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"max_length": 100})

    def test_date_field(self):
        field = models.DateField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.DateField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})
        field = models.DateField(auto_now=True)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.DateField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"auto_now": True})

    def test_datetime_field(self):
        field = models.DateTimeField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.DateTimeField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})
        field = models.DateTimeField(auto_now_add=True)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.DateTimeField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"auto_now_add": True})
        # Bug #21785
        field = models.DateTimeField(auto_now=True, auto_now_add=True)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.DateTimeField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"auto_now_add": True, "auto_now": True})

    def test_decimal_field(self):
        field = models.DecimalField(max_digits=5, decimal_places=2)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.DecimalField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 2})

    def test_decimal_field_0_decimal_places(self):
        """
        A DecimalField with decimal_places=0 should work (#22272).
        """
        field = models.DecimalField(max_digits=5, decimal_places=0)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.DecimalField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"max_digits": 5, "decimal_places": 0})

    def test_email_field(self):
        # The implicit max_length=254 default is still serialised explicitly.
        field = models.EmailField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.EmailField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"max_length": 254})
        field = models.EmailField(max_length=255)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.EmailField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"max_length": 255})

    def test_file_field(self):
        field = models.FileField(upload_to="foo/bar")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.FileField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"upload_to": "foo/bar"})
        # Test max_length
        field = models.FileField(upload_to="foo/bar", max_length=200)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.FileField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"upload_to": "foo/bar", "max_length": 200})

    def test_file_path_field(self):
        field = models.FilePathField(match=".*\.txt$")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.FilePathField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"match": ".*\.txt$"})
        field = models.FilePathField(recursive=True, allow_folders=True, max_length=123)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.FilePathField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"recursive": True, "allow_folders": True, "max_length": 123})

    def test_float_field(self):
        field = models.FloatField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.FloatField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})

    def test_foreign_key(self):
        # Test basic pointing
        from django.contrib.auth.models import Permission
        field = models.ForeignKey("auth.Permission", models.CASCADE)
        field.remote_field.model = Permission
        field.remote_field.field_name = "id"
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ForeignKey")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.Permission", "on_delete": models.CASCADE})
        self.assertFalse(hasattr(kwargs['to'], "setting_name"))
        # Test swap detection for swappable model
        field = models.ForeignKey("auth.User", models.CASCADE)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ForeignKey")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.User", "on_delete": models.CASCADE})
        self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
        # Test nonexistent (for now) model
        field = models.ForeignKey("something.Else", models.CASCADE)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ForeignKey")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "something.Else", "on_delete": models.CASCADE})
        # Test on_delete
        field = models.ForeignKey("auth.User", models.SET_NULL)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ForeignKey")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.User", "on_delete": models.SET_NULL})
        # Test to_field preservation
        field = models.ForeignKey("auth.Permission", models.CASCADE, to_field="foobar")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ForeignKey")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.Permission", "to_field": "foobar", "on_delete": models.CASCADE})
        # Test related_name preservation
        field = models.ForeignKey("auth.Permission", models.CASCADE, related_name="foobar")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ForeignKey")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.Permission", "related_name": "foobar", "on_delete": models.CASCADE})

    @override_settings(AUTH_USER_MODEL="auth.Permission")
    def test_foreign_key_swapped(self):
        # isolate_lru_cache keeps the swappable-settings cache from leaking
        # the overridden AUTH_USER_MODEL into other tests.
        with isolate_lru_cache(apps.get_swappable_settings_name):
            # It doesn't matter that we swapped out user for permission;
            # there's no validation. We just want to check the setting stuff works.
            field = models.ForeignKey("auth.Permission", models.CASCADE)
            name, path, args, kwargs = field.deconstruct()

        self.assertEqual(path, "django.db.models.ForeignKey")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.Permission", "on_delete": models.CASCADE})
        self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")

    def test_image_field(self):
        field = models.ImageField(upload_to="foo/barness", width_field="width", height_field="height")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ImageField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"upload_to": "foo/barness", "width_field": "width", "height_field": "height"})

    def test_integer_field(self):
        field = models.IntegerField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.IntegerField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})

    def test_ip_address_field(self):
        field = models.IPAddressField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.IPAddressField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})

    def test_generic_ip_address_field(self):
        field = models.GenericIPAddressField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.GenericIPAddressField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})
        field = models.GenericIPAddressField(protocol="IPv6")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.GenericIPAddressField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"protocol": "IPv6"})

    def test_many_to_many_field(self):
        # Test normal
        field = models.ManyToManyField("auth.Permission")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ManyToManyField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.Permission"})
        self.assertFalse(hasattr(kwargs['to'], "setting_name"))
        # Test swappable
        field = models.ManyToManyField("auth.User")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ManyToManyField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.User"})
        self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")
        # Test through
        field = models.ManyToManyField("auth.Permission", through="auth.Group")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ManyToManyField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.Permission", "through": "auth.Group"})
        # Test custom db_table
        field = models.ManyToManyField("auth.Permission", db_table="custom_table")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ManyToManyField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.Permission", "db_table": "custom_table"})
        # Test related_name
        field = models.ManyToManyField("auth.Permission", related_name="custom_table")
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.ManyToManyField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.Permission", "related_name": "custom_table"})

    @override_settings(AUTH_USER_MODEL="auth.Permission")
    def test_many_to_many_field_swapped(self):
        with isolate_lru_cache(apps.get_swappable_settings_name):
            # It doesn't matter that we swapped out user for permission;
            # there's no validation. We just want to check the setting stuff works.
            field = models.ManyToManyField("auth.Permission")
            name, path, args, kwargs = field.deconstruct()

        self.assertEqual(path, "django.db.models.ManyToManyField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"to": "auth.Permission"})
        self.assertEqual(kwargs['to'].setting_name, "AUTH_USER_MODEL")

    def test_null_boolean_field(self):
        field = models.NullBooleanField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.NullBooleanField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})

    def test_positive_integer_field(self):
        field = models.PositiveIntegerField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.PositiveIntegerField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})

    def test_positive_small_integer_field(self):
        field = models.PositiveSmallIntegerField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.PositiveSmallIntegerField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})

    def test_slug_field(self):
        field = models.SlugField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.SlugField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})
        field = models.SlugField(db_index=False, max_length=231)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.SlugField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"db_index": False, "max_length": 231})

    def test_small_integer_field(self):
        field = models.SmallIntegerField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.SmallIntegerField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})

    def test_text_field(self):
        field = models.TextField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.TextField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})

    def test_time_field(self):
        field = models.TimeField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.TimeField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})

        field = models.TimeField(auto_now=True)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {'auto_now': True})

        field = models.TimeField(auto_now_add=True)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {'auto_now_add': True})

    def test_url_field(self):
        field = models.URLField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.URLField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})
        field = models.URLField(max_length=231)
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.URLField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {"max_length": 231})

    def test_binary_field(self):
        field = models.BinaryField()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, "django.db.models.BinaryField")
        self.assertEqual(args, [])
        self.assertEqual(kwargs, {})
bsd-3-clause
FlintHill/SUAS-Competition
env/lib/python2.7/site-packages/setuptools/command/install.py
529
4683
from distutils.errors import DistutilsArgError
import inspect
import glob
import warnings
import platform
import distutils.command.install as orig

import setuptools

# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
# now. See https://github.com/pypa/setuptools/issues/199/
_install = orig.install


class install(orig.install):
    """Use easy_install to install the package, w/dependencies"""

    # Extend distutils' option tables with the two setuptools-specific flags
    # that select the legacy (non-egg) installation paths.
    user_options = orig.install.user_options + [
        ('old-and-unmanageable', None, "Try not to use this!"),
        ('single-version-externally-managed', None,
         "used by system package builders to create 'flat' eggs"),
    ]
    boolean_options = orig.install.boolean_options + [
        'old-and-unmanageable', 'single-version-externally-managed',
    ]
    # Sub-commands added on top of distutils' install; the lambda predicates
    # mean "always run".
    new_commands = [
        ('install_egg_info', lambda self: True),
        ('install_scripts', lambda self: True),
    ]
    _nc = dict(new_commands)

    def initialize_options(self):
        orig.install.initialize_options(self)
        self.old_and_unmanageable = None
        self.single_version_externally_managed = None

    def finalize_options(self):
        orig.install.finalize_options(self)
        # --root implies a system-package build, which must be "flat".
        if self.root:
            self.single_version_externally_managed = True
        elif self.single_version_externally_managed:
            if not self.root and not self.record:
                raise DistutilsArgError(
                    "You must specify --record or --root when building system"
                    " packages"
                )

    def handle_extra_path(self):
        if self.root or self.single_version_externally_managed:
            # explicit backward-compatibility mode, allow extra_path to work
            return orig.install.handle_extra_path(self)

        # Ignore extra_path when installing an egg (or being run by another
        # command without --root or --single-version-externally-managed
        self.path_file = None
        self.extra_dirs = ''

    def run(self):
        # Explicit request for old-style install? Just do it
        if self.old_and_unmanageable or self.single_version_externally_managed:
            return orig.install.run(self)

        if not self._called_from_setup(inspect.currentframe()):
            # Run in backward-compatibility mode to support bdist_* commands.
            orig.install.run(self)
        else:
            self.do_egg_install()

    @staticmethod
    def _called_from_setup(run_frame):
        """
        Attempt to detect whether run() was called from setup() or by another
        command.  If called by setup(), the parent caller will be the
        'run_command' method in 'distutils.dist', and *its* caller will be
        the 'run_commands' method.  If called any other way, the
        immediate caller *might* be 'run_command', but it won't have been
        called by 'run_commands'. Return True in that case or if a call stack
        is unavailable. Return False otherwise.
        """
        if run_frame is None:
            # No frame support (e.g. IronPython without -X:Frames): assume
            # the setup() path so eggs still get built.
            msg = "Call stack not available. bdist_* commands may fail."
            warnings.warn(msg)
            if platform.python_implementation() == 'IronPython':
                msg = "For best results, pass -X:Frames to enable call stack."
                warnings.warn(msg)
            return True
        # Walk two frames up from run(): run_command -> run_commands.
        res = inspect.getouterframes(run_frame)[2]
        caller, = res[:1]
        info = inspect.getframeinfo(caller)
        caller_module = caller.f_globals.get('__name__', '')
        return (
            caller_module == 'distutils.dist'
            and info.function == 'run_commands'
        )

    def do_egg_install(self):
        # Build an egg with bdist_egg, then hand it to easy_install so the
        # package lands as a managed egg with its dependencies resolved.
        easy_install = self.distribution.get_command_class('easy_install')

        cmd = easy_install(
            self.distribution, args="x", root=self.root, record=self.record,
        )
        cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
        cmd.always_copy_from = '.'  # make sure local-dir eggs get installed

        # pick up setup-dir .egg files only: no .egg-info
        cmd.package_index.scan(glob.glob('*.egg'))

        self.run_command('bdist_egg')
        args = [self.distribution.get_command_obj('bdist_egg').egg_output]

        if setuptools.bootstrap_install_from:
            # Bootstrap self-installation of setuptools
            args.insert(0, setuptools.bootstrap_install_from)

        cmd.args = args
        cmd.run()
        setuptools.bootstrap_install_from = None


# XXX Python 3.1 doesn't see _nc if this is inside the class
install.sub_commands = (
    [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
    install.new_commands
)
mit
Shaps/ansible
lib/ansible/plugins/filter/urls.py
97
1865
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Dag Wieers (@dagwieers) <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

from ansible.module_utils.six import PY3, iteritems, string_types
from ansible.module_utils.six.moves.urllib.parse import quote, quote_plus, unquote_plus
from ansible.module_utils._text import to_bytes, to_text

try:
    from jinja2.filters import do_urlencode
    HAS_URLENCODE = True
except ImportError:
    HAS_URLENCODE = False


def unicode_urldecode(string):
    """URL-decode *string*, returning text on both Python 2 and 3."""
    # Python 2's unquote_plus wants bytes; round-trip through to_bytes/to_text.
    if not PY3:
        return to_text(unquote_plus(to_bytes(string)))
    return unquote_plus(string)


def do_urldecode(string):
    """Jinja2 filter entry point for 'urldecode'."""
    return unicode_urldecode(string)


# NOTE: We implement urlencode when Jinja2 is older than v2.7
def unicode_urlencode(string, for_qs=False):
    """URL-encode *string*; query-string mode also quotes '/' and uses '+'."""
    quote_func = quote_plus if for_qs else quote
    safe = b'' if for_qs else b'/'
    if PY3:
        return quote_func(string, safe)
    return to_text(quote_func(to_bytes(string), safe))


def do_urlencode(value):
    """Encode a string, a mapping, or an iterable of (key, value) pairs."""
    pairs = None
    if isinstance(value, dict):
        pairs = iteritems(value)
    elif not isinstance(value, string_types):
        try:
            pairs = iter(value)
        except TypeError:
            pairs = None

    if pairs is None:
        # Scalar (or string) input: encode it directly.
        return unicode_urlencode(value)

    encoded = (unicode_urlencode(k) + '=' + unicode_urlencode(v, for_qs=True)
               for k, v in pairs)
    return u'&'.join(encoded)


class FilterModule(object):
    ''' Ansible core jinja2 filters '''

    def filters(self):
        # Prefer Jinja2's native urlencode when it exists (v2.7+).
        result = {'urldecode': do_urldecode}
        if not HAS_URLENCODE:
            result['urlencode'] = do_urlencode
        return result
gpl-3.0
tino/django-contact-form
contact_form/views.py
3
3283
""" View which can render and send email from a contact form. """ from django.shortcuts import render_to_response, redirect from django.template import RequestContext from contact_form.forms import ContactForm def contact_form(request, form_class=ContactForm, template_name='contact_form/contact_form.html', success_url=None, extra_context=None, fail_silently=False): """ Render a contact form, validate its input and send an email from it. **Optional arguments:** ``extra_context`` A dictionary of variables to add to the template context. Any callable object in this dictionary will be called to produce the end result which appears in the context. ``fail_silently`` If ``True``, errors when sending the email will be silently supressed (i.e., with no logging or reporting of any such errors. Default value is ``False``. ``form_class`` The form to use. If not supplied, this will default to ``contact_form.forms.ContactForm``. If supplied, the form class must implement a method named ``save()`` which sends the email from the form; the form class must accept an ``HttpRequest`` as the keyword argument ``request`` to its constructor, and it must implement a method named ``save()`` which sends the email and which accepts the keyword argument ``fail_silently``. ``success_url`` The URL to redirect to after a successful submission. If not supplied, this will default to the URL pointed to by the named URL pattern ``contact_form_sent``. ``template_name`` The template to use for rendering the contact form. If not supplied, defaults to :template:`contact_form/contact_form.html`. **Context:** ``form`` The form instance. **Template:** The value of the ``template_name`` keyword argument, or :template:`contact_form/contact_form.html`. """ # # We set up success_url here, rather than as the default value for # the argument. 
Trying to do it as the argument's default would # mean evaluating the call to reverse() at the time this module is # first imported, which introduces a circular dependency: to # perform the reverse lookup we need access to contact_form/urls.py, # but contact_form/urls.py in turn imports from this module. # if request.method == 'POST': form = form_class(data=request.POST, files=request.FILES, request=request) if form.is_valid(): form.save(fail_silently=fail_silently) if success_url is None: to, args, kwargs = form.get_success_redirect() return redirect(to, *args, **kwargs) else: return redirect(success_url) else: form = form_class(request=request) if extra_context is None: extra_context = {} context = RequestContext(request) for key, value in extra_context.items(): context[key] = callable(value) and value() or value return render_to_response(template_name, { 'form': form }, context_instance=context)
bsd-3-clause
kprkpr/kernel-e400
venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/__init__.py
650
1701
# urllib3/__init__.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""
urllib3 - Thread-safe connection pooling and re-using.
"""

__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
__version__ = 'dev'


from .connectionpool import (
    HTTPConnectionPool,
    HTTPSConnectionPool,
    connection_from_url
)

from . import exceptions
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util import make_headers, get_host, Timeout


# Set default logging handler to avoid "No handler found" warnings.
import logging
try:  # Python 2.7+
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

logging.getLogger(__name__).addHandler(NullHandler())


def add_stderr_logger(level=logging.DEBUG):
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    logger = logging.getLogger(__name__)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(level)
    # FIX: pass __name__ as a lazy logging argument instead of eagerly
    # %-formatting the message string - the idiomatic (and cheaper when the
    # level is disabled) way to parameterise log records.
    logger.debug('Added an stderr logging handler to logger: %s', __name__)
    return handler

# ... Clean up.
del NullHandler
gpl-2.0
duyetdev/islab-portfolio-by-ghost
node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/build-2.7/pygments/lexers/hdl.py
363
16209
# -*- coding: utf-8 -*- """ pygments.lexers.hdl ~~~~~~~~~~~~~~~~~~~ Lexers for hardware descriptor languages. :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re from pygments.lexer import RegexLexer, bygroups, include, using, this from pygments.token import \ Text, Comment, Operator, Keyword, Name, String, Number, Punctuation, \ Error __all__ = ['VerilogLexer', 'SystemVerilogLexer', 'VhdlLexer'] class VerilogLexer(RegexLexer): """ For verilog source code with preprocessor directives. *New in Pygments 1.4.* """ name = 'verilog' aliases = ['verilog', 'v'] filenames = ['*.v'] mimetypes = ['text/x-verilog'] #: optional Comment or Whitespace _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+' tokens = { 'root': [ (r'^\s*`define', Comment.Preproc, 'macro'), (r'\n', Text), (r'\s+', Text), (r'\\\n', Text), # line continuation (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single), (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), (r'[{}#@]', Punctuation), (r'L?"', String, 'string'), (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char), (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float), (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex), (r'([0-9]+)|(\'b)[0-1]+', Number.Hex), # should be binary (r'([0-9]+)|(\'d)[0-9]+', Number.Integer), (r'([0-9]+)|(\'o)[0-7]+', Number.Oct), (r'\'[01xz]', Number), (r'\d+[Ll]?', Number.Integer), (r'\*/', Error), (r'[~!%^&*+=|?:<>/-]', Operator), (r'[()\[\],.;\']', Punctuation), (r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant), (r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)), (r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text), 'import'), (r'(always|always_comb|always_ff|always_latch|and|assign|automatic|' r'begin|break|buf|bufif0|bufif1|case|casex|casez|cmos|const|' r'continue|deassign|default|defparam|disable|do|edge|else|end|endcase|' 
r'endfunction|endgenerate|endmodule|endpackage|endprimitive|endspecify|' r'endtable|endtask|enum|event|final|for|force|forever|fork|function|' r'generate|genvar|highz0|highz1|if|initial|inout|input|' r'integer|join|large|localparam|macromodule|medium|module|' r'nand|negedge|nmos|nor|not|notif0|notif1|or|output|packed|' r'parameter|pmos|posedge|primitive|pull0|pull1|pulldown|pullup|rcmos|' r'ref|release|repeat|return|rnmos|rpmos|rtran|rtranif0|' r'rtranif1|scalared|signed|small|specify|specparam|strength|' r'string|strong0|strong1|struct|table|task|' r'tran|tranif0|tranif1|type|typedef|' r'unsigned|var|vectored|void|wait|weak0|weak1|while|' r'xnor|xor)\b', Keyword), (r'`(accelerate|autoexpand_vectornets|celldefine|default_nettype|' r'else|elsif|endcelldefine|endif|endprotect|endprotected|' r'expand_vectornets|ifdef|ifndef|include|noaccelerate|noexpand_vectornets|' r'noremove_gatenames|noremove_netnames|nounconnected_drive|' r'protect|protected|remove_gatenames|remove_netnames|resetall|' r'timescale|unconnected_drive|undef)\b', Comment.Preproc), (r'\$(bits|bitstoreal|bitstoshortreal|countdrivers|display|fclose|' r'fdisplay|finish|floor|fmonitor|fopen|fstrobe|fwrite|' r'getpattern|history|incsave|input|itor|key|list|log|' r'monitor|monitoroff|monitoron|nokey|nolog|printtimescale|' r'random|readmemb|readmemh|realtime|realtobits|reset|reset_count|' r'reset_value|restart|rtoi|save|scale|scope|shortrealtobits|' r'showscopes|showvariables|showvars|sreadmemb|sreadmemh|' r'stime|stop|strobe|time|timeformat|write)\b', Name.Builtin), (r'(byte|shortint|int|longint|integer|time|' r'bit|logic|reg|' r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor' r'shortreal|real|realtime)\b', Keyword.Type), ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label), ('[a-zA-Z_][a-zA-Z0-9_]*', Name), ], 'string': [ (r'"', String, '#pop'), (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), (r'[^\\"\n]+', String), # all other characters (r'\\\n', String), # line 
continuation (r'\\', String), # stray backslash ], 'macro': [ (r'[^/\n]+', Comment.Preproc), (r'/[*](.|\n)*?[*]/', Comment.Multiline), (r'//.*?\n', Comment.Single, '#pop'), (r'/', Comment.Preproc), (r'(?<=\\)\n', Comment.Preproc), (r'\n', Comment.Preproc, '#pop'), ], 'import': [ (r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop') ] } def get_tokens_unprocessed(self, text): for index, token, value in \ RegexLexer.get_tokens_unprocessed(self, text): # Convention: mark all upper case names as constants if token is Name: if value.isupper(): token = Name.Constant yield index, token, value class SystemVerilogLexer(RegexLexer): """ Extends verilog lexer to recognise all SystemVerilog keywords from IEEE 1800-2009 standard. *New in Pygments 1.5.* """ name = 'systemverilog' aliases = ['systemverilog', 'sv'] filenames = ['*.sv', '*.svh'] mimetypes = ['text/x-systemverilog'] #: optional Comment or Whitespace _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+' tokens = { 'root': [ (r'^\s*`define', Comment.Preproc, 'macro'), (r'^(\s*)(package)(\s+)', bygroups(Text, Keyword.Namespace, Text)), (r'^(\s*)(import)(\s+)', bygroups(Text, Keyword.Namespace, Text), 'import'), (r'\n', Text), (r'\s+', Text), (r'\\\n', Text), # line continuation (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single), (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), (r'[{}#@]', Punctuation), (r'L?"', String, 'string'), (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char), (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float), (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), (r'([0-9]+)|(\'h)[0-9a-fA-F]+', Number.Hex), (r'([0-9]+)|(\'b)[0-1]+', Number.Hex), # should be binary (r'([0-9]+)|(\'d)[0-9]+', Number.Integer), (r'([0-9]+)|(\'o)[0-7]+', Number.Oct), (r'\'[01xz]', Number), (r'\d+[Ll]?', Number.Integer), (r'\*/', Error), (r'[~!%^&*+=|?:<>/-]', Operator), (r'[()\[\],.;\']', Punctuation), (r'`[a-zA-Z_][a-zA-Z0-9_]*', Name.Constant), (r'(accept_on|alias|always|always_comb|always_ff|always_latch|' 
r'and|assert|assign|assume|automatic|before|begin|bind|bins|' r'binsof|bit|break|buf|bufif0|bufif1|byte|case|casex|casez|' r'cell|chandle|checker|class|clocking|cmos|config|const|constraint|' r'context|continue|cover|covergroup|coverpoint|cross|deassign|' r'default|defparam|design|disable|dist|do|edge|else|end|endcase|' r'endchecker|endclass|endclocking|endconfig|endfunction|endgenerate|' r'endgroup|endinterface|endmodule|endpackage|endprimitive|' r'endprogram|endproperty|endsequence|endspecify|endtable|' r'endtask|enum|event|eventually|expect|export|extends|extern|' r'final|first_match|for|force|foreach|forever|fork|forkjoin|' r'function|generate|genvar|global|highz0|highz1|if|iff|ifnone|' r'ignore_bins|illegal_bins|implies|import|incdir|include|' r'initial|inout|input|inside|instance|int|integer|interface|' r'intersect|join|join_any|join_none|large|let|liblist|library|' r'local|localparam|logic|longint|macromodule|matches|medium|' r'modport|module|nand|negedge|new|nexttime|nmos|nor|noshowcancelled|' r'not|notif0|notif1|null|or|output|package|packed|parameter|' r'pmos|posedge|primitive|priority|program|property|protected|' r'pull0|pull1|pulldown|pullup|pulsestyle_ondetect|pulsestyle_onevent|' r'pure|rand|randc|randcase|randsequence|rcmos|real|realtime|' r'ref|reg|reject_on|release|repeat|restrict|return|rnmos|' r'rpmos|rtran|rtranif0|rtranif1|s_always|s_eventually|s_nexttime|' r's_until|s_until_with|scalared|sequence|shortint|shortreal|' r'showcancelled|signed|small|solve|specify|specparam|static|' r'string|strong|strong0|strong1|struct|super|supply0|supply1|' r'sync_accept_on|sync_reject_on|table|tagged|task|this|throughout|' r'time|timeprecision|timeunit|tran|tranif0|tranif1|tri|tri0|' r'tri1|triand|trior|trireg|type|typedef|union|unique|unique0|' r'unsigned|until|until_with|untyped|use|uwire|var|vectored|' r'virtual|void|wait|wait_order|wand|weak|weak0|weak1|while|' r'wildcard|wire|with|within|wor|xnor|xor)\b', Keyword ), 
(r'(`__FILE__|`__LINE__|`begin_keywords|`celldefine|`default_nettype|' r'`define|`else|`elsif|`end_keywords|`endcelldefine|`endif|' r'`ifdef|`ifndef|`include|`line|`nounconnected_drive|`pragma|' r'`resetall|`timescale|`unconnected_drive|`undef|`undefineall)\b', Comment.Preproc ), (r'(\$display|\$displayb|\$displayh|\$displayo|\$dumpall|\$dumpfile|' r'\$dumpflush|\$dumplimit|\$dumpoff|\$dumpon|\$dumpports|' r'\$dumpportsall|\$dumpportsflush|\$dumpportslimit|\$dumpportsoff|' r'\$dumpportson|\$dumpvars|\$fclose|\$fdisplay|\$fdisplayb|' r'\$fdisplayh|\$fdisplayo|\$feof|\$ferror|\$fflush|\$fgetc|' r'\$fgets|\$fmonitor|\$fmonitorb|\$fmonitorh|\$fmonitoro|' r'\$fopen|\$fread|\$fscanf|\$fseek|\$fstrobe|\$fstrobeb|\$fstrobeh|' r'\$fstrobeo|\$ftell|\$fwrite|\$fwriteb|\$fwriteh|\$fwriteo|' r'\$monitor|\$monitorb|\$monitorh|\$monitoro|\$monitoroff|' r'\$monitoron|\$plusargs|\$readmemb|\$readmemh|\$rewind|\$sformat|' r'\$sformatf|\$sscanf|\$strobe|\$strobeb|\$strobeh|\$strobeo|' r'\$swrite|\$swriteb|\$swriteh|\$swriteo|\$test|\$ungetc|' r'\$value\$plusargs|\$write|\$writeb|\$writeh|\$writememb|' r'\$writememh|\$writeo)\b' , Name.Builtin ), (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'), (r'(byte|shortint|int|longint|integer|time|' r'bit|logic|reg|' r'supply0|supply1|tri|triand|trior|tri0|tri1|trireg|uwire|wire|wand|wor' r'shortreal|real|realtime)\b', Keyword.Type), ('[a-zA-Z_][a-zA-Z0-9_]*:(?!:)', Name.Label), ('[a-zA-Z_][a-zA-Z0-9_]*', Name), ], 'classname': [ (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop'), ], 'string': [ (r'"', String, '#pop'), (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), (r'[^\\"\n]+', String), # all other characters (r'\\\n', String), # line continuation (r'\\', String), # stray backslash ], 'macro': [ (r'[^/\n]+', Comment.Preproc), (r'/[*](.|\n)*?[*]/', Comment.Multiline), (r'//.*?\n', Comment.Single, '#pop'), (r'/', Comment.Preproc), (r'(?<=\\)\n', Comment.Preproc), (r'\n', Comment.Preproc, '#pop'), ], 'import': [ 
(r'[a-zA-Z0-9_:]+\*?', Name.Namespace, '#pop') ] } def get_tokens_unprocessed(self, text): for index, token, value in \ RegexLexer.get_tokens_unprocessed(self, text): # Convention: mark all upper case names as constants if token is Name: if value.isupper(): token = Name.Constant yield index, token, value def analyse_text(text): if text.startswith('//') or text.startswith('/*'): return 0.5 class VhdlLexer(RegexLexer): """ For VHDL source code. *New in Pygments 1.5.* """ name = 'vhdl' aliases = ['vhdl'] filenames = ['*.vhdl', '*.vhd'] mimetypes = ['text/x-vhdl'] flags = re.MULTILINE | re.IGNORECASE tokens = { 'root': [ (r'\n', Text), (r'\s+', Text), (r'\\\n', Text), # line continuation (r'--(?![!#$%&*+./<=>?@\^|_~]).*?$', Comment.Single), (r"'(U|X|0|1|Z|W|L|H|-)'", String.Char), (r'[~!%^&*+=|?:<>/-]', Operator), (r"'[a-zA-Z_][a-zA-Z0-9_]*", Name.Attribute), (r'[()\[\],.;\']', Punctuation), (r'"[^\n\\]*"', String), (r'(library)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Keyword, Text, Name.Namespace)), (r'(use)(\s+)(entity)', bygroups(Keyword, Text, Keyword)), (r'(use)(\s+)([a-zA-Z_][\.a-zA-Z0-9_]*)', bygroups(Keyword, Text, Name.Namespace)), (r'(entity|component)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Keyword, Text, Name.Class)), (r'(architecture|configuration)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)(\s+)' r'(of)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)(\s+)(is)', bygroups(Keyword, Text, Name.Class, Text, Keyword, Text, Name.Class, Text, Keyword)), (r'(end)(\s+)', bygroups(using(this), Text), 'endblock'), include('types'), include('keywords'), include('numbers'), (r'[a-zA-Z_][a-zA-Z0-9_]*', Name), ], 'endblock': [ include('keywords'), (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class), (r'(\s+)', Text), (r';', Punctuation, '#pop'), ], 'types': [ (r'(boolean|bit|character|severity_level|integer|time|delay_length|' r'natural|positive|string|bit_vector|file_open_kind|' r'file_open_status|std_ulogic|std_ulogic_vector|std_logic|' r'std_logic_vector)\b', Keyword.Type), ], 'keywords': [ 
(r'(abs|access|after|alias|all|and|' r'architecture|array|assert|attribute|begin|block|' r'body|buffer|bus|case|component|configuration|' r'constant|disconnect|downto|else|elsif|end|' r'entity|exit|file|for|function|generate|' r'generic|group|guarded|if|impure|in|' r'inertial|inout|is|label|library|linkage|' r'literal|loop|map|mod|nand|new|' r'next|nor|not|null|of|on|' r'open|or|others|out|package|port|' r'postponed|procedure|process|pure|range|record|' r'register|reject|return|rol|ror|select|' r'severity|signal|shared|sla|sli|sra|' r'srl|subtype|then|to|transport|type|' r'units|until|use|variable|wait|when|' r'while|with|xnor|xor)\b', Keyword), ], 'numbers': [ (r'\d{1,2}#[0-9a-fA-F_]+#?', Number.Integer), (r'[0-1_]+(\.[0-1_])', Number.Integer), (r'\d+', Number.Integer), (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float), (r'H"[0-9a-fA-F_]+"', Number.Oct), (r'O"[0-7_]+"', Number.Oct), (r'B"[0-1_]+"', Number.Oct), ], }
mit
FICTURE7/youtube-dl
youtube_dl/extractor/gameone.py
208
4519
# coding: utf-8 from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import ( xpath_with_ns, parse_iso8601, float_or_none, int_or_none, ) NAMESPACE_MAP = { 'media': 'http://search.yahoo.com/mrss/', } # URL prefix to download the mp4 files directly instead of streaming via rtmp # Credits go to XBox-Maniac # http://board.jdownloader.org/showpost.php?p=185835&postcount=31 RAW_MP4_URL = 'http://cdn.riptide-mtvn.com/' class GameOneIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?gameone\.de/tv/(?P<id>\d+)' _TESTS = [ { 'url': 'http://www.gameone.de/tv/288', 'md5': '136656b7fb4c9cb4a8e2d500651c499b', 'info_dict': { 'id': '288', 'ext': 'mp4', 'title': 'Game One - Folge 288', 'duration': 1238, 'thumbnail': 'http://s3.gameone.de/gameone/assets/video_metas/teaser_images/000/643/636/big/640x360.jpg', 'description': 'FIFA-Pressepokal 2014, Star Citizen, Kingdom Come: Deliverance, Project Cars, Schöner Trants Nerdquiz Folge 2 Runde 1', 'age_limit': 16, 'upload_date': '20140513', 'timestamp': 1399980122, } }, { 'url': 'http://gameone.de/tv/220', 'md5': '5227ca74c4ae6b5f74c0510a7c48839e', 'info_dict': { 'id': '220', 'ext': 'mp4', 'upload_date': '20120918', 'description': 'Jet Set Radio HD, Tekken Tag Tournament 2, Source Filmmaker', 'timestamp': 1347971451, 'title': 'Game One - Folge 220', 'duration': 896.62, 'age_limit': 16, } } ] def _real_extract(self, url): video_id = self._match_id(url) webpage = self._download_webpage(url, video_id) og_video = self._og_search_video_url(webpage, secure=False) description = self._html_search_meta('description', webpage) age_limit = int( self._search_regex( r'age=(\d+)', self._html_search_meta( 'age-de-meta-label', webpage), 'age_limit', '0')) mrss_url = self._search_regex(r'mrss=([^&]+)', og_video, 'mrss') mrss = self._download_xml(mrss_url, video_id, 'Downloading mrss') title = mrss.find('.//item/title').text thumbnail = mrss.find('.//item/image').get('url') timestamp = 
parse_iso8601(mrss.find('.//pubDate').text, delimiter=' ') content = mrss.find(xpath_with_ns('.//media:content', NAMESPACE_MAP)) content_url = content.get('url') content = self._download_xml( content_url, video_id, 'Downloading media:content') rendition_items = content.findall('.//rendition') duration = float_or_none(rendition_items[0].get('duration')) formats = [ { 'url': re.sub(r'.*/(r2)', RAW_MP4_URL + r'\1', r.find('./src').text), 'width': int_or_none(r.get('width')), 'height': int_or_none(r.get('height')), 'tbr': int_or_none(r.get('bitrate')), } for r in rendition_items ] self._sort_formats(formats) return { 'id': video_id, 'title': title, 'thumbnail': thumbnail, 'duration': duration, 'formats': formats, 'description': description, 'age_limit': age_limit, 'timestamp': timestamp, } class GameOnePlaylistIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?gameone\.de(?:/tv)?/?$' IE_NAME = 'gameone:playlist' _TEST = { 'url': 'http://www.gameone.de/tv', 'info_dict': { 'title': 'GameOne', }, 'playlist_mincount': 294, } def _real_extract(self, url): webpage = self._download_webpage('http://www.gameone.de/tv', 'TV') max_id = max(map(int, re.findall(r'<a href="/tv/(\d+)"', webpage))) entries = [ self.url_result('http://www.gameone.de/tv/%d' % video_id, 'GameOne') for video_id in range(max_id, 0, -1)] return { '_type': 'playlist', 'title': 'GameOne', 'entries': entries, }
unlicense
iegor/kdeutils
superkaramba/examples/control_management/mgmt.py
4
1338
# # Written by Luke Kenneth Casson Leighton <lkcl@lkcl.net> #this import statement allows access to the karamba functions import karamba do_nothing_txt = None mgmt_txt = None #this is called when you widget is initialized def initWidget(widget): global do_nothing_txt global do_something_txt # display new message do_nothing_txt = karamba.createText(widget, 0, 00, 300, 20, "Right mouse click me!") karamba.changeTextColor(widget, do_nothing_txt, 252,252,252) mgmt_txt = karamba.createText(widget, 0, 20, 300, 20, "Righ mouse click me too!") karamba.changeTextColor(widget, mgmt_txt, 252,252,252) karamba.redrawWidget(widget) karamba.setWantRightButton(widget, 1) def widgetUpdated(widget): karamba.redrawWidget(widget) def widgetClicked(widget, x, y, button): global do_nothing_txt if y < 20: if do_nothing_txt is not None: karamba.deleteText(widget, do_nothing_txt) do_nothing_txt = karamba.createText(widget, 0, 0, 300, 20, "I don't do anything when clicking here.") karamba.changeTextColor(widget, do_nothing_txt, 255,200,200) karamba.redrawWidget(widget) return karamba.managementPopup(widget) # This will be printed when the widget loads. print "Loaded my python extension!"
gpl-2.0
rlindner81/pyload
module/lib/beaker/synchronization.py
48
11755
"""Synchronization functions. File- and mutex-based mutual exclusion synchronizers are provided, as well as a name-based mutex which locks within an application based on a string name. """ import os import sys import tempfile try: import threading as _threading except ImportError: import dummy_threading as _threading # check for fcntl module try: sys.getwindowsversion() has_flock = False except: try: import fcntl has_flock = True except ImportError: has_flock = False from beaker import util from beaker.exceptions import LockError __all__ = ["file_synchronizer", "mutex_synchronizer", "null_synchronizer", "NameLock", "_threading"] class NameLock(object): """a proxy for an RLock object that is stored in a name based registry. Multiple threads can get a reference to the same RLock based on the name alone, and synchronize operations related to that name. """ locks = util.WeakValuedRegistry() class NLContainer(object): def __init__(self, reentrant): if reentrant: self.lock = _threading.RLock() else: self.lock = _threading.Lock() def __call__(self): return self.lock def __init__(self, identifier = None, reentrant = False): if identifier is None: self._lock = NameLock.NLContainer(reentrant) else: self._lock = NameLock.locks.get(identifier, NameLock.NLContainer, reentrant) def acquire(self, wait = True): return self._lock().acquire(wait) def release(self): self._lock().release() _synchronizers = util.WeakValuedRegistry() def _synchronizer(identifier, cls, **kwargs): return _synchronizers.sync_get((identifier, cls), cls, identifier, **kwargs) def file_synchronizer(identifier, **kwargs): if not has_flock or 'lock_dir' not in kwargs: return mutex_synchronizer(identifier) else: return _synchronizer(identifier, FileSynchronizer, **kwargs) def mutex_synchronizer(identifier, **kwargs): return _synchronizer(identifier, ConditionSynchronizer, **kwargs) class null_synchronizer(object): def acquire_write_lock(self, wait=True): return True def acquire_read_lock(self): pass def 
release_write_lock(self): pass def release_read_lock(self): pass acquire = acquire_write_lock release = release_write_lock class SynchronizerImpl(object): def __init__(self): self._state = util.ThreadLocal() class SyncState(object): __slots__ = 'reentrantcount', 'writing', 'reading' def __init__(self): self.reentrantcount = 0 self.writing = False self.reading = False def state(self): if not self._state.has(): state = SynchronizerImpl.SyncState() self._state.put(state) return state else: return self._state.get() state = property(state) def release_read_lock(self): state = self.state if state.writing: raise LockError("lock is in writing state") if not state.reading: raise LockError("lock is not in reading state") if state.reentrantcount == 1: self.do_release_read_lock() state.reading = False state.reentrantcount -= 1 def acquire_read_lock(self, wait = True): state = self.state if state.writing: raise LockError("lock is in writing state") if state.reentrantcount == 0: x = self.do_acquire_read_lock(wait) if (wait or x): state.reentrantcount += 1 state.reading = True return x elif state.reading: state.reentrantcount += 1 return True def release_write_lock(self): state = self.state if state.reading: raise LockError("lock is in reading state") if not state.writing: raise LockError("lock is not in writing state") if state.reentrantcount == 1: self.do_release_write_lock() state.writing = False state.reentrantcount -= 1 release = release_write_lock def acquire_write_lock(self, wait = True): state = self.state if state.reading: raise LockError("lock is in reading state") if state.reentrantcount == 0: x = self.do_acquire_write_lock(wait) if (wait or x): state.reentrantcount += 1 state.writing = True return x elif state.writing: state.reentrantcount += 1 return True acquire = acquire_write_lock def do_release_read_lock(self): raise NotImplementedError() def do_acquire_read_lock(self): raise NotImplementedError() def do_release_write_lock(self): raise NotImplementedError() def 
do_acquire_write_lock(self): raise NotImplementedError() class FileSynchronizer(SynchronizerImpl): """a synchronizer which locks using flock(). Adapted for Python/multithreads from Apache::Session::Lock::File, http://search.cpan.org/src/CWEST/Apache-Session-1.81/Session/Lock/File.pm This module does not unlink temporary files, because it interferes with proper locking. This can cause problems on certain systems (Linux) whose file systems (ext2) do not perform well with lots of files in one directory. To prevent this you should use a script to clean out old files from your lock directory. """ def __init__(self, identifier, lock_dir): super(FileSynchronizer, self).__init__() self._filedescriptor = util.ThreadLocal() if lock_dir is None: lock_dir = tempfile.gettempdir() else: lock_dir = lock_dir self.filename = util.encoded_path( lock_dir, [identifier], extension='.lock' ) def _filedesc(self): return self._filedescriptor.get() _filedesc = property(_filedesc) def _open(self, mode): filedescriptor = self._filedesc if filedescriptor is None: filedescriptor = os.open(self.filename, mode) self._filedescriptor.put(filedescriptor) return filedescriptor def do_acquire_read_lock(self, wait): filedescriptor = self._open(os.O_CREAT | os.O_RDONLY) if not wait: try: fcntl.flock(filedescriptor, fcntl.LOCK_SH | fcntl.LOCK_NB) return True except IOError: os.close(filedescriptor) self._filedescriptor.remove() return False else: fcntl.flock(filedescriptor, fcntl.LOCK_SH) return True def do_acquire_write_lock(self, wait): filedescriptor = self._open(os.O_CREAT | os.O_WRONLY) if not wait: try: fcntl.flock(filedescriptor, fcntl.LOCK_EX | fcntl.LOCK_NB) return True except IOError: os.close(filedescriptor) self._filedescriptor.remove() return False else: fcntl.flock(filedescriptor, fcntl.LOCK_EX) return True def do_release_read_lock(self): self._release_all_locks() def do_release_write_lock(self): self._release_all_locks() def _release_all_locks(self): filedescriptor = self._filedesc if 
filedescriptor is not None: fcntl.flock(filedescriptor, fcntl.LOCK_UN) os.close(filedescriptor) self._filedescriptor.remove() class ConditionSynchronizer(SynchronizerImpl): """a synchronizer using a Condition.""" def __init__(self, identifier): super(ConditionSynchronizer, self).__init__() # counts how many asynchronous methods are executing self.async = 0 # pointer to thread that is the current sync operation self.current_sync_operation = None # condition object to lock on self.condition = _threading.Condition(_threading.Lock()) def do_acquire_read_lock(self, wait = True): self.condition.acquire() try: # see if a synchronous operation is waiting to start # or is already running, in which case we wait (or just # give up and return) if wait: while self.current_sync_operation is not None: self.condition.wait() else: if self.current_sync_operation is not None: return False self.async += 1 finally: self.condition.release() if not wait: return True def do_release_read_lock(self): self.condition.acquire() try: self.async -= 1 # check if we are the last asynchronous reader thread # out the door. if self.async == 0: # yes. so if a sync operation is waiting, notifyAll to wake # it up if self.current_sync_operation is not None: self.condition.notifyAll() elif self.async < 0: raise LockError("Synchronizer error - too many " "release_read_locks called") finally: self.condition.release() def do_acquire_write_lock(self, wait = True): self.condition.acquire() try: # here, we are not a synchronous reader, and after returning, # assuming waiting or immediate availability, we will be. 
if wait: # if another sync is working, wait while self.current_sync_operation is not None: self.condition.wait() else: # if another sync is working, # we dont want to wait, so forget it if self.current_sync_operation is not None: return False # establish ourselves as the current sync # this indicates to other read/write operations # that they should wait until this is None again self.current_sync_operation = _threading.currentThread() # now wait again for asyncs to finish if self.async > 0: if wait: # wait self.condition.wait() else: # we dont want to wait, so forget it self.current_sync_operation = None return False finally: self.condition.release() if not wait: return True def do_release_write_lock(self): self.condition.acquire() try: if self.current_sync_operation is not _threading.currentThread(): raise LockError("Synchronizer error - current thread doesnt " "have the write lock") # reset the current sync operation so # another can get it self.current_sync_operation = None # tell everyone to get ready self.condition.notifyAll() finally: # everyone go !! self.condition.release()
gpl-3.0
sameetb-cuelogic/edx-platform-test
lms/djangoapps/survey/tests/test_views.py
62
5051
""" Python tests for the Survey views """ import json from collections import OrderedDict from django.test.client import Client from django.contrib.auth.models import User from django.core.urlresolvers import reverse from survey.models import SurveyForm from xmodule.modulestore.tests.factories import CourseFactory from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase class SurveyViewsTests(ModuleStoreTestCase): """ All tests for the views.py file """ def setUp(self): """ Set up the test data used in the specific tests """ super(SurveyViewsTests, self).setUp() self.client = Client() # Create two accounts self.password = 'abc' self.student = User.objects.create_user('student', 'student@test.com', self.password) self.test_survey_name = 'TestSurvey' self.test_form = '<input name="field1" /><input name="field2" /><select name="ddl"><option>1</option></select>' self.student_answers = OrderedDict({ u'field1': u'value1', u'field2': u'value2', u'ddl': u'1', }) self.course = CourseFactory.create( course_survey_required=True, course_survey_name=self.test_survey_name ) self.survey = SurveyForm.create(self.test_survey_name, self.test_form) self.view_url = reverse('view_survey', args=[self.test_survey_name]) self.postback_url = reverse('submit_answers', args=[self.test_survey_name]) self.client.login(username=self.student.username, password=self.password) def test_unauthenticated_survey_view(self): """ Asserts that an unauthenticated user cannot access a survey """ anon_user = Client() resp = anon_user.get(self.view_url) self.assertEquals(resp.status_code, 302) def test_survey_not_found(self): """ Asserts that if we ask for a Survey that does not exist, then we get a 302 redirect """ resp = self.client.get(reverse('view_survey', args=['NonExisting'])) self.assertEquals(resp.status_code, 302) def test_authenticated_survey_view(self): """ Asserts that an authenticated user can see the survey """ resp = self.client.get(self.view_url) 
self.assertEquals(resp.status_code, 200) # is the SurveyForm html present in the HTML response? self.assertIn(self.test_form, resp.content) def test_unautneticated_survey_postback(self): """ Asserts that an anonymous user cannot answer a survey """ anon_user = Client() resp = anon_user.post( self.postback_url, self.student_answers ) self.assertEquals(resp.status_code, 302) def test_survey_postback_to_nonexisting_survey(self): """ Asserts that any attempts to post back to a non existing survey returns a 404 """ resp = self.client.post( reverse('submit_answers', args=['NonExisting']), self.student_answers ) self.assertEquals(resp.status_code, 404) def test_survey_postback(self): """ Asserts that a well formed postback of survey answers is properly stored in the database """ resp = self.client.post( self.postback_url, self.student_answers ) self.assertEquals(resp.status_code, 200) data = json.loads(resp.content) self.assertIn('redirect_url', data) answers = self.survey.get_answers(self.student) self.assertEquals(answers[self.student.id], self.student_answers) def test_strip_extra_fields(self): """ Verify that any not expected field name in the post-back is not stored in the database """ data = dict.copy(self.student_answers) data['csrfmiddlewaretoken'] = 'foo' data['_redirect_url'] = 'bar' resp = self.client.post( self.postback_url, data ) self.assertEquals(resp.status_code, 200) answers = self.survey.get_answers(self.student) self.assertNotIn('csrfmiddlewaretoken', answers[self.student.id]) self.assertNotIn('_redirect_url', answers[self.student.id]) def test_encoding_answers(self): """ Verify that if some potentially harmful input data is sent, that is is properly HTML encoded """ data = dict.copy(self.student_answers) data['field1'] = '<script type="javascript">alert("Deleting filesystem...")</script>' resp = self.client.post( self.postback_url, data ) self.assertEquals(resp.status_code, 200) answers = self.survey.get_answers(self.student) self.assertEqual( 
'&lt;script type=&quot;javascript&quot;&gt;alert(&quot;Deleting filesystem...&quot;)&lt;/script&gt;', answers[self.student.id]['field1'] )
agpl-3.0
cortesi/mitmproxy
pathod/language/writer.py
12
2141
import time from mitmproxy import exceptions BLOCKSIZE = 1024 # It's not clear what the upper limit for time.sleep is. It's lower than the # maximum int or float. 1 year should do. FOREVER = 60 * 60 * 24 * 365 def send_chunk(fp, val, blocksize, start, end): """ (start, end): Inclusive lower bound, exclusive upper bound. """ for i in range(start, end, blocksize): fp.write( val[i:min(i + blocksize, end)] ) return end - start def write_values(fp, vals, actions, sofar=0, blocksize=BLOCKSIZE): """ vals: A list of values, which may be strings or Value objects. actions: A list of (offset, action, arg) tuples. Action may be "inject", "pause" or "disconnect". Both vals and actions are in reverse order, with the first items last. Return True if connection should disconnect. """ sofar = 0 try: while vals: v = vals.pop() offset = 0 while actions and actions[-1][0] < (sofar + len(v)): a = actions.pop() offset += send_chunk( fp, v, blocksize, offset, a[0] - sofar - offset ) if a[1] == "pause": time.sleep( FOREVER if a[2] == "f" else a[2] ) elif a[1] == "disconnect": return True elif a[1] == "inject": send_chunk(fp, a[2], blocksize, 0, len(a[2])) send_chunk(fp, v, blocksize, offset, len(v)) sofar += len(v) # Remainders while actions: a = actions.pop() if a[1] == "pause": time.sleep( FOREVER if a[2] == "f" else a[2] ) elif a[1] == "disconnect": return True elif a[1] == "inject": send_chunk(fp, a[2], blocksize, 0, len(a[2])) except exceptions.TcpDisconnect: # pragma: no cover return True
mit
polyval/CNC
flask/Lib/encodings/cp737.py
593
34937
""" Python Character Mapping Codec cp737 generated from 'VENDORS/MICSFT/PC/CP737.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_map) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_map)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='cp737', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Map decoding_map = codecs.make_identity_dict(range(256)) decoding_map.update({ 0x0080: 0x0391, # GREEK CAPITAL LETTER ALPHA 0x0081: 0x0392, # GREEK CAPITAL LETTER BETA 0x0082: 0x0393, # GREEK CAPITAL LETTER GAMMA 0x0083: 0x0394, # GREEK CAPITAL LETTER DELTA 0x0084: 0x0395, # GREEK CAPITAL LETTER EPSILON 0x0085: 0x0396, # GREEK CAPITAL LETTER ZETA 0x0086: 0x0397, # GREEK CAPITAL LETTER ETA 0x0087: 0x0398, # GREEK CAPITAL LETTER THETA 0x0088: 0x0399, # GREEK CAPITAL LETTER IOTA 0x0089: 0x039a, # GREEK CAPITAL LETTER KAPPA 0x008a: 0x039b, # GREEK CAPITAL LETTER LAMDA 0x008b: 0x039c, # GREEK CAPITAL LETTER MU 0x008c: 0x039d, # GREEK CAPITAL LETTER NU 0x008d: 0x039e, # GREEK CAPITAL LETTER XI 0x008e: 0x039f, # GREEK CAPITAL LETTER OMICRON 0x008f: 0x03a0, # GREEK CAPITAL LETTER PI 0x0090: 0x03a1, # GREEK CAPITAL LETTER RHO 0x0091: 0x03a3, # GREEK CAPITAL LETTER SIGMA 0x0092: 0x03a4, # GREEK CAPITAL LETTER TAU 0x0093: 0x03a5, # GREEK 
CAPITAL LETTER UPSILON 0x0094: 0x03a6, # GREEK CAPITAL LETTER PHI 0x0095: 0x03a7, # GREEK CAPITAL LETTER CHI 0x0096: 0x03a8, # GREEK CAPITAL LETTER PSI 0x0097: 0x03a9, # GREEK CAPITAL LETTER OMEGA 0x0098: 0x03b1, # GREEK SMALL LETTER ALPHA 0x0099: 0x03b2, # GREEK SMALL LETTER BETA 0x009a: 0x03b3, # GREEK SMALL LETTER GAMMA 0x009b: 0x03b4, # GREEK SMALL LETTER DELTA 0x009c: 0x03b5, # GREEK SMALL LETTER EPSILON 0x009d: 0x03b6, # GREEK SMALL LETTER ZETA 0x009e: 0x03b7, # GREEK SMALL LETTER ETA 0x009f: 0x03b8, # GREEK SMALL LETTER THETA 0x00a0: 0x03b9, # GREEK SMALL LETTER IOTA 0x00a1: 0x03ba, # GREEK SMALL LETTER KAPPA 0x00a2: 0x03bb, # GREEK SMALL LETTER LAMDA 0x00a3: 0x03bc, # GREEK SMALL LETTER MU 0x00a4: 0x03bd, # GREEK SMALL LETTER NU 0x00a5: 0x03be, # GREEK SMALL LETTER XI 0x00a6: 0x03bf, # GREEK SMALL LETTER OMICRON 0x00a7: 0x03c0, # GREEK SMALL LETTER PI 0x00a8: 0x03c1, # GREEK SMALL LETTER RHO 0x00a9: 0x03c3, # GREEK SMALL LETTER SIGMA 0x00aa: 0x03c2, # GREEK SMALL LETTER FINAL SIGMA 0x00ab: 0x03c4, # GREEK SMALL LETTER TAU 0x00ac: 0x03c5, # GREEK SMALL LETTER UPSILON 0x00ad: 0x03c6, # GREEK SMALL LETTER PHI 0x00ae: 0x03c7, # GREEK SMALL LETTER CHI 0x00af: 0x03c8, # GREEK SMALL LETTER PSI 0x00b0: 0x2591, # LIGHT SHADE 0x00b1: 0x2592, # MEDIUM SHADE 0x00b2: 0x2593, # DARK SHADE 0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL 0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT 0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE 0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE 0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE 0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE 0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT 0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL 0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT 0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT 0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE 0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE 0x00bf: 0x2510, # BOX DRAWINGS 
LIGHT DOWN AND LEFT 0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT 0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL 0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL 0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT 0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL 0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL 0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE 0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE 0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT 0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT 0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL 0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL 0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT 0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL 0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL 0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE 0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE 0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE 0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE 0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE 0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE 0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE 0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE 0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE 0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE 0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT 0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT 0x00db: 0x2588, # FULL BLOCK 0x00dc: 0x2584, # LOWER HALF BLOCK 0x00dd: 0x258c, # LEFT HALF BLOCK 0x00de: 0x2590, # RIGHT HALF BLOCK 0x00df: 0x2580, # UPPER HALF BLOCK 0x00e0: 0x03c9, # GREEK SMALL LETTER OMEGA 0x00e1: 0x03ac, # GREEK SMALL LETTER ALPHA WITH TONOS 0x00e2: 0x03ad, # GREEK SMALL LETTER EPSILON WITH TONOS 0x00e3: 0x03ae, # GREEK SMALL LETTER ETA WITH TONOS 0x00e4: 0x03ca, # GREEK SMALL 
LETTER IOTA WITH DIALYTIKA 0x00e5: 0x03af, # GREEK SMALL LETTER IOTA WITH TONOS 0x00e6: 0x03cc, # GREEK SMALL LETTER OMICRON WITH TONOS 0x00e7: 0x03cd, # GREEK SMALL LETTER UPSILON WITH TONOS 0x00e8: 0x03cb, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA 0x00e9: 0x03ce, # GREEK SMALL LETTER OMEGA WITH TONOS 0x00ea: 0x0386, # GREEK CAPITAL LETTER ALPHA WITH TONOS 0x00eb: 0x0388, # GREEK CAPITAL LETTER EPSILON WITH TONOS 0x00ec: 0x0389, # GREEK CAPITAL LETTER ETA WITH TONOS 0x00ed: 0x038a, # GREEK CAPITAL LETTER IOTA WITH TONOS 0x00ee: 0x038c, # GREEK CAPITAL LETTER OMICRON WITH TONOS 0x00ef: 0x038e, # GREEK CAPITAL LETTER UPSILON WITH TONOS 0x00f0: 0x038f, # GREEK CAPITAL LETTER OMEGA WITH TONOS 0x00f1: 0x00b1, # PLUS-MINUS SIGN 0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO 0x00f3: 0x2264, # LESS-THAN OR EQUAL TO 0x00f4: 0x03aa, # GREEK CAPITAL LETTER IOTA WITH DIALYTIKA 0x00f5: 0x03ab, # GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA 0x00f6: 0x00f7, # DIVISION SIGN 0x00f7: 0x2248, # ALMOST EQUAL TO 0x00f8: 0x00b0, # DEGREE SIGN 0x00f9: 0x2219, # BULLET OPERATOR 0x00fa: 0x00b7, # MIDDLE DOT 0x00fb: 0x221a, # SQUARE ROOT 0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N 0x00fd: 0x00b2, # SUPERSCRIPT TWO 0x00fe: 0x25a0, # BLACK SQUARE 0x00ff: 0x00a0, # NO-BREAK SPACE }) ### Decoding Table decoding_table = ( u'\x00' # 0x0000 -> NULL u'\x01' # 0x0001 -> START OF HEADING u'\x02' # 0x0002 -> START OF TEXT u'\x03' # 0x0003 -> END OF TEXT u'\x04' # 0x0004 -> END OF TRANSMISSION u'\x05' # 0x0005 -> ENQUIRY u'\x06' # 0x0006 -> ACKNOWLEDGE u'\x07' # 0x0007 -> BELL u'\x08' # 0x0008 -> BACKSPACE u'\t' # 0x0009 -> HORIZONTAL TABULATION u'\n' # 0x000a -> LINE FEED u'\x0b' # 0x000b -> VERTICAL TABULATION u'\x0c' # 0x000c -> FORM FEED u'\r' # 0x000d -> CARRIAGE RETURN u'\x0e' # 0x000e -> SHIFT OUT u'\x0f' # 0x000f -> SHIFT IN u'\x10' # 0x0010 -> DATA LINK ESCAPE u'\x11' # 0x0011 -> DEVICE CONTROL ONE u'\x12' # 0x0012 -> DEVICE CONTROL TWO u'\x13' # 0x0013 -> DEVICE CONTROL THREE u'\x14' # 
0x0014 -> DEVICE CONTROL FOUR u'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE u'\x16' # 0x0016 -> SYNCHRONOUS IDLE u'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK u'\x18' # 0x0018 -> CANCEL u'\x19' # 0x0019 -> END OF MEDIUM u'\x1a' # 0x001a -> SUBSTITUTE u'\x1b' # 0x001b -> ESCAPE u'\x1c' # 0x001c -> FILE SEPARATOR u'\x1d' # 0x001d -> GROUP SEPARATOR u'\x1e' # 0x001e -> RECORD SEPARATOR u'\x1f' # 0x001f -> UNIT SEPARATOR u' ' # 0x0020 -> SPACE u'!' # 0x0021 -> EXCLAMATION MARK u'"' # 0x0022 -> QUOTATION MARK u'#' # 0x0023 -> NUMBER SIGN u'$' # 0x0024 -> DOLLAR SIGN u'%' # 0x0025 -> PERCENT SIGN u'&' # 0x0026 -> AMPERSAND u"'" # 0x0027 -> APOSTROPHE u'(' # 0x0028 -> LEFT PARENTHESIS u')' # 0x0029 -> RIGHT PARENTHESIS u'*' # 0x002a -> ASTERISK u'+' # 0x002b -> PLUS SIGN u',' # 0x002c -> COMMA u'-' # 0x002d -> HYPHEN-MINUS u'.' # 0x002e -> FULL STOP u'/' # 0x002f -> SOLIDUS u'0' # 0x0030 -> DIGIT ZERO u'1' # 0x0031 -> DIGIT ONE u'2' # 0x0032 -> DIGIT TWO u'3' # 0x0033 -> DIGIT THREE u'4' # 0x0034 -> DIGIT FOUR u'5' # 0x0035 -> DIGIT FIVE u'6' # 0x0036 -> DIGIT SIX u'7' # 0x0037 -> DIGIT SEVEN u'8' # 0x0038 -> DIGIT EIGHT u'9' # 0x0039 -> DIGIT NINE u':' # 0x003a -> COLON u';' # 0x003b -> SEMICOLON u'<' # 0x003c -> LESS-THAN SIGN u'=' # 0x003d -> EQUALS SIGN u'>' # 0x003e -> GREATER-THAN SIGN u'?' 
# 0x003f -> QUESTION MARK u'@' # 0x0040 -> COMMERCIAL AT u'A' # 0x0041 -> LATIN CAPITAL LETTER A u'B' # 0x0042 -> LATIN CAPITAL LETTER B u'C' # 0x0043 -> LATIN CAPITAL LETTER C u'D' # 0x0044 -> LATIN CAPITAL LETTER D u'E' # 0x0045 -> LATIN CAPITAL LETTER E u'F' # 0x0046 -> LATIN CAPITAL LETTER F u'G' # 0x0047 -> LATIN CAPITAL LETTER G u'H' # 0x0048 -> LATIN CAPITAL LETTER H u'I' # 0x0049 -> LATIN CAPITAL LETTER I u'J' # 0x004a -> LATIN CAPITAL LETTER J u'K' # 0x004b -> LATIN CAPITAL LETTER K u'L' # 0x004c -> LATIN CAPITAL LETTER L u'M' # 0x004d -> LATIN CAPITAL LETTER M u'N' # 0x004e -> LATIN CAPITAL LETTER N u'O' # 0x004f -> LATIN CAPITAL LETTER O u'P' # 0x0050 -> LATIN CAPITAL LETTER P u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q u'R' # 0x0052 -> LATIN CAPITAL LETTER R u'S' # 0x0053 -> LATIN CAPITAL LETTER S u'T' # 0x0054 -> LATIN CAPITAL LETTER T u'U' # 0x0055 -> LATIN CAPITAL LETTER U u'V' # 0x0056 -> LATIN CAPITAL LETTER V u'W' # 0x0057 -> LATIN CAPITAL LETTER W u'X' # 0x0058 -> LATIN CAPITAL LETTER X u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y u'Z' # 0x005a -> LATIN CAPITAL LETTER Z u'[' # 0x005b -> LEFT SQUARE BRACKET u'\\' # 0x005c -> REVERSE SOLIDUS u']' # 0x005d -> RIGHT SQUARE BRACKET u'^' # 0x005e -> CIRCUMFLEX ACCENT u'_' # 0x005f -> LOW LINE u'`' # 0x0060 -> GRAVE ACCENT u'a' # 0x0061 -> LATIN SMALL LETTER A u'b' # 0x0062 -> LATIN SMALL LETTER B u'c' # 0x0063 -> LATIN SMALL LETTER C u'd' # 0x0064 -> LATIN SMALL LETTER D u'e' # 0x0065 -> LATIN SMALL LETTER E u'f' # 0x0066 -> LATIN SMALL LETTER F u'g' # 0x0067 -> LATIN SMALL LETTER G u'h' # 0x0068 -> LATIN SMALL LETTER H u'i' # 0x0069 -> LATIN SMALL LETTER I u'j' # 0x006a -> LATIN SMALL LETTER J u'k' # 0x006b -> LATIN SMALL LETTER K u'l' # 0x006c -> LATIN SMALL LETTER L u'm' # 0x006d -> LATIN SMALL LETTER M u'n' # 0x006e -> LATIN SMALL LETTER N u'o' # 0x006f -> LATIN SMALL LETTER O u'p' # 0x0070 -> LATIN SMALL LETTER P u'q' # 0x0071 -> LATIN SMALL LETTER Q u'r' # 0x0072 -> LATIN SMALL LETTER R u's' # 0x0073 -> 
LATIN SMALL LETTER S u't' # 0x0074 -> LATIN SMALL LETTER T u'u' # 0x0075 -> LATIN SMALL LETTER U u'v' # 0x0076 -> LATIN SMALL LETTER V u'w' # 0x0077 -> LATIN SMALL LETTER W u'x' # 0x0078 -> LATIN SMALL LETTER X u'y' # 0x0079 -> LATIN SMALL LETTER Y u'z' # 0x007a -> LATIN SMALL LETTER Z u'{' # 0x007b -> LEFT CURLY BRACKET u'|' # 0x007c -> VERTICAL LINE u'}' # 0x007d -> RIGHT CURLY BRACKET u'~' # 0x007e -> TILDE u'\x7f' # 0x007f -> DELETE u'\u0391' # 0x0080 -> GREEK CAPITAL LETTER ALPHA u'\u0392' # 0x0081 -> GREEK CAPITAL LETTER BETA u'\u0393' # 0x0082 -> GREEK CAPITAL LETTER GAMMA u'\u0394' # 0x0083 -> GREEK CAPITAL LETTER DELTA u'\u0395' # 0x0084 -> GREEK CAPITAL LETTER EPSILON u'\u0396' # 0x0085 -> GREEK CAPITAL LETTER ZETA u'\u0397' # 0x0086 -> GREEK CAPITAL LETTER ETA u'\u0398' # 0x0087 -> GREEK CAPITAL LETTER THETA u'\u0399' # 0x0088 -> GREEK CAPITAL LETTER IOTA u'\u039a' # 0x0089 -> GREEK CAPITAL LETTER KAPPA u'\u039b' # 0x008a -> GREEK CAPITAL LETTER LAMDA u'\u039c' # 0x008b -> GREEK CAPITAL LETTER MU u'\u039d' # 0x008c -> GREEK CAPITAL LETTER NU u'\u039e' # 0x008d -> GREEK CAPITAL LETTER XI u'\u039f' # 0x008e -> GREEK CAPITAL LETTER OMICRON u'\u03a0' # 0x008f -> GREEK CAPITAL LETTER PI u'\u03a1' # 0x0090 -> GREEK CAPITAL LETTER RHO u'\u03a3' # 0x0091 -> GREEK CAPITAL LETTER SIGMA u'\u03a4' # 0x0092 -> GREEK CAPITAL LETTER TAU u'\u03a5' # 0x0093 -> GREEK CAPITAL LETTER UPSILON u'\u03a6' # 0x0094 -> GREEK CAPITAL LETTER PHI u'\u03a7' # 0x0095 -> GREEK CAPITAL LETTER CHI u'\u03a8' # 0x0096 -> GREEK CAPITAL LETTER PSI u'\u03a9' # 0x0097 -> GREEK CAPITAL LETTER OMEGA u'\u03b1' # 0x0098 -> GREEK SMALL LETTER ALPHA u'\u03b2' # 0x0099 -> GREEK SMALL LETTER BETA u'\u03b3' # 0x009a -> GREEK SMALL LETTER GAMMA u'\u03b4' # 0x009b -> GREEK SMALL LETTER DELTA u'\u03b5' # 0x009c -> GREEK SMALL LETTER EPSILON u'\u03b6' # 0x009d -> GREEK SMALL LETTER ZETA u'\u03b7' # 0x009e -> GREEK SMALL LETTER ETA u'\u03b8' # 0x009f -> GREEK SMALL LETTER THETA u'\u03b9' # 0x00a0 -> GREEK 
SMALL LETTER IOTA u'\u03ba' # 0x00a1 -> GREEK SMALL LETTER KAPPA u'\u03bb' # 0x00a2 -> GREEK SMALL LETTER LAMDA u'\u03bc' # 0x00a3 -> GREEK SMALL LETTER MU u'\u03bd' # 0x00a4 -> GREEK SMALL LETTER NU u'\u03be' # 0x00a5 -> GREEK SMALL LETTER XI u'\u03bf' # 0x00a6 -> GREEK SMALL LETTER OMICRON u'\u03c0' # 0x00a7 -> GREEK SMALL LETTER PI u'\u03c1' # 0x00a8 -> GREEK SMALL LETTER RHO u'\u03c3' # 0x00a9 -> GREEK SMALL LETTER SIGMA u'\u03c2' # 0x00aa -> GREEK SMALL LETTER FINAL SIGMA u'\u03c4' # 0x00ab -> GREEK SMALL LETTER TAU u'\u03c5' # 0x00ac -> GREEK SMALL LETTER UPSILON u'\u03c6' # 0x00ad -> GREEK SMALL LETTER PHI u'\u03c7' # 0x00ae -> GREEK SMALL LETTER CHI u'\u03c8' # 0x00af -> GREEK SMALL LETTER PSI u'\u2591' # 0x00b0 -> LIGHT SHADE u'\u2592' # 0x00b1 -> MEDIUM SHADE u'\u2593' # 0x00b2 -> DARK SHADE u'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL u'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT u'\u2561' # 0x00b5 -> BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE u'\u2562' # 0x00b6 -> BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE u'\u2556' # 0x00b7 -> BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE u'\u2555' # 0x00b8 -> BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE u'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT u'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL u'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT u'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT u'\u255c' # 0x00bd -> BOX DRAWINGS UP DOUBLE AND LEFT SINGLE u'\u255b' # 0x00be -> BOX DRAWINGS UP SINGLE AND LEFT DOUBLE u'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT u'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT u'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL u'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL u'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT u'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL u'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL u'\u255e' # 0x00c6 -> BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE 
u'\u255f' # 0x00c7 -> BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE u'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT u'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT u'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL u'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL u'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT u'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL u'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL u'\u2567' # 0x00cf -> BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE u'\u2568' # 0x00d0 -> BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE u'\u2564' # 0x00d1 -> BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE u'\u2565' # 0x00d2 -> BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE u'\u2559' # 0x00d3 -> BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE u'\u2558' # 0x00d4 -> BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE u'\u2552' # 0x00d5 -> BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE u'\u2553' # 0x00d6 -> BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE u'\u256b' # 0x00d7 -> BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE u'\u256a' # 0x00d8 -> BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE u'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT u'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT u'\u2588' # 0x00db -> FULL BLOCK u'\u2584' # 0x00dc -> LOWER HALF BLOCK u'\u258c' # 0x00dd -> LEFT HALF BLOCK u'\u2590' # 0x00de -> RIGHT HALF BLOCK u'\u2580' # 0x00df -> UPPER HALF BLOCK u'\u03c9' # 0x00e0 -> GREEK SMALL LETTER OMEGA u'\u03ac' # 0x00e1 -> GREEK SMALL LETTER ALPHA WITH TONOS u'\u03ad' # 0x00e2 -> GREEK SMALL LETTER EPSILON WITH TONOS u'\u03ae' # 0x00e3 -> GREEK SMALL LETTER ETA WITH TONOS u'\u03ca' # 0x00e4 -> GREEK SMALL LETTER IOTA WITH DIALYTIKA u'\u03af' # 0x00e5 -> GREEK SMALL LETTER IOTA WITH TONOS u'\u03cc' # 0x00e6 -> GREEK SMALL LETTER OMICRON WITH TONOS u'\u03cd' # 0x00e7 -> GREEK SMALL LETTER UPSILON WITH TONOS u'\u03cb' # 0x00e8 -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA u'\u03ce' # 0x00e9 -> GREEK 
SMALL LETTER OMEGA WITH TONOS u'\u0386' # 0x00ea -> GREEK CAPITAL LETTER ALPHA WITH TONOS u'\u0388' # 0x00eb -> GREEK CAPITAL LETTER EPSILON WITH TONOS u'\u0389' # 0x00ec -> GREEK CAPITAL LETTER ETA WITH TONOS u'\u038a' # 0x00ed -> GREEK CAPITAL LETTER IOTA WITH TONOS u'\u038c' # 0x00ee -> GREEK CAPITAL LETTER OMICRON WITH TONOS u'\u038e' # 0x00ef -> GREEK CAPITAL LETTER UPSILON WITH TONOS u'\u038f' # 0x00f0 -> GREEK CAPITAL LETTER OMEGA WITH TONOS u'\xb1' # 0x00f1 -> PLUS-MINUS SIGN u'\u2265' # 0x00f2 -> GREATER-THAN OR EQUAL TO u'\u2264' # 0x00f3 -> LESS-THAN OR EQUAL TO u'\u03aa' # 0x00f4 -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA u'\u03ab' # 0x00f5 -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA u'\xf7' # 0x00f6 -> DIVISION SIGN u'\u2248' # 0x00f7 -> ALMOST EQUAL TO u'\xb0' # 0x00f8 -> DEGREE SIGN u'\u2219' # 0x00f9 -> BULLET OPERATOR u'\xb7' # 0x00fa -> MIDDLE DOT u'\u221a' # 0x00fb -> SQUARE ROOT u'\u207f' # 0x00fc -> SUPERSCRIPT LATIN SMALL LETTER N u'\xb2' # 0x00fd -> SUPERSCRIPT TWO u'\u25a0' # 0x00fe -> BLACK SQUARE u'\xa0' # 0x00ff -> NO-BREAK SPACE ) ### Encoding Map encoding_map = { 0x0000: 0x0000, # NULL 0x0001: 0x0001, # START OF HEADING 0x0002: 0x0002, # START OF TEXT 0x0003: 0x0003, # END OF TEXT 0x0004: 0x0004, # END OF TRANSMISSION 0x0005: 0x0005, # ENQUIRY 0x0006: 0x0006, # ACKNOWLEDGE 0x0007: 0x0007, # BELL 0x0008: 0x0008, # BACKSPACE 0x0009: 0x0009, # HORIZONTAL TABULATION 0x000a: 0x000a, # LINE FEED 0x000b: 0x000b, # VERTICAL TABULATION 0x000c: 0x000c, # FORM FEED 0x000d: 0x000d, # CARRIAGE RETURN 0x000e: 0x000e, # SHIFT OUT 0x000f: 0x000f, # SHIFT IN 0x0010: 0x0010, # DATA LINK ESCAPE 0x0011: 0x0011, # DEVICE CONTROL ONE 0x0012: 0x0012, # DEVICE CONTROL TWO 0x0013: 0x0013, # DEVICE CONTROL THREE 0x0014: 0x0014, # DEVICE CONTROL FOUR 0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE 0x0016: 0x0016, # SYNCHRONOUS IDLE 0x0017: 0x0017, # END OF TRANSMISSION BLOCK 0x0018: 0x0018, # CANCEL 0x0019: 0x0019, # END OF MEDIUM 0x001a: 0x001a, # SUBSTITUTE 0x001b: 
0x001b, # ESCAPE 0x001c: 0x001c, # FILE SEPARATOR 0x001d: 0x001d, # GROUP SEPARATOR 0x001e: 0x001e, # RECORD SEPARATOR 0x001f: 0x001f, # UNIT SEPARATOR 0x0020: 0x0020, # SPACE 0x0021: 0x0021, # EXCLAMATION MARK 0x0022: 0x0022, # QUOTATION MARK 0x0023: 0x0023, # NUMBER SIGN 0x0024: 0x0024, # DOLLAR SIGN 0x0025: 0x0025, # PERCENT SIGN 0x0026: 0x0026, # AMPERSAND 0x0027: 0x0027, # APOSTROPHE 0x0028: 0x0028, # LEFT PARENTHESIS 0x0029: 0x0029, # RIGHT PARENTHESIS 0x002a: 0x002a, # ASTERISK 0x002b: 0x002b, # PLUS SIGN 0x002c: 0x002c, # COMMA 0x002d: 0x002d, # HYPHEN-MINUS 0x002e: 0x002e, # FULL STOP 0x002f: 0x002f, # SOLIDUS 0x0030: 0x0030, # DIGIT ZERO 0x0031: 0x0031, # DIGIT ONE 0x0032: 0x0032, # DIGIT TWO 0x0033: 0x0033, # DIGIT THREE 0x0034: 0x0034, # DIGIT FOUR 0x0035: 0x0035, # DIGIT FIVE 0x0036: 0x0036, # DIGIT SIX 0x0037: 0x0037, # DIGIT SEVEN 0x0038: 0x0038, # DIGIT EIGHT 0x0039: 0x0039, # DIGIT NINE 0x003a: 0x003a, # COLON 0x003b: 0x003b, # SEMICOLON 0x003c: 0x003c, # LESS-THAN SIGN 0x003d: 0x003d, # EQUALS SIGN 0x003e: 0x003e, # GREATER-THAN SIGN 0x003f: 0x003f, # QUESTION MARK 0x0040: 0x0040, # COMMERCIAL AT 0x0041: 0x0041, # LATIN CAPITAL LETTER A 0x0042: 0x0042, # LATIN CAPITAL LETTER B 0x0043: 0x0043, # LATIN CAPITAL LETTER C 0x0044: 0x0044, # LATIN CAPITAL LETTER D 0x0045: 0x0045, # LATIN CAPITAL LETTER E 0x0046: 0x0046, # LATIN CAPITAL LETTER F 0x0047: 0x0047, # LATIN CAPITAL LETTER G 0x0048: 0x0048, # LATIN CAPITAL LETTER H 0x0049: 0x0049, # LATIN CAPITAL LETTER I 0x004a: 0x004a, # LATIN CAPITAL LETTER J 0x004b: 0x004b, # LATIN CAPITAL LETTER K 0x004c: 0x004c, # LATIN CAPITAL LETTER L 0x004d: 0x004d, # LATIN CAPITAL LETTER M 0x004e: 0x004e, # LATIN CAPITAL LETTER N 0x004f: 0x004f, # LATIN CAPITAL LETTER O 0x0050: 0x0050, # LATIN CAPITAL LETTER P 0x0051: 0x0051, # LATIN CAPITAL LETTER Q 0x0052: 0x0052, # LATIN CAPITAL LETTER R 0x0053: 0x0053, # LATIN CAPITAL LETTER S 0x0054: 0x0054, # LATIN CAPITAL LETTER T 0x0055: 0x0055, # LATIN CAPITAL LETTER U 
0x0056: 0x0056, # LATIN CAPITAL LETTER V 0x0057: 0x0057, # LATIN CAPITAL LETTER W 0x0058: 0x0058, # LATIN CAPITAL LETTER X 0x0059: 0x0059, # LATIN CAPITAL LETTER Y 0x005a: 0x005a, # LATIN CAPITAL LETTER Z 0x005b: 0x005b, # LEFT SQUARE BRACKET 0x005c: 0x005c, # REVERSE SOLIDUS 0x005d: 0x005d, # RIGHT SQUARE BRACKET 0x005e: 0x005e, # CIRCUMFLEX ACCENT 0x005f: 0x005f, # LOW LINE 0x0060: 0x0060, # GRAVE ACCENT 0x0061: 0x0061, # LATIN SMALL LETTER A 0x0062: 0x0062, # LATIN SMALL LETTER B 0x0063: 0x0063, # LATIN SMALL LETTER C 0x0064: 0x0064, # LATIN SMALL LETTER D 0x0065: 0x0065, # LATIN SMALL LETTER E 0x0066: 0x0066, # LATIN SMALL LETTER F 0x0067: 0x0067, # LATIN SMALL LETTER G 0x0068: 0x0068, # LATIN SMALL LETTER H 0x0069: 0x0069, # LATIN SMALL LETTER I 0x006a: 0x006a, # LATIN SMALL LETTER J 0x006b: 0x006b, # LATIN SMALL LETTER K 0x006c: 0x006c, # LATIN SMALL LETTER L 0x006d: 0x006d, # LATIN SMALL LETTER M 0x006e: 0x006e, # LATIN SMALL LETTER N 0x006f: 0x006f, # LATIN SMALL LETTER O 0x0070: 0x0070, # LATIN SMALL LETTER P 0x0071: 0x0071, # LATIN SMALL LETTER Q 0x0072: 0x0072, # LATIN SMALL LETTER R 0x0073: 0x0073, # LATIN SMALL LETTER S 0x0074: 0x0074, # LATIN SMALL LETTER T 0x0075: 0x0075, # LATIN SMALL LETTER U 0x0076: 0x0076, # LATIN SMALL LETTER V 0x0077: 0x0077, # LATIN SMALL LETTER W 0x0078: 0x0078, # LATIN SMALL LETTER X 0x0079: 0x0079, # LATIN SMALL LETTER Y 0x007a: 0x007a, # LATIN SMALL LETTER Z 0x007b: 0x007b, # LEFT CURLY BRACKET 0x007c: 0x007c, # VERTICAL LINE 0x007d: 0x007d, # RIGHT CURLY BRACKET 0x007e: 0x007e, # TILDE 0x007f: 0x007f, # DELETE 0x00a0: 0x00ff, # NO-BREAK SPACE 0x00b0: 0x00f8, # DEGREE SIGN 0x00b1: 0x00f1, # PLUS-MINUS SIGN 0x00b2: 0x00fd, # SUPERSCRIPT TWO 0x00b7: 0x00fa, # MIDDLE DOT 0x00f7: 0x00f6, # DIVISION SIGN 0x0386: 0x00ea, # GREEK CAPITAL LETTER ALPHA WITH TONOS 0x0388: 0x00eb, # GREEK CAPITAL LETTER EPSILON WITH TONOS 0x0389: 0x00ec, # GREEK CAPITAL LETTER ETA WITH TONOS 0x038a: 0x00ed, # GREEK CAPITAL LETTER IOTA WITH TONOS 
0x038c: 0x00ee, # GREEK CAPITAL LETTER OMICRON WITH TONOS 0x038e: 0x00ef, # GREEK CAPITAL LETTER UPSILON WITH TONOS 0x038f: 0x00f0, # GREEK CAPITAL LETTER OMEGA WITH TONOS 0x0391: 0x0080, # GREEK CAPITAL LETTER ALPHA 0x0392: 0x0081, # GREEK CAPITAL LETTER BETA 0x0393: 0x0082, # GREEK CAPITAL LETTER GAMMA 0x0394: 0x0083, # GREEK CAPITAL LETTER DELTA 0x0395: 0x0084, # GREEK CAPITAL LETTER EPSILON 0x0396: 0x0085, # GREEK CAPITAL LETTER ZETA 0x0397: 0x0086, # GREEK CAPITAL LETTER ETA 0x0398: 0x0087, # GREEK CAPITAL LETTER THETA 0x0399: 0x0088, # GREEK CAPITAL LETTER IOTA 0x039a: 0x0089, # GREEK CAPITAL LETTER KAPPA 0x039b: 0x008a, # GREEK CAPITAL LETTER LAMDA 0x039c: 0x008b, # GREEK CAPITAL LETTER MU 0x039d: 0x008c, # GREEK CAPITAL LETTER NU 0x039e: 0x008d, # GREEK CAPITAL LETTER XI 0x039f: 0x008e, # GREEK CAPITAL LETTER OMICRON 0x03a0: 0x008f, # GREEK CAPITAL LETTER PI 0x03a1: 0x0090, # GREEK CAPITAL LETTER RHO 0x03a3: 0x0091, # GREEK CAPITAL LETTER SIGMA 0x03a4: 0x0092, # GREEK CAPITAL LETTER TAU 0x03a5: 0x0093, # GREEK CAPITAL LETTER UPSILON 0x03a6: 0x0094, # GREEK CAPITAL LETTER PHI 0x03a7: 0x0095, # GREEK CAPITAL LETTER CHI 0x03a8: 0x0096, # GREEK CAPITAL LETTER PSI 0x03a9: 0x0097, # GREEK CAPITAL LETTER OMEGA 0x03aa: 0x00f4, # GREEK CAPITAL LETTER IOTA WITH DIALYTIKA 0x03ab: 0x00f5, # GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA 0x03ac: 0x00e1, # GREEK SMALL LETTER ALPHA WITH TONOS 0x03ad: 0x00e2, # GREEK SMALL LETTER EPSILON WITH TONOS 0x03ae: 0x00e3, # GREEK SMALL LETTER ETA WITH TONOS 0x03af: 0x00e5, # GREEK SMALL LETTER IOTA WITH TONOS 0x03b1: 0x0098, # GREEK SMALL LETTER ALPHA 0x03b2: 0x0099, # GREEK SMALL LETTER BETA 0x03b3: 0x009a, # GREEK SMALL LETTER GAMMA 0x03b4: 0x009b, # GREEK SMALL LETTER DELTA 0x03b5: 0x009c, # GREEK SMALL LETTER EPSILON 0x03b6: 0x009d, # GREEK SMALL LETTER ZETA 0x03b7: 0x009e, # GREEK SMALL LETTER ETA 0x03b8: 0x009f, # GREEK SMALL LETTER THETA 0x03b9: 0x00a0, # GREEK SMALL LETTER IOTA 0x03ba: 0x00a1, # GREEK SMALL LETTER KAPPA 
0x03bb: 0x00a2, # GREEK SMALL LETTER LAMDA 0x03bc: 0x00a3, # GREEK SMALL LETTER MU 0x03bd: 0x00a4, # GREEK SMALL LETTER NU 0x03be: 0x00a5, # GREEK SMALL LETTER XI 0x03bf: 0x00a6, # GREEK SMALL LETTER OMICRON 0x03c0: 0x00a7, # GREEK SMALL LETTER PI 0x03c1: 0x00a8, # GREEK SMALL LETTER RHO 0x03c2: 0x00aa, # GREEK SMALL LETTER FINAL SIGMA 0x03c3: 0x00a9, # GREEK SMALL LETTER SIGMA 0x03c4: 0x00ab, # GREEK SMALL LETTER TAU 0x03c5: 0x00ac, # GREEK SMALL LETTER UPSILON 0x03c6: 0x00ad, # GREEK SMALL LETTER PHI 0x03c7: 0x00ae, # GREEK SMALL LETTER CHI 0x03c8: 0x00af, # GREEK SMALL LETTER PSI 0x03c9: 0x00e0, # GREEK SMALL LETTER OMEGA 0x03ca: 0x00e4, # GREEK SMALL LETTER IOTA WITH DIALYTIKA 0x03cb: 0x00e8, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA 0x03cc: 0x00e6, # GREEK SMALL LETTER OMICRON WITH TONOS 0x03cd: 0x00e7, # GREEK SMALL LETTER UPSILON WITH TONOS 0x03ce: 0x00e9, # GREEK SMALL LETTER OMEGA WITH TONOS 0x207f: 0x00fc, # SUPERSCRIPT LATIN SMALL LETTER N 0x2219: 0x00f9, # BULLET OPERATOR 0x221a: 0x00fb, # SQUARE ROOT 0x2248: 0x00f7, # ALMOST EQUAL TO 0x2264: 0x00f3, # LESS-THAN OR EQUAL TO 0x2265: 0x00f2, # GREATER-THAN OR EQUAL TO 0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL 0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL 0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT 0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT 0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT 0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT 0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT 0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT 0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL 0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL 0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL 0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL 0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL 0x2552: 0x00d5, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE 0x2553: 0x00d6, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE 0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT 
0x2555: 0x00b8, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE 0x2556: 0x00b7, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE 0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT 0x2558: 0x00d4, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE 0x2559: 0x00d3, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE 0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT 0x255b: 0x00be, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE 0x255c: 0x00bd, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE 0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT 0x255e: 0x00c6, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE 0x255f: 0x00c7, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE 0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT 0x2561: 0x00b5, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE 0x2562: 0x00b6, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE 0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT 0x2564: 0x00d1, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE 0x2565: 0x00d2, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE 0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL 0x2567: 0x00cf, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE 0x2568: 0x00d0, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE 0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL 0x256a: 0x00d8, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE 0x256b: 0x00d7, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE 0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL 0x2580: 0x00df, # UPPER HALF BLOCK 0x2584: 0x00dc, # LOWER HALF BLOCK 0x2588: 0x00db, # FULL BLOCK 0x258c: 0x00dd, # LEFT HALF BLOCK 0x2590: 0x00de, # RIGHT HALF BLOCK 0x2591: 0x00b0, # LIGHT SHADE 0x2592: 0x00b1, # MEDIUM SHADE 0x2593: 0x00b2, # DARK SHADE 0x25a0: 0x00fe, # BLACK SQUARE }
apache-2.0
analurandis/Tur
backend/venv/Lib/site-packages/pygments/lexers/felix.py
72
9410
# -*- coding: utf-8 -*-
"""
    pygments.lexers.felix
    ~~~~~~~~~~~~~~~~~~~~~

    Lexer for the Felix language.

    :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

from pygments.lexer import RegexLexer, include, bygroups, default, words, \
    combined
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
    Number, Punctuation

__all__ = ['FelixLexer']


class FelixLexer(RegexLexer):
    """
    For `Felix <http://www.felix-lang.org>`_ source code.

    .. versionadded:: 1.2
    """

    name = 'Felix'
    aliases = ['felix', 'flx']
    filenames = ['*.flx', '*.flxh']
    mimetypes = ['text/x-felix']

    # Preprocessor directive names (matched inside the 'macro' state family).
    preproc = (
        'elif', 'else', 'endif', 'if', 'ifdef', 'ifndef',
    )

    # Plain language keywords (no state transition attached).
    keywords = (
        '_', '_deref', 'all', 'as', 'assert', 'attempt', 'call', 'callback',
        'case', 'caseno', 'cclass', 'code', 'compound', 'ctypes', 'do', 'done',
        'downto', 'elif', 'else', 'endattempt', 'endcase', 'endif', 'endmatch',
        'enum', 'except', 'exceptions', 'expect', 'finally', 'for', 'forall',
        'forget', 'fork', 'functor', 'goto', 'ident', 'if', 'incomplete',
        'inherit', 'instance', 'interface', 'jump', 'lambda', 'loop', 'match',
        'module', 'namespace', 'new', 'noexpand', 'nonterm', 'obj', 'of',
        'open', 'parse', 'raise', 'regexp', 'reglex', 'regmatch', 'rename',
        'return', 'the', 'then', 'to', 'type', 'typecase', 'typedef',
        'typematch', 'typeof', 'upto', 'when', 'whilst', 'with', 'yield',
    )

    # Keywords highlighted as decorators (compiler directives/annotations).
    keyword_directives = (
        '_gc_pointer', '_gc_type', 'body', 'comment', 'const', 'export',
        'header', 'inline', 'lval', 'macro', 'noinline', 'noreturn', 'package',
        'private', 'pod', 'property', 'public', 'publish', 'requires', 'todo',
        'virtual', 'use',
    )

    # Binding/declaration keywords.
    keyword_declarations = (
        'def', 'let', 'ref', 'val', 'var',
    )

    # Built-in type names.
    keyword_types = (
        'unit', 'void', 'any', 'bool',
        'byte', 'offset',
        'address', 'caddress', 'cvaddress', 'vaddress',
        'tiny', 'short', 'int', 'long', 'vlong',
        'utiny', 'ushort', 'vshort', 'uint', 'ulong', 'uvlong',
        'int8', 'int16', 'int32', 'int64',
        'uint8', 'uint16', 'uint32', 'uint64',
        'float', 'double', 'ldouble',
        'complex', 'dcomplex', 'lcomplex',
        'imaginary', 'dimaginary', 'limaginary',
        'char', 'wchar', 'uchar',
        'charp', 'charcp', 'ucharp', 'ucharcp',
        'string', 'wstring', 'ustring',
        'cont',
        'array', 'varray', 'list',
        'lvalue', 'opt', 'slice',
    )

    # Built-in constants.
    keyword_constants = (
        'false', 'true',
    )

    # Word-like operators.
    operator_words = (
        'and', 'not', 'in', 'is', 'isin', 'or', 'xor',
    )

    name_builtins = (
        '_svc', 'while',
    )

    name_pseudo = (
        'root', 'self', 'this',
    )

    # Optional suffix allowed on integer literals (size/signedness markers).
    decimal_suffixes = '([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?'

    tokens = {
        'root': [
            include('whitespace'),

            # Keywords that introduce a named definition push a sub-state
            # so the following identifier gets the right token type.
            (words(('axiom', 'ctor', 'fun', 'gen', 'proc', 'reduce',
                    'union'), suffix=r'\b'),
             Keyword, 'funcname'),
            (words(('class', 'cclass', 'cstruct', 'obj', 'struct'), suffix=r'\b'),
             Keyword, 'classname'),
            (r'(instance|module|typeclass)\b', Keyword, 'modulename'),

            (words(keywords, suffix=r'\b'), Keyword),
            (words(keyword_directives, suffix=r'\b'), Name.Decorator),
            (words(keyword_declarations, suffix=r'\b'), Keyword.Declaration),
            (words(keyword_types, suffix=r'\b'), Keyword.Type),
            (words(keyword_constants, suffix=r'\b'), Keyword.Constant),

            # Operators
            include('operators'),

            # Float Literal
            # -- Hex Float
            (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)'
             r'[pP][+\-]?[0-9_]+[lLfFdD]?', Number.Float),
            # -- DecimalFloat
            (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|'
             r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[lLfFdD]?', Number.Float),
            (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[lLfFdD]?', Number.Float),

            # IntegerLiteral
            # -- Binary
            (r'0[Bb][01_]+%s' % decimal_suffixes, Number.Bin),
            # -- Octal
            (r'0[0-7_]+%s' % decimal_suffixes, Number.Oct),
            # -- Hexadecimal
            (r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes, Number.Hex),
            # -- Decimal
            (r'(0|[1-9][0-9_]*)%s' % decimal_suffixes, Number.Integer),

            # Strings
            # Raw-flavoured strings skip the 'stringescape' state; all
            # others combine it with the matching quote state.
            ('([rR][cC]?|[cC][rR])"""', String, 'tdqs'),
            ("([rR][cC]?|[cC][rR])'''", String, 'tsqs'),
            ('([rR][cC]?|[cC][rR])"', String, 'dqs'),
            ("([rR][cC]?|[cC][rR])'", String, 'sqs'),
            ('[cCfFqQwWuU]?"""', String, combined('stringescape', 'tdqs')),
            ("[cCfFqQwWuU]?'''", String, combined('stringescape', 'tsqs')),
            ('[cCfFqQwWuU]?"', String, combined('stringescape', 'dqs')),
            ("[cCfFqQwWuU]?'", String, combined('stringescape', 'sqs')),

            # Punctuation
            (r'[\[\]{}:(),;?]', Punctuation),

            # Labels
            (r'[a-zA-Z_]\w*:>', Name.Label),

            # Identifiers
            (r'(%s)\b' % '|'.join(name_builtins), Name.Builtin),
            (r'(%s)\b' % '|'.join(name_pseudo), Name.Builtin.Pseudo),
            (r'[a-zA-Z_]\w*', Name),
        ],
        'whitespace': [
            (r'\n', Text),
            (r'\s+', Text),

            include('comment'),

            # Preprocessor
            (r'#\s*if\s+0', Comment.Preproc, 'if0'),
            (r'#', Comment.Preproc, 'macro'),
        ],
        'operators': [
            (r'(%s)\b' % '|'.join(operator_words), Operator.Word),
            (r'!=|==|<<|>>|\|\||&&|[-~+/*%=<>&^|.$]', Operator),
        ],
        'comment': [
            (r'//(.*?)\n', Comment.Single),
            (r'/[*]', Comment.Multiline, 'comment2'),
        ],
        # Nested /* ... */ comments: '#push' re-enters on an inner open.
        'comment2': [
            (r'[^/*]', Comment.Multiline),
            (r'/[*]', Comment.Multiline, '#push'),
            (r'[*]/', Comment.Multiline, '#pop'),
            (r'[/*]', Comment.Multiline),
        ],
        # Everything inside "#if 0 ... #endif" is dead code; nested #if
        # blocks push so only the matching #endif pops.
        'if0': [
            (r'^\s*#if.*?(?<!\\)\n', Comment, '#push'),
            (r'^\s*#endif.*?(?<!\\)\n', Comment, '#pop'),
            (r'.*?\n', Comment),
        ],
        'macro': [
            include('comment'),
            (r'(import|include)(\s+)(<[^>]*?>)',
             bygroups(Comment.Preproc, Text, String), '#pop'),
            (r'(import|include)(\s+)("[^"]*?")',
             bygroups(Comment.Preproc, Text, String), '#pop'),
            (r"(import|include)(\s+)('[^']*?')",
             bygroups(Comment.Preproc, Text, String), '#pop'),
            (r'[^/\n]+', Comment.Preproc),
            # (r'/[*](.|\n)*?[*]/', Comment),
            # (r'//.*?\n', Comment, '#pop'),
            (r'/', Comment.Preproc),
            # A backslash-escaped newline continues the directive.
            (r'(?<=\\)\n', Comment.Preproc),
            (r'\n', Comment.Preproc, '#pop'),
        ],
        'funcname': [
            include('whitespace'),
            (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
            # anonymous functions
            (r'(?=\()', Text, '#pop'),
        ],
        'classname': [
            include('whitespace'),
            (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
            # anonymous classes
            (r'(?=\{)', Text, '#pop'),
        ],
        # Module headers may carry a bracketed type-variable list before
        # the name, e.g. "module[T] Foo".
        'modulename': [
            include('whitespace'),
            (r'\[', Punctuation, ('modulename2', 'tvarlist')),
            default('modulename2'),
        ],
        'modulename2': [
            include('whitespace'),
            (r'([a-zA-Z_]\w*)', Name.Namespace, '#pop:2'),
        ],
        'tvarlist': [
            include('whitespace'),
            include('operators'),
            (r'\[', Punctuation, '#push'),
            (r'\]', Punctuation, '#pop'),
            (r',', Punctuation),
            (r'(with|where)\b', Keyword),
            (r'[a-zA-Z_]\w*', Name),
        ],
        'stringescape': [
            (r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
             r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
        ],
        # Shared body rules for all quoted-string states.
        'strings': [
            (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
             '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol),
            (r'[^\\\'"%\n]+', String),
            # quotes, percents and backslashes must be parsed one at a time
            (r'[\'"\\]', String),
            # unhandled string formatting sign
            (r'%', String)
            # newlines are an error (use "nl" state)
        ],
        # Only triple-quoted states include this, so a bare newline inside
        # a single-quoted string falls through and surfaces as an error.
        'nl': [
            (r'\n', String)
        ],
        'dqs': [
            (r'"', String, '#pop'),
            # included here again for raw strings
            (r'\\\\|\\"|\\\n', String.Escape),
            include('strings')
        ],
        'sqs': [
            (r"'", String, '#pop'),
            # included here again for raw strings
            (r"\\\\|\\'|\\\n", String.Escape),
            include('strings')
        ],
        'tdqs': [
            (r'"""', String, '#pop'),
            include('strings'),
            include('nl')
        ],
        'tsqs': [
            (r"'''", String, '#pop'),
            include('strings'),
            include('nl')
        ],
    }
mit
kenshay/ImageScript
ProgramData/SystemFiles/Python/Lib/site-packages/pip-9.0.1-py2.7.egg/pip/_vendor/html5lib/_tokenizer.py
385
76580
from __future__ import absolute_import, division, unicode_literals from pip._vendor.six import unichr as chr from collections import deque from .constants import spaceCharacters from .constants import entities from .constants import asciiLetters, asciiUpper2Lower from .constants import digits, hexDigits, EOF from .constants import tokenTypes, tagTokenTypes from .constants import replacementCharacters from ._inputstream import HTMLInputStream from ._trie import Trie entitiesTrie = Trie(entities) class HTMLTokenizer(object): """ This class takes care of tokenizing HTML. * self.currentToken Holds the token that is currently being processed. * self.state Holds a reference to the method to be invoked... XXX * self.stream Points to HTMLInputStream object. """ def __init__(self, stream, parser=None, **kwargs): self.stream = HTMLInputStream(stream, **kwargs) self.parser = parser # Setup the initial tokenizer state self.escapeFlag = False self.lastFourChars = [] self.state = self.dataState self.escape = False # The current token being created self.currentToken = None super(HTMLTokenizer, self).__init__() def __iter__(self): """ This is where the magic happens. We do our usually processing through the states and when we have a token to return we yield the token which pauses processing until the next token is requested. """ self.tokenQueue = deque([]) # Start processing. When EOF is reached self.state will return False # instead of True and the loop will terminate. while self.state(): while self.stream.errors: yield {"type": tokenTypes["ParseError"], "data": self.stream.errors.pop(0)} while self.tokenQueue: yield self.tokenQueue.popleft() def consumeNumberEntity(self, isHex): """This function returns either U+FFFD or the character based on the decimal or hexadecimal representation. It also discards ";" if present. If not present self.tokenQueue.append({"type": tokenTypes["ParseError"]}) is invoked. 
""" allowed = digits radix = 10 if isHex: allowed = hexDigits radix = 16 charStack = [] # Consume all the characters that are in range while making sure we # don't hit an EOF. c = self.stream.char() while c in allowed and c is not EOF: charStack.append(c) c = self.stream.char() # Convert the set of characters consumed to an int. charAsInt = int("".join(charStack), radix) # Certain characters get replaced with others if charAsInt in replacementCharacters: char = replacementCharacters[charAsInt] self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "illegal-codepoint-for-numeric-entity", "datavars": {"charAsInt": charAsInt}}) elif ((0xD800 <= charAsInt <= 0xDFFF) or (charAsInt > 0x10FFFF)): char = "\uFFFD" self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "illegal-codepoint-for-numeric-entity", "datavars": {"charAsInt": charAsInt}}) else: # Should speed up this check somehow (e.g. move the set to a constant) if ((0x0001 <= charAsInt <= 0x0008) or (0x000E <= charAsInt <= 0x001F) or (0x007F <= charAsInt <= 0x009F) or (0xFDD0 <= charAsInt <= 0xFDEF) or charAsInt in frozenset([0x000B, 0xFFFE, 0xFFFF, 0x1FFFE, 0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE, 0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE, 0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE, 0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE, 0x9FFFF, 0xAFFFE, 0xAFFFF, 0xBFFFE, 0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE, 0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE, 0xFFFFF, 0x10FFFE, 0x10FFFF])): self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "illegal-codepoint-for-numeric-entity", "datavars": {"charAsInt": charAsInt}}) try: # Try/except needed as UCS-2 Python builds' unichar only works # within the BMP. char = chr(charAsInt) except ValueError: v = charAsInt - 0x10000 char = chr(0xD800 | (v >> 10)) + chr(0xDC00 | (v & 0x3FF)) # Discard the ; if present. Otherwise, put it back on the queue and # invoke parseError on parser. 
if c != ";": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "numeric-entity-without-semicolon"}) self.stream.unget(c) return char def consumeEntity(self, allowedChar=None, fromAttribute=False): # Initialise to the default output for when no entity is matched output = "&" charStack = [self.stream.char()] if (charStack[0] in spaceCharacters or charStack[0] in (EOF, "<", "&") or (allowedChar is not None and allowedChar == charStack[0])): self.stream.unget(charStack[0]) elif charStack[0] == "#": # Read the next character to see if it's hex or decimal hex = False charStack.append(self.stream.char()) if charStack[-1] in ("x", "X"): hex = True charStack.append(self.stream.char()) # charStack[-1] should be the first digit if (hex and charStack[-1] in hexDigits) \ or (not hex and charStack[-1] in digits): # At least one digit found, so consume the whole number self.stream.unget(charStack[-1]) output = self.consumeNumberEntity(hex) else: # No digits found self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-numeric-entity"}) self.stream.unget(charStack.pop()) output = "&" + "".join(charStack) else: # At this point in the process might have named entity. Entities # are stored in the global variable "entities". # # Consume characters and compare to these to a substring of the # entity names in the list until the substring no longer matches. while (charStack[-1] is not EOF): if not entitiesTrie.has_keys_with_prefix("".join(charStack)): break charStack.append(self.stream.char()) # At this point we have a string that starts with some characters # that may match an entity # Try to find the longest entity the string will match to take care # of &noti for instance. 
try: entityName = entitiesTrie.longest_prefix("".join(charStack[:-1])) entityLength = len(entityName) except KeyError: entityName = None if entityName is not None: if entityName[-1] != ";": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "named-entity-without-semicolon"}) if (entityName[-1] != ";" and fromAttribute and (charStack[entityLength] in asciiLetters or charStack[entityLength] in digits or charStack[entityLength] == "=")): self.stream.unget(charStack.pop()) output = "&" + "".join(charStack) else: output = entities[entityName] self.stream.unget(charStack.pop()) output += "".join(charStack[entityLength:]) else: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-named-entity"}) self.stream.unget(charStack.pop()) output = "&" + "".join(charStack) if fromAttribute: self.currentToken["data"][-1][1] += output else: if output in spaceCharacters: tokenType = "SpaceCharacters" else: tokenType = "Characters" self.tokenQueue.append({"type": tokenTypes[tokenType], "data": output}) def processEntityInAttribute(self, allowedChar): """This method replaces the need for "entityInAttributeValueState". """ self.consumeEntity(allowedChar=allowedChar, fromAttribute=True) def emitCurrentToken(self): """This method is a generic handler for emitting the tags. It also sets the state to "data" because that's what's needed after a token has been emitted. """ token = self.currentToken # Add token to the queue to be yielded if (token["type"] in tagTokenTypes): token["name"] = token["name"].translate(asciiUpper2Lower) if token["type"] == tokenTypes["EndTag"]: if token["data"]: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "attributes-in-end-tag"}) if token["selfClosing"]: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "self-closing-flag-on-end-tag"}) self.tokenQueue.append(token) self.state = self.dataState # Below are the various tokenizer states worked out. 
def dataState(self): data = self.stream.char() if data == "&": self.state = self.entityDataState elif data == "<": self.state = self.tagOpenState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\u0000"}) elif data is EOF: # Tokenization ends. return False elif data in spaceCharacters: # Directly after emitting a token you switch back to the "data # state". At that point spaceCharacters are important so they are # emitted separately. self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": data + self.stream.charsUntil(spaceCharacters, True)}) # No need to update lastFourChars here, since the first space will # have already been appended to lastFourChars and will have broken # any <!-- or --> sequences else: chars = self.stream.charsUntil(("&", "<", "\u0000")) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data + chars}) return True def entityDataState(self): self.consumeEntity() self.state = self.dataState return True def rcdataState(self): data = self.stream.char() if data == "&": self.state = self.characterReferenceInRcdata elif data == "<": self.state = self.rcdataLessThanSignState elif data == EOF: # Tokenization ends. return False elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) elif data in spaceCharacters: # Directly after emitting a token you switch back to the "data # state". At that point spaceCharacters are important so they are # emitted separately. 
self.tokenQueue.append({"type": tokenTypes["SpaceCharacters"], "data": data + self.stream.charsUntil(spaceCharacters, True)}) # No need to update lastFourChars here, since the first space will # have already been appended to lastFourChars and will have broken # any <!-- or --> sequences else: chars = self.stream.charsUntil(("&", "<", "\u0000")) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data + chars}) return True def characterReferenceInRcdata(self): self.consumeEntity() self.state = self.rcdataState return True def rawtextState(self): data = self.stream.char() if data == "<": self.state = self.rawtextLessThanSignState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) elif data == EOF: # Tokenization ends. return False else: chars = self.stream.charsUntil(("<", "\u0000")) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data + chars}) return True def scriptDataState(self): data = self.stream.char() if data == "<": self.state = self.scriptDataLessThanSignState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) elif data == EOF: # Tokenization ends. return False else: chars = self.stream.charsUntil(("<", "\u0000")) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data + chars}) return True def plaintextState(self): data = self.stream.char() if data == EOF: # Tokenization ends. 
return False elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data + self.stream.charsUntil("\u0000")}) return True def tagOpenState(self): data = self.stream.char() if data == "!": self.state = self.markupDeclarationOpenState elif data == "/": self.state = self.closeTagOpenState elif data in asciiLetters: self.currentToken = {"type": tokenTypes["StartTag"], "name": data, "data": [], "selfClosing": False, "selfClosingAcknowledged": False} self.state = self.tagNameState elif data == ">": # XXX In theory it could be something besides a tag name. But # do we really care? self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-tag-name-but-got-right-bracket"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<>"}) self.state = self.dataState elif data == "?": # XXX In theory it could be something besides a tag name. But # do we really care? 
self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-tag-name-but-got-question-mark"}) self.stream.unget(data) self.state = self.bogusCommentState else: # XXX self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-tag-name"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) self.stream.unget(data) self.state = self.dataState return True def closeTagOpenState(self): data = self.stream.char() if data in asciiLetters: self.currentToken = {"type": tokenTypes["EndTag"], "name": data, "data": [], "selfClosing": False} self.state = self.tagNameState elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-closing-tag-but-got-right-bracket"}) self.state = self.dataState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-closing-tag-but-got-eof"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) self.state = self.dataState else: # XXX data can be _'_... 
self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-closing-tag-but-got-char", "datavars": {"data": data}}) self.stream.unget(data) self.state = self.bogusCommentState return True def tagNameState(self): data = self.stream.char() if data in spaceCharacters: self.state = self.beforeAttributeNameState elif data == ">": self.emitCurrentToken() elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-tag-name"}) self.state = self.dataState elif data == "/": self.state = self.selfClosingStartTagState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["name"] += "\uFFFD" else: self.currentToken["name"] += data # (Don't use charsUntil here, because tag names are # very short and it's faster to not do anything fancy) return True def rcdataLessThanSignState(self): data = self.stream.char() if data == "/": self.temporaryBuffer = "" self.state = self.rcdataEndTagOpenState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) self.stream.unget(data) self.state = self.rcdataState return True def rcdataEndTagOpenState(self): data = self.stream.char() if data in asciiLetters: self.temporaryBuffer += data self.state = self.rcdataEndTagNameState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) self.stream.unget(data) self.state = self.rcdataState return True def rcdataEndTagNameState(self): appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower() data = self.stream.char() if data in spaceCharacters and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.state = self.beforeAttributeNameState elif data == "/" and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.state = 
self.selfClosingStartTagState elif data == ">" and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.emitCurrentToken() self.state = self.dataState elif data in asciiLetters: self.temporaryBuffer += data else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</" + self.temporaryBuffer}) self.stream.unget(data) self.state = self.rcdataState return True def rawtextLessThanSignState(self): data = self.stream.char() if data == "/": self.temporaryBuffer = "" self.state = self.rawtextEndTagOpenState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) self.stream.unget(data) self.state = self.rawtextState return True def rawtextEndTagOpenState(self): data = self.stream.char() if data in asciiLetters: self.temporaryBuffer += data self.state = self.rawtextEndTagNameState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) self.stream.unget(data) self.state = self.rawtextState return True def rawtextEndTagNameState(self): appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower() data = self.stream.char() if data in spaceCharacters and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.state = self.beforeAttributeNameState elif data == "/" and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.state = self.selfClosingStartTagState elif data == ">" and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.emitCurrentToken() self.state = self.dataState elif data in asciiLetters: self.temporaryBuffer += data else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</" + self.temporaryBuffer}) self.stream.unget(data) self.state = 
self.rawtextState return True def scriptDataLessThanSignState(self): data = self.stream.char() if data == "/": self.temporaryBuffer = "" self.state = self.scriptDataEndTagOpenState elif data == "!": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<!"}) self.state = self.scriptDataEscapeStartState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) self.stream.unget(data) self.state = self.scriptDataState return True def scriptDataEndTagOpenState(self): data = self.stream.char() if data in asciiLetters: self.temporaryBuffer += data self.state = self.scriptDataEndTagNameState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) self.stream.unget(data) self.state = self.scriptDataState return True def scriptDataEndTagNameState(self): appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower() data = self.stream.char() if data in spaceCharacters and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.state = self.beforeAttributeNameState elif data == "/" and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.state = self.selfClosingStartTagState elif data == ">" and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.emitCurrentToken() self.state = self.dataState elif data in asciiLetters: self.temporaryBuffer += data else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</" + self.temporaryBuffer}) self.stream.unget(data) self.state = self.scriptDataState return True def scriptDataEscapeStartState(self): data = self.stream.char() if data == "-": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) self.state = self.scriptDataEscapeStartDashState else: self.stream.unget(data) 
self.state = self.scriptDataState return True def scriptDataEscapeStartDashState(self): data = self.stream.char() if data == "-": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) self.state = self.scriptDataEscapedDashDashState else: self.stream.unget(data) self.state = self.scriptDataState return True def scriptDataEscapedState(self): data = self.stream.char() if data == "-": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) self.state = self.scriptDataEscapedDashState elif data == "<": self.state = self.scriptDataEscapedLessThanSignState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) elif data == EOF: self.state = self.dataState else: chars = self.stream.charsUntil(("<", "-", "\u0000")) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data + chars}) return True def scriptDataEscapedDashState(self): data = self.stream.char() if data == "-": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) self.state = self.scriptDataEscapedDashDashState elif data == "<": self.state = self.scriptDataEscapedLessThanSignState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) self.state = self.scriptDataEscapedState elif data == EOF: self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) self.state = self.scriptDataEscapedState return True def scriptDataEscapedDashDashState(self): data = self.stream.char() if data == "-": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) elif data == "<": self.state = self.scriptDataEscapedLessThanSignState elif data == ">": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) self.state = 
self.scriptDataState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) self.state = self.scriptDataEscapedState elif data == EOF: self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) self.state = self.scriptDataEscapedState return True def scriptDataEscapedLessThanSignState(self): data = self.stream.char() if data == "/": self.temporaryBuffer = "" self.state = self.scriptDataEscapedEndTagOpenState elif data in asciiLetters: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<" + data}) self.temporaryBuffer = data self.state = self.scriptDataDoubleEscapeStartState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) self.stream.unget(data) self.state = self.scriptDataEscapedState return True def scriptDataEscapedEndTagOpenState(self): data = self.stream.char() if data in asciiLetters: self.temporaryBuffer = data self.state = self.scriptDataEscapedEndTagNameState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</"}) self.stream.unget(data) self.state = self.scriptDataEscapedState return True def scriptDataEscapedEndTagNameState(self): appropriate = self.currentToken and self.currentToken["name"].lower() == self.temporaryBuffer.lower() data = self.stream.char() if data in spaceCharacters and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.state = self.beforeAttributeNameState elif data == "/" and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} self.state = self.selfClosingStartTagState elif data == ">" and appropriate: self.currentToken = {"type": tokenTypes["EndTag"], "name": self.temporaryBuffer, "data": [], "selfClosing": False} 
self.emitCurrentToken() self.state = self.dataState elif data in asciiLetters: self.temporaryBuffer += data else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "</" + self.temporaryBuffer}) self.stream.unget(data) self.state = self.scriptDataEscapedState return True def scriptDataDoubleEscapeStartState(self): data = self.stream.char() if data in (spaceCharacters | frozenset(("/", ">"))): self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) if self.temporaryBuffer.lower() == "script": self.state = self.scriptDataDoubleEscapedState else: self.state = self.scriptDataEscapedState elif data in asciiLetters: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) self.temporaryBuffer += data else: self.stream.unget(data) self.state = self.scriptDataEscapedState return True def scriptDataDoubleEscapedState(self): data = self.stream.char() if data == "-": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) self.state = self.scriptDataDoubleEscapedDashState elif data == "<": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) self.state = self.scriptDataDoubleEscapedLessThanSignState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) elif data == EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-script-in-script"}) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) return True def scriptDataDoubleEscapedDashState(self): data = self.stream.char() if data == "-": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) self.state = self.scriptDataDoubleEscapedDashDashState elif data == "<": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) self.state = self.scriptDataDoubleEscapedLessThanSignState elif data == "\u0000": 
self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) self.state = self.scriptDataDoubleEscapedState elif data == EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-script-in-script"}) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) self.state = self.scriptDataDoubleEscapedState return True def scriptDataDoubleEscapedDashDashState(self): data = self.stream.char() if data == "-": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "-"}) elif data == "<": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "<"}) self.state = self.scriptDataDoubleEscapedLessThanSignState elif data == ">": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": ">"}) self.state = self.scriptDataState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "\uFFFD"}) self.state = self.scriptDataDoubleEscapedState elif data == EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-script-in-script"}) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) self.state = self.scriptDataDoubleEscapedState return True def scriptDataDoubleEscapedLessThanSignState(self): data = self.stream.char() if data == "/": self.tokenQueue.append({"type": tokenTypes["Characters"], "data": "/"}) self.temporaryBuffer = "" self.state = self.scriptDataDoubleEscapeEndState else: self.stream.unget(data) self.state = self.scriptDataDoubleEscapedState return True def scriptDataDoubleEscapeEndState(self): data = self.stream.char() if data in (spaceCharacters | frozenset(("/", ">"))): self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) if self.temporaryBuffer.lower() == 
"script": self.state = self.scriptDataEscapedState else: self.state = self.scriptDataDoubleEscapedState elif data in asciiLetters: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) self.temporaryBuffer += data else: self.stream.unget(data) self.state = self.scriptDataDoubleEscapedState return True def beforeAttributeNameState(self): data = self.stream.char() if data in spaceCharacters: self.stream.charsUntil(spaceCharacters, True) elif data in asciiLetters: self.currentToken["data"].append([data, ""]) self.state = self.attributeNameState elif data == ">": self.emitCurrentToken() elif data == "/": self.state = self.selfClosingStartTagState elif data in ("'", '"', "=", "<"): self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-character-in-attribute-name"}) self.currentToken["data"].append([data, ""]) self.state = self.attributeNameState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"].append(["\uFFFD", ""]) self.state = self.attributeNameState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-attribute-name-but-got-eof"}) self.state = self.dataState else: self.currentToken["data"].append([data, ""]) self.state = self.attributeNameState return True def attributeNameState(self): data = self.stream.char() leavingThisState = True emitToken = False if data == "=": self.state = self.beforeAttributeValueState elif data in asciiLetters: self.currentToken["data"][-1][0] += data +\ self.stream.charsUntil(asciiLetters, True) leavingThisState = False elif data == ">": # XXX If we emit here the attributes are converted to a dict # without being checked and when the code below runs we error # because data is a dict not a list emitToken = True elif data in spaceCharacters: self.state = self.afterAttributeNameState elif data == "/": self.state = self.selfClosingStartTagState elif data == "\u0000": 
self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"][-1][0] += "\uFFFD" leavingThisState = False elif data in ("'", '"', "<"): self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-character-in-attribute-name"}) self.currentToken["data"][-1][0] += data leavingThisState = False elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-attribute-name"}) self.state = self.dataState else: self.currentToken["data"][-1][0] += data leavingThisState = False if leavingThisState: # Attributes are not dropped at this stage. That happens when the # start tag token is emitted so values can still be safely appended # to attributes, but we do want to report the parse error in time. self.currentToken["data"][-1][0] = ( self.currentToken["data"][-1][0].translate(asciiUpper2Lower)) for name, _ in self.currentToken["data"][:-1]: if self.currentToken["data"][-1][0] == name: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "duplicate-attribute"}) break # XXX Fix for above XXX if emitToken: self.emitCurrentToken() return True def afterAttributeNameState(self): data = self.stream.char() if data in spaceCharacters: self.stream.charsUntil(spaceCharacters, True) elif data == "=": self.state = self.beforeAttributeValueState elif data == ">": self.emitCurrentToken() elif data in asciiLetters: self.currentToken["data"].append([data, ""]) self.state = self.attributeNameState elif data == "/": self.state = self.selfClosingStartTagState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"].append(["\uFFFD", ""]) self.state = self.attributeNameState elif data in ("'", '"', "<"): self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-character-after-attribute-name"}) self.currentToken["data"].append([data, ""]) self.state = self.attributeNameState elif data is EOF: 
self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-end-of-tag-but-got-eof"}) self.state = self.dataState else: self.currentToken["data"].append([data, ""]) self.state = self.attributeNameState return True def beforeAttributeValueState(self): data = self.stream.char() if data in spaceCharacters: self.stream.charsUntil(spaceCharacters, True) elif data == "\"": self.state = self.attributeValueDoubleQuotedState elif data == "&": self.state = self.attributeValueUnQuotedState self.stream.unget(data) elif data == "'": self.state = self.attributeValueSingleQuotedState elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-attribute-value-but-got-right-bracket"}) self.emitCurrentToken() elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"][-1][1] += "\uFFFD" self.state = self.attributeValueUnQuotedState elif data in ("=", "<", "`"): self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "equals-in-unquoted-attribute-value"}) self.currentToken["data"][-1][1] += data self.state = self.attributeValueUnQuotedState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-attribute-value-but-got-eof"}) self.state = self.dataState else: self.currentToken["data"][-1][1] += data self.state = self.attributeValueUnQuotedState return True def attributeValueDoubleQuotedState(self): data = self.stream.char() if data == "\"": self.state = self.afterAttributeValueState elif data == "&": self.processEntityInAttribute('"') elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"][-1][1] += "\uFFFD" elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-attribute-value-double-quote"}) self.state = self.dataState else: self.currentToken["data"][-1][1] += data +\ 
self.stream.charsUntil(("\"", "&", "\u0000")) return True def attributeValueSingleQuotedState(self): data = self.stream.char() if data == "'": self.state = self.afterAttributeValueState elif data == "&": self.processEntityInAttribute("'") elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"][-1][1] += "\uFFFD" elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-attribute-value-single-quote"}) self.state = self.dataState else: self.currentToken["data"][-1][1] += data +\ self.stream.charsUntil(("'", "&", "\u0000")) return True def attributeValueUnQuotedState(self): data = self.stream.char() if data in spaceCharacters: self.state = self.beforeAttributeNameState elif data == "&": self.processEntityInAttribute(">") elif data == ">": self.emitCurrentToken() elif data in ('"', "'", "=", "<", "`"): self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-character-in-unquoted-attribute-value"}) self.currentToken["data"][-1][1] += data elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"][-1][1] += "\uFFFD" elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-attribute-value-no-quotes"}) self.state = self.dataState else: self.currentToken["data"][-1][1] += data + self.stream.charsUntil( frozenset(("&", ">", '"', "'", "=", "<", "`", "\u0000")) | spaceCharacters) return True def afterAttributeValueState(self): data = self.stream.char() if data in spaceCharacters: self.state = self.beforeAttributeNameState elif data == ">": self.emitCurrentToken() elif data == "/": self.state = self.selfClosingStartTagState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-EOF-after-attribute-value"}) self.stream.unget(data) self.state = self.dataState else: 
self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-character-after-attribute-value"}) self.stream.unget(data) self.state = self.beforeAttributeNameState return True def selfClosingStartTagState(self): data = self.stream.char() if data == ">": self.currentToken["selfClosing"] = True self.emitCurrentToken() elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-EOF-after-solidus-in-tag"}) self.stream.unget(data) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-character-after-solidus-in-tag"}) self.stream.unget(data) self.state = self.beforeAttributeNameState return True def bogusCommentState(self): # Make a new comment token and give it as value all the characters # until the first > or EOF (charsUntil checks for EOF automatically) # and emit it. data = self.stream.charsUntil(">") data = data.replace("\u0000", "\uFFFD") self.tokenQueue.append( {"type": tokenTypes["Comment"], "data": data}) # Eat the character directly after the bogus comment which is either a # ">" or an EOF. 
self.stream.char() self.state = self.dataState return True def markupDeclarationOpenState(self): charStack = [self.stream.char()] if charStack[-1] == "-": charStack.append(self.stream.char()) if charStack[-1] == "-": self.currentToken = {"type": tokenTypes["Comment"], "data": ""} self.state = self.commentStartState return True elif charStack[-1] in ('d', 'D'): matched = True for expected in (('o', 'O'), ('c', 'C'), ('t', 'T'), ('y', 'Y'), ('p', 'P'), ('e', 'E')): charStack.append(self.stream.char()) if charStack[-1] not in expected: matched = False break if matched: self.currentToken = {"type": tokenTypes["Doctype"], "name": "", "publicId": None, "systemId": None, "correct": True} self.state = self.doctypeState return True elif (charStack[-1] == "[" and self.parser is not None and self.parser.tree.openElements and self.parser.tree.openElements[-1].namespace != self.parser.tree.defaultNamespace): matched = True for expected in ["C", "D", "A", "T", "A", "["]: charStack.append(self.stream.char()) if charStack[-1] != expected: matched = False break if matched: self.state = self.cdataSectionState return True self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-dashes-or-doctype"}) while charStack: self.stream.unget(charStack.pop()) self.state = self.bogusCommentState return True def commentStartState(self): data = self.stream.char() if data == "-": self.state = self.commentStartDashState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"] += "\uFFFD" elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "incorrect-comment"}) self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-comment"}) self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.currentToken["data"] += data self.state = self.commentState 
return True def commentStartDashState(self): data = self.stream.char() if data == "-": self.state = self.commentEndState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"] += "-\uFFFD" elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "incorrect-comment"}) self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-comment"}) self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.currentToken["data"] += "-" + data self.state = self.commentState return True def commentState(self): data = self.stream.char() if data == "-": self.state = self.commentEndDashState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"] += "\uFFFD" elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-comment"}) self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.currentToken["data"] += data + \ self.stream.charsUntil(("-", "\u0000")) return True def commentEndDashState(self): data = self.stream.char() if data == "-": self.state = self.commentEndState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"] += "-\uFFFD" self.state = self.commentState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-comment-end-dash"}) self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.currentToken["data"] += "-" + data self.state = self.commentState return True def commentEndState(self): data = self.stream.char() if data == ">": self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data == "\u0000": self.tokenQueue.append({"type": 
tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"] += "--\uFFFD" self.state = self.commentState elif data == "!": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-bang-after-double-dash-in-comment"}) self.state = self.commentEndBangState elif data == "-": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-dash-after-double-dash-in-comment"}) self.currentToken["data"] += data elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-comment-double-dash"}) self.tokenQueue.append(self.currentToken) self.state = self.dataState else: # XXX self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-comment"}) self.currentToken["data"] += "--" + data self.state = self.commentState return True def commentEndBangState(self): data = self.stream.char() if data == ">": self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data == "-": self.currentToken["data"] += "--!" self.state = self.commentEndDashState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["data"] += "--!\uFFFD" self.state = self.commentState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-comment-end-bang-state"}) self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.currentToken["data"] += "--!" 
+ data self.state = self.commentState return True def doctypeState(self): data = self.stream.char() if data in spaceCharacters: self.state = self.beforeDoctypeNameState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-doctype-name-but-got-eof"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "need-space-after-doctype"}) self.stream.unget(data) self.state = self.beforeDoctypeNameState return True def beforeDoctypeNameState(self): data = self.stream.char() if data in spaceCharacters: pass elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-doctype-name-but-got-right-bracket"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["name"] = "\uFFFD" self.state = self.doctypeNameState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-doctype-name-but-got-eof"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.currentToken["name"] = data self.state = self.doctypeNameState return True def doctypeNameState(self): data = self.stream.char() if data in spaceCharacters: self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) self.state = self.afterDoctypeNameState elif data == ">": self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["name"] += "\uFFFD" self.state = self.doctypeNameState elif data is EOF: 
self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype-name"}) self.currentToken["correct"] = False self.currentToken["name"] = self.currentToken["name"].translate(asciiUpper2Lower) self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.currentToken["name"] += data return True def afterDoctypeNameState(self): data = self.stream.char() if data in spaceCharacters: pass elif data == ">": self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data is EOF: self.currentToken["correct"] = False self.stream.unget(data) self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"}) self.tokenQueue.append(self.currentToken) self.state = self.dataState else: if data in ("p", "P"): matched = True for expected in (("u", "U"), ("b", "B"), ("l", "L"), ("i", "I"), ("c", "C")): data = self.stream.char() if data not in expected: matched = False break if matched: self.state = self.afterDoctypePublicKeywordState return True elif data in ("s", "S"): matched = True for expected in (("y", "Y"), ("s", "S"), ("t", "T"), ("e", "E"), ("m", "M")): data = self.stream.char() if data not in expected: matched = False break if matched: self.state = self.afterDoctypeSystemKeywordState return True # All the characters read before the current 'data' will be # [a-zA-Z], so they're garbage in the bogus doctype and can be # discarded; only the latest character might be '>' or EOF # and needs to be ungetted self.stream.unget(data) self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "expected-space-or-right-bracket-in-doctype", "datavars": {"data": data}}) self.currentToken["correct"] = False self.state = self.bogusDoctypeState return True def afterDoctypePublicKeywordState(self): data = self.stream.char() if data in spaceCharacters: self.state = self.beforeDoctypePublicIdentifierState elif data in ("'", '"'): self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": 
"unexpected-char-in-doctype"}) self.stream.unget(data) self.state = self.beforeDoctypePublicIdentifierState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.stream.unget(data) self.state = self.beforeDoctypePublicIdentifierState return True def beforeDoctypePublicIdentifierState(self): data = self.stream.char() if data in spaceCharacters: pass elif data == "\"": self.currentToken["publicId"] = "" self.state = self.doctypePublicIdentifierDoubleQuotedState elif data == "'": self.currentToken["publicId"] = "" self.state = self.doctypePublicIdentifierSingleQuotedState elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-end-of-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"}) self.currentToken["correct"] = False self.state = self.bogusDoctypeState return True def doctypePublicIdentifierDoubleQuotedState(self): data = self.stream.char() if data == "\"": self.state = self.afterDoctypePublicIdentifierState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["publicId"] += "\uFFFD" elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-end-of-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"}) 
self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.currentToken["publicId"] += data return True def doctypePublicIdentifierSingleQuotedState(self): data = self.stream.char() if data == "'": self.state = self.afterDoctypePublicIdentifierState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["publicId"] += "\uFFFD" elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-end-of-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.currentToken["publicId"] += data return True def afterDoctypePublicIdentifierState(self): data = self.stream.char() if data in spaceCharacters: self.state = self.betweenDoctypePublicAndSystemIdentifiersState elif data == ">": self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data == '"': self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"}) self.currentToken["systemId"] = "" self.state = self.doctypeSystemIdentifierDoubleQuotedState elif data == "'": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"}) self.currentToken["systemId"] = "" self.state = self.doctypeSystemIdentifierSingleQuotedState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"}) self.currentToken["correct"] = False self.state = 
self.bogusDoctypeState return True def betweenDoctypePublicAndSystemIdentifiersState(self): data = self.stream.char() if data in spaceCharacters: pass elif data == ">": self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data == '"': self.currentToken["systemId"] = "" self.state = self.doctypeSystemIdentifierDoubleQuotedState elif data == "'": self.currentToken["systemId"] = "" self.state = self.doctypeSystemIdentifierSingleQuotedState elif data == EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"}) self.currentToken["correct"] = False self.state = self.bogusDoctypeState return True def afterDoctypeSystemKeywordState(self): data = self.stream.char() if data in spaceCharacters: self.state = self.beforeDoctypeSystemIdentifierState elif data in ("'", '"'): self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"}) self.stream.unget(data) self.state = self.beforeDoctypeSystemIdentifierState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.stream.unget(data) self.state = self.beforeDoctypeSystemIdentifierState return True def beforeDoctypeSystemIdentifierState(self): data = self.stream.char() if data in spaceCharacters: pass elif data == "\"": self.currentToken["systemId"] = "" self.state = self.doctypeSystemIdentifierDoubleQuotedState elif data == "'": self.currentToken["systemId"] = "" self.state = self.doctypeSystemIdentifierSingleQuotedState elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"}) self.currentToken["correct"] = False 
self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"}) self.currentToken["correct"] = False self.state = self.bogusDoctypeState return True def doctypeSystemIdentifierDoubleQuotedState(self): data = self.stream.char() if data == "\"": self.state = self.afterDoctypeSystemIdentifierState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["systemId"] += "\uFFFD" elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-end-of-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.currentToken["systemId"] += data return True def doctypeSystemIdentifierSingleQuotedState(self): data = self.stream.char() if data == "'": self.state = self.afterDoctypeSystemIdentifierState elif data == "\u0000": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) self.currentToken["systemId"] += "\uFFFD" elif data == ">": self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-end-of-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState else: 
self.currentToken["systemId"] += data return True def afterDoctypeSystemIdentifierState(self): data = self.stream.char() if data in spaceCharacters: pass elif data == ">": self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data is EOF: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "eof-in-doctype"}) self.currentToken["correct"] = False self.tokenQueue.append(self.currentToken) self.state = self.dataState else: self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "unexpected-char-in-doctype"}) self.state = self.bogusDoctypeState return True def bogusDoctypeState(self): data = self.stream.char() if data == ">": self.tokenQueue.append(self.currentToken) self.state = self.dataState elif data is EOF: # XXX EMIT self.stream.unget(data) self.tokenQueue.append(self.currentToken) self.state = self.dataState else: pass return True def cdataSectionState(self): data = [] while True: data.append(self.stream.charsUntil("]")) data.append(self.stream.charsUntil(">")) char = self.stream.char() if char == EOF: break else: assert char == ">" if data[-1][-2:] == "]]": data[-1] = data[-1][:-2] break else: data.append(char) data = "".join(data) # pylint:disable=redefined-variable-type # Deal with null here rather than in the parser nullCount = data.count("\u0000") if nullCount > 0: for _ in range(nullCount): self.tokenQueue.append({"type": tokenTypes["ParseError"], "data": "invalid-codepoint"}) data = data.replace("\u0000", "\uFFFD") if data: self.tokenQueue.append({"type": tokenTypes["Characters"], "data": data}) self.state = self.dataState return True
gpl-3.0
Andypsamp/CODjunit
beets/art.py
14
6851
# This file is part of beets. # Copyright 2015, Adrian Sampson. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. """High-level utilities for manipulating image files associated with music and items' embedded album art. """ import subprocess import platform from tempfile import NamedTemporaryFile import imghdr import os from beets.util import displayable_path, syspath from beets.util.artresizer import ArtResizer from beets import mediafile from beets import config def mediafile_image(image_path, maxwidth=None): """Return a `mediafile.Image` object for the path. """ with open(syspath(image_path), 'rb') as f: data = f.read() return mediafile.Image(data, type=mediafile.ImageType.front) def get_art(log, item): # Extract the art. try: mf = mediafile.MediaFile(syspath(item.path)) except mediafile.UnreadableFileError as exc: log.warning(u'Could not extract art from {0}: {1}', displayable_path(item.path), exc) return return mf.art def embed_item(log, item, imagepath, maxwidth=None, itempath=None, compare_threshold=0, ifempty=False, as_album=False): """Embed an image into the item's media file. """ # Conditions and filters. 
if compare_threshold: if not check_art_similarity(log, item, imagepath, compare_threshold): log.info(u'Image not similar; skipping.') return if ifempty and get_art(log, item): log.info(u'media file already contained art') return if maxwidth and not as_album: imagepath = resize_image(log, imagepath, maxwidth) # Get the `Image` object from the file. try: log.debug(u'embedding {0}', displayable_path(imagepath)) image = mediafile_image(imagepath, maxwidth) except IOError as exc: log.warning(u'could not read image file: {0}', exc) return # Make sure the image kind is safe (some formats only support PNG # and JPEG). if image.mime_type not in ('image/jpeg', 'image/png'): log.info('not embedding image of unsupported type: {}', image.mime_type) return item.try_write(path=itempath, tags={'images': [image]}) def embed_album(log, album, maxwidth=None, quiet=False, compare_threshold=0, ifempty=False): """Embed album art into all of the album's items. """ imagepath = album.artpath if not imagepath: log.info(u'No album art present for {0}', album) return if not os.path.isfile(syspath(imagepath)): log.info(u'Album art not found at {0} for {1}', displayable_path(imagepath), album) return if maxwidth: imagepath = resize_image(log, imagepath, maxwidth) log.info(u'Embedding album art into {0}', album) for item in album.items(): embed_item(log, item, imagepath, maxwidth, None, compare_threshold, ifempty, as_album=True) def resize_image(log, imagepath, maxwidth): """Returns path to an image resized to maxwidth. """ log.debug(u'Resizing album art to {0} pixels wide', maxwidth) imagepath = ArtResizer.shared.resize(maxwidth, syspath(imagepath)) return imagepath def check_art_similarity(log, item, imagepath, compare_threshold): """A boolean indicating if an image is similar to embedded item art. 
""" with NamedTemporaryFile(delete=True) as f: art = extract(log, f.name, item) if art: is_windows = platform.system() == "Windows" # Converting images to grayscale tends to minimize the weight # of colors in the diff score. convert_proc = subprocess.Popen( [b'convert', syspath(imagepath), syspath(art), b'-colorspace', b'gray', b'MIFF:-'], stdout=subprocess.PIPE, close_fds=not is_windows, ) compare_proc = subprocess.Popen( [b'compare', b'-metric', b'PHASH', b'-', b'null:'], stdin=convert_proc.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=not is_windows, ) convert_proc.stdout.close() stdout, stderr = compare_proc.communicate() if compare_proc.returncode: if compare_proc.returncode != 1: log.debug(u'IM phashes compare failed for {0}, {1}', displayable_path(imagepath), displayable_path(art)) return out_str = stderr else: out_str = stdout try: phash_diff = float(out_str) except ValueError: log.debug(u'IM output is not a number: {0!r}', out_str) return log.debug(u'compare PHASH score is {0}', phash_diff) return phash_diff <= compare_threshold return True def extract(log, outpath, item): art = get_art(log, item) if not art: log.info(u'No album art present in {0}, skipping.', item) return # Add an extension to the filename. ext = imghdr.what(None, h=art) if not ext: log.warning(u'Unknown image type in {0}.', displayable_path(item.path)) return outpath += b'.' 
+ ext log.info(u'Extracting album art from: {0} to: {1}', item, displayable_path(outpath)) with open(syspath(outpath), 'wb') as f: f.write(art) return outpath def extract_first(log, outpath, items): for item in items: real_path = extract(log, outpath, item) if real_path: return real_path def clear(log, lib, query): id3v23 = config['id3v23'].get(bool) items = lib.items(query) log.info(u'Clearing album art from {0} items', len(items)) for item in items: log.debug(u'Clearing art for {0}', item) try: mf = mediafile.MediaFile(syspath(item.path), id3v23) except mediafile.UnreadableFileError as exc: log.warning(u'Could not read file {0}: {1}', displayable_path(item.path), exc) else: del mf.art mf.save()
mit
hawk78/pyrpcgen
rpc/rpcsec/base.py
1
1071
from rpc.rpc_const import AUTH_NONE from rpc.rpc_type import opaque_auth class SecError(Exception): pass class SecFlavor(object): _none = opaque_auth(AUTH_NONE, '') def initialize(self, client): pass def secure_data(self, data, seqnum): """Filter procedure arguments before sending to server""" return data def unsecure_data(self, data, seqnum): """Filter procedure results received from server""" return data def make_cred(self): """Credential and seqnum sent with each RPC call""" return self._none, None def make_verf(self, data): """Verifier sent with each RPC call 'data' is packed header upto and including cred """ return self._none def make_reply_verf(self, data): """Verifier sent by server with each RPC reply""" # FRED - currently data is always '' return self._none def get_owner(self): """Return uid""" return 0 def get_group(self): """Return gid""" return 0
gpl-2.0
DLR-SC/RepoGuard
src/repoguard/checks/pylint_.py
2
3430
# pylint: disable-msg=W0232,R0903,C0103 # Copyright 2008 German Aerospace Center (DLR) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Python coding style check. """ import os import StringIO from tempfile import gettempdir from pylint import lint from pylint.reporters.text import TextReporter from repoguard.core.module import Check, ConfigSerializer, Array, String class Config(ConfigSerializer): """ Configuration for PyLint check. """ class types(ConfigSerializer.types): """ Configurable parameters. """ check_files = Array(String, optional=True, default=[".*\.py"]) ignore_files = Array(String, optional=True, default=[]) pylint_home = String( optional=True, default=os.path.join(gettempdir(), '.pylint.d') ) pylintrc = String(optional=True) class PyLint(Check): """ Check that executes the code checking tool PyLint from logilab on all commited python files. """ __config__ = Config def _run(self, config): """ Run the pylint check with the given config. :param config: The config object described by Config. :type config: Config """ files = self.transaction.get_files( config.check_files, config.ignore_files ) # Exit when no files has to be checked. if not files: self.logger.debug("PyLint check skipped. No files for check.") return self.success() # Defining pylint home directory. os.environ['PYLINTHOME'] = config.pylint_home self.logger.debug("PyLint Home is used at '%s'.", config.pylint_home) # Determine which pylintrc file is used for the validation. 
if config.pylintrc: self.logger.debug("Pylintrc is used at '%s'.", config.pylintrc) os.environ['PYLINTRC'] = config.pylintrc else: self.logger.debug("Default PyLintRC is used.") # Only added or updated files will be checked. files = [ self.transaction.get_file(name) for name, attr in files.iteritems() if attr in ["A", "U", "UU"] ] if not files: self.logger.debug("No files to validate. PyLint check skipped.") return self.success() output = StringIO.StringIO() reporter = TextReporter(output) # Mock to prevent the sys.exit called by pylint.lint.Run.__init__ lint.sys.exit = lambda _: 0 self.logger.debug("PyLint is running...") lint.Run(["--reports=n"] + files, reporter=reporter) output = output.getvalue() self.logger.debug("PyLint output:\n %s", output) if output: return self.error(output) else: return self.success()
apache-2.0
mlperf/training_results_v0.5
v0.5.0/google/cloud_v3.8/gnmt-tpuv3-8/code/gnmt/model/t2t/tensor2tensor/data_generators/algorithmic_math_test.py
3
3060
# coding=utf-8 # Copyright 2018 The Tensor2Tensor Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for tensor2tensor.data_generators.algorithmic_math.""" # TODO(rsepassi): This test is flaky. Disable, remove, or update. from __future__ import absolute_import from __future__ import division from __future__ import print_function import six import sympy from tensor2tensor.data_generators import algorithmic_math import tensorflow as tf class AlgorithmicMathTest(tf.test.TestCase): def testAlgebraInverse(self): dataset_objects = algorithmic_math.math_dataset_init(26) counter = 0 for d in algorithmic_math.algebra_inverse(26, 0, 3, 10): counter += 1 decoded_input = dataset_objects.int_decoder(d["inputs"]) solve_var, expression = decoded_input.split(":") lhs, rhs = expression.split("=") # Solve for the solve-var. result = sympy.solve("%s-(%s)" % (lhs, rhs), solve_var) target_expression = dataset_objects.int_decoder(d["targets"]) # Check that the target and sympy's solutions are equivalent. self.assertEqual( 0, sympy.simplify(str(result[0]) + "-(%s)" % target_expression)) self.assertEqual(counter, 10) def testAlgebraSimplify(self): dataset_objects = algorithmic_math.math_dataset_init(8, digits=5) counter = 0 for d in algorithmic_math.algebra_simplify(8, 0, 3, 10): counter += 1 expression = dataset_objects.int_decoder(d["inputs"]) target = dataset_objects.int_decoder(d["targets"]) # Check that the input and output are equivalent expressions. 
self.assertEqual(0, sympy.simplify("%s-(%s)" % (expression, target))) self.assertEqual(counter, 10) def testCalculusIntegrate(self): dataset_objects = algorithmic_math.math_dataset_init( 8, digits=5, functions={"log": "L"}) counter = 0 for d in algorithmic_math.calculus_integrate(8, 0, 3, 10): counter += 1 decoded_input = dataset_objects.int_decoder(d["inputs"]) var, expression = decoded_input.split(":") target = dataset_objects.int_decoder(d["targets"]) for fn_name, fn_char in six.iteritems(dataset_objects.functions): target = target.replace(fn_char, fn_name) # Take the derivative of the target. derivative = str(sympy.diff(target, var)) # Check that the derivative of the integral equals the input. self.assertEqual(0, sympy.simplify("%s-(%s)" % (expression, derivative))) self.assertEqual(counter, 10) if __name__ == "__main__": tf.test.main()
apache-2.0
alexis-roche/nireg
nireg/tests/test_fmri_realign4d.py
2
10533
from __future__ import absolute_import # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import warnings from nose.tools import assert_equal from numpy.testing import (assert_array_almost_equal, assert_array_equal, assert_raises) import numpy as np from nibabel import (load, Nifti1Image, io_orientation) from ..testing import funcfile from ..groupwise_registration import (Image4d, resample4d, SpaceTimeRealign, SpaceRealign, Realign4d, Realign4dAlgorithm, make_grid) from ..slicetiming.timefuncs import st_43210, st_02413, st_42031 from ..affine import Rigid im = load(funcfile) def test_scanner_time(): im4d = Image4d(im.get_data(), im.get_affine(), tr=3., slice_times=(0, 1, 2)) assert_equal(im4d.scanner_time(0, 0), 0.) assert_equal(im4d.scanner_time(0, im4d.tr), 1.) def test_slice_info(): im4d = Image4d(im.get_data(), im.get_affine(), tr=3., slice_times=(0, 1, 2), slice_info=(2, -1)) assert_equal(im4d.slice_axis, 2) assert_equal(im4d.slice_direction, -1) def test_slice_timing(): affine = np.eye(4) affine[0:3, 0:3] = im.get_affine()[0:3, 0:3] im4d = Image4d(im.get_data(), affine, tr=2., slice_times=0.0) x = resample4d(im4d, [Rigid() for i in range(im.shape[3])]) assert_array_almost_equal(im4d.get_data(), x) def test_realign4d_no_time_interp(): runs = [im, im] R = SpaceRealign(runs) assert R.slice_times == 0 def test_realign4d_ascending(): runs = [im, im] R = SpaceTimeRealign(runs, tr=3, slice_times='ascending', slice_info=2) assert_array_equal(R.slice_times, (0, 1, 2)) assert R.tr == 3 def test_realign4d_descending(): runs = [im, im] R = SpaceTimeRealign(runs, tr=3, slice_times='descending', slice_info=2) assert_array_equal(R.slice_times, (2, 1, 0)) assert R.tr == 3 def test_realign4d_ascending_interleaved(): runs = [im, im] R = SpaceTimeRealign(runs, tr=3, slice_times='asc_alt_2', slice_info=2) assert_array_equal(R.slice_times, (0, 2, 1)) assert R.tr == 3 def test_realign4d_descending_interleaved(): runs = 
[im, im] R = SpaceTimeRealign(runs, tr=3, slice_times='desc_alt_2', slice_info=2) assert_array_equal(R.slice_times, (1, 2, 0)) assert R.tr == 3 def test_realign4d(): """This tests whether realign4d yields the same results depending on whether the slice order is input explicitely or as slice_times='ascending'. Due to the very small size of the image used for testing (only 3 slices), optimization is numerically unstable. It seems to make the default optimizer, namely scipy.fmin.fmin_ncg, adopt a random behavior. To work around the resulting inconsistency in results, we use a custom steepest gradient descent as the optimizer, although it's generally not recommended in practice. """ runs = [im, im] orient = io_orientation(im.get_affine()) slice_axis = int(np.where(orient[:, 0] == 2)[0]) R1 = SpaceTimeRealign(runs, tr=2., slice_times='ascending', slice_info=slice_axis) R1.estimate(refscan=None, loops=1, between_loops=1, optimizer='steepest') nslices = im.shape[slice_axis] slice_times = (2. / float(nslices)) * np.arange(nslices) R2 = SpaceTimeRealign(runs, tr=2., slice_times=slice_times, slice_info=slice_axis) R2.estimate(refscan=None, loops=1, between_loops=1, optimizer='steepest') for r in range(2): for i in range(im.shape[3]): assert_array_almost_equal(R1._transforms[r][i].translation, R2._transforms[r][i].translation) assert_array_almost_equal(R1._transforms[r][i].rotation, R2._transforms[r][i].rotation) for i in range(im.shape[3]): assert_array_almost_equal(R1._mean_transforms[r].translation, R2._mean_transforms[r].translation) assert_array_almost_equal(R1._mean_transforms[r].rotation, R2._mean_transforms[r].rotation) def test_realign4d_runs_with_different_affines(): aff = im.get_affine() aff2 = aff.copy() aff2[0:3, 3] += 5 im2 = Nifti1Image(im.get_data(), aff2) runs = [im, im2] R = SpaceTimeRealign(runs, tr=2., slice_times='ascending', slice_info=2) R.estimate(refscan=None, loops=1, between_loops=1, optimizer='steepest') cor_im, cor_im2 = R.resample() 
assert_array_equal(cor_im2.get_affine(), aff) def test_realign4d_params(): # Some tests for input parameters to realign4d R = Realign4d(im, 3, [0, 1, 2], None) # No slice_info - OK assert_equal(R.tr, 3) # TR cannot be None for set slice times assert_raises(ValueError, Realign4d, im, None, [0, 1, 2], None) # TR can be None if slice times are None R = Realign4d(im, None, None) assert_equal(R.tr, 1) def test_spacetimerealign_params(): runs = [im, im] for slice_times in ('descending', '43210', st_43210, [2, 1, 0]): R = SpaceTimeRealign(runs, tr=3, slice_times=slice_times, slice_info=2) assert_array_equal(R.slice_times, (2, 1, 0)) assert_equal(R.tr, 3) for slice_times in ('asc_alt_2', '02413', st_02413, [0, 2, 1]): R = SpaceTimeRealign(runs, tr=3, slice_times=slice_times, slice_info=2) assert_array_equal(R.slice_times, (0, 2, 1)) assert_equal(R.tr, 3) for slice_times in ('desc_alt_2', '42031', st_42031, [1, 2, 0]): R = SpaceTimeRealign(runs, tr=3, slice_times=slice_times, slice_info=2) assert_array_equal(R.slice_times, (1, 2, 0)) assert_equal(R.tr, 3) # Check changing axis R = SpaceTimeRealign(runs, tr=21, slice_times='ascending', slice_info=1) assert_array_equal(R.slice_times, np.arange(21)) # Check slice_times and slice_info and TR required R = SpaceTimeRealign(runs, 3, 'ascending', 2) # OK assert_raises(ValueError, SpaceTimeRealign, runs, 3, None, 2) assert_raises(ValueError, SpaceTimeRealign, runs, 3, 'ascending', None) assert_raises(ValueError, SpaceTimeRealign, runs, None, [0, 1, 2], 2) # Test when TR and nslices are not the same R1 = SpaceTimeRealign(runs, tr=2., slice_times='ascending', slice_info=2) assert_array_equal(R1.slice_times, np.arange(3) / 3. * 2.) 
# Smoke test run R1.estimate(refscan=None, loops=1, between_loops=1, optimizer='steepest') # Test tighter borders for motion estimation R1.estimate(refscan=None, loops=1, between_loops=1, optimizer='steepest', borders=(5,5,5)) def reduced_dim(dim, subsampling, border): return max(1, int(np.ceil((dim - 2 * border) / float(subsampling)))) def test_lowlevel_params(): runs = [im, im] R = SpaceTimeRealign(runs, tr=21, slice_times='ascending', slice_info=1) borders=(3,2,1) R.estimate(refscan=None, loops=1, between_loops=1, optimizer='steepest', borders=borders) # Test tighter borders for motion estimation r = Realign4dAlgorithm(R._runs[0], borders=borders) nvoxels = np.prod(np.array([reduced_dim(im.shape[i], 1, borders[i]) for i in range(3)])) assert_array_equal(r.xyz.shape, (nvoxels, 3)) # Test wrong argument types raise errors assert_raises(ValueError, Realign4dAlgorithm, R._runs[0], subsampling=(3,3,3,1)) assert_raises(ValueError, Realign4dAlgorithm, R._runs[0], refscan='first') assert_raises(ValueError, Realign4dAlgorithm, R._runs[0], borders=(1,1,1,0)) assert_raises(ValueError, Realign4dAlgorithm, R._runs[0], xtol=None) assert_raises(ValueError, Realign4dAlgorithm, R._runs[0], ftol='dunno') assert_raises(ValueError, Realign4dAlgorithm, R._runs[0], gtol=(.1,.1,.1)) assert_raises(ValueError, Realign4dAlgorithm, R._runs[0], stepsize=None) assert_raises(ValueError, Realign4dAlgorithm, R._runs[0], maxiter=None) assert_raises(ValueError, Realign4dAlgorithm, R._runs[0], maxfun='none') def _test_make_grid(dims, subsampling, borders, expected_nvoxels): x = make_grid(dims, subsampling, borders) assert_equal(x.shape[0], expected_nvoxels) def test_make_grid_funfile(): dims = im.shape[0:3] borders = (3,2,1) nvoxels = np.prod(np.array([reduced_dim(dims[i], 1, borders[i]) for i in range(3)])) _test_make_grid(dims, (1,1,1), borders, nvoxels) def test_make_grid_default(): dims = np.random.randint(100, size=3) + 1 _test_make_grid(dims, (1,1,1), (0,0,0), np.prod(dims)) def 
test_make_grid_random_subsampling(): dims = np.random.randint(100, size=3) + 1 subsampling = np.random.randint(5, size=3) + 1 nvoxels = np.prod(np.array([reduced_dim(dims[i], subsampling[i], 0) for i in range(3)])) _test_make_grid(dims, subsampling, (0,0,0), nvoxels) def test_make_grid_random_borders(): dims = np.random.randint(100, size=3) + 1 borders = np.minimum((dims - 1) / 2, np.random.randint(10, size=3)) nvoxels = np.prod(np.array([reduced_dim(dims[i], 1, borders[i]) for i in range(3)])) _test_make_grid(dims, (1,1,1), borders, nvoxels) def test_make_grid_full_monthy(): dims = np.random.randint(100, size=3) + 1 subsampling = np.random.randint(5, size=3) + 1 borders = np.minimum((dims - 1) / 2, np.random.randint(10, size=3)) nvoxels = np.prod(np.array([reduced_dim(dims[i], subsampling[i], borders[i]) for i in range(3)])) _test_make_grid(dims, subsampling, borders, nvoxels) def test_spacerealign(): # Check space-only realigner runs = [im, im] R = SpaceRealign(runs) assert_equal(R.tr, 1) assert_equal(R.slice_times, 0.) # Smoke test run R.estimate(refscan=None, loops=1, between_loops=1, optimizer='steepest') def test_single_image(): # Check we can use a single image as argument R = SpaceTimeRealign(im, tr=3, slice_times='ascending', slice_info=2) R.estimate(refscan=None, loops=1, between_loops=1, optimizer='steepest') R = SpaceRealign(im) R.estimate(refscan=None, loops=1, between_loops=1, optimizer='steepest') R = Realign4d(im, 3, [0, 1, 2], (2, 1)) R.estimate(refscan=None, loops=1, between_loops=1, optimizer='steepest')
bsd-3-clause
philoniare/horizon
openstack_dashboard/dashboards/admin/volumes/volume_types/tests.py
16
9761
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.core.urlresolvers import reverse from django import http from mox3.mox import IsA # noqa from openstack_dashboard import api from openstack_dashboard.api import cinder from openstack_dashboard.api import keystone from openstack_dashboard.test import helpers as test class VolumeTypeTests(test.BaseAdminViewTests): @test.create_stubs({cinder: ('volume_type_create',)}) def test_create_volume_type(self): formData = {'name': 'volume type 1', 'vol_type_description': 'test desc'} cinder.volume_type_create( IsA(http.HttpRequest), formData['name'], formData['vol_type_description']).AndReturn( self.volume_types.first()) self.mox.ReplayAll() res = self.client.post( reverse('horizon:admin:volumes:volume_types:create_type'), formData) self.assertNoFormErrors(res) redirect = reverse('horizon:admin:volumes:volume_types_tab') self.assertRedirectsNoFollow(res, redirect) @test.create_stubs({cinder: ('volume_type_get', 'volume_type_update')}) def test_update_volume_type(self): volume_type = self.cinder_volume_types.first() formData = {'name': volume_type.name, 'description': 'test desc updated'} volume_type = cinder.volume_type_get( IsA(http.HttpRequest), volume_type.id).AndReturn(volume_type) cinder.volume_type_update( IsA(http.HttpRequest), volume_type.id, formData['name'], formData['description']).AndReturn(volume_type) self.mox.ReplayAll() url = reverse('horizon:admin:volumes:volume_types:update_type', args=[volume_type.id]) res = 
self.client.post(url, formData) self.assertNoFormErrors(res) redirect = reverse('horizon:admin:volumes:volume_types_tab') self.assertRedirectsNoFollow(res, redirect) @test.create_stubs({api.nova: ('server_list',), cinder: ('volume_list', 'volume_type_list_with_qos_associations', 'qos_spec_list', 'volume_type_delete', 'volume_encryption_type_list'), keystone: ('tenant_list',)}) def test_delete_volume_type(self): volume_type = self.cinder_volume_types.first() formData = {'action': 'volume_types__delete__%s' % volume_type.id} encryption_list = (self.cinder_volume_encryption_types.list()[0], self.cinder_volume_encryption_types.list()[1]) cinder.volume_type_list_with_qos_associations( IsA(http.HttpRequest)).\ AndReturn(self.volume_types.list()) cinder.qos_spec_list(IsA(http.HttpRequest)).\ AndReturn(self.cinder_qos_specs.list()) cinder.volume_encryption_type_list(IsA(http.HttpRequest))\ .AndReturn(encryption_list) cinder.volume_type_delete(IsA(http.HttpRequest), volume_type.id) self.mox.ReplayAll() res = self.client.post( reverse('horizon:admin:volumes:volumes_tab'), formData) redirect = reverse('horizon:admin:volumes:volumes_tab') self.assertNoFormErrors(res) self.assertRedirectsNoFollow(res, redirect) @test.create_stubs({cinder: ('volume_encryption_type_create', 'volume_type_list',)}) def test_create_volume_type_encryption(self): volume_type1 = self.volume_types.list()[0] volume_type2 = self.volume_types.list()[1] volume_type1.id = u'1' volume_type2.id = u'2' volume_type_list = [volume_type1, volume_type2] formData = {'name': u'An Encrypted Volume Type', 'provider': u'a-provider', 'control_location': u'front-end', 'cipher': u'a-cipher', 'key_size': 512, 'volume_type_id': volume_type1.id} cinder.volume_type_list(IsA(http.HttpRequest))\ .AndReturn(volume_type_list) cinder.volume_encryption_type_create(IsA(http.HttpRequest), formData['volume_type_id'], formData) self.mox.ReplayAll() url = reverse('horizon:admin:volumes:' 'volume_types:create_type_encryption', 
args=[volume_type1.id]) res = self.client.post(url, formData) self.assertNoFormErrors(res) self.assertTemplateUsed( res, 'admin/volumes/volume_types/create_volume_type_encryption.html') @test.create_stubs({cinder: ('volume_encryption_type_get', 'volume_type_list',)}) def test_type_encryption_detail_view_unencrypted(self): volume_type1 = self.volume_types.list()[0] volume_type1.id = u'1' volume_type_list = [volume_type1] vol_unenc_type = self.cinder_volume_encryption_types.list()[2] cinder.volume_encryption_type_get(IsA(http.HttpRequest), volume_type1.id)\ .AndReturn(vol_unenc_type) cinder.volume_type_list(IsA(http.HttpRequest))\ .AndReturn(volume_type_list) self.mox.ReplayAll() url = reverse('horizon:admin:volumes:' 'volume_types:type_encryption_detail', args=[volume_type1.id]) res = self.client.get(url) self.assertTemplateUsed( res, 'admin/volumes/volume_types/volume_encryption_type_detail.html') self.assertContains(res, "<h3>Volume Type is Unencrypted.</h3>", 1, 200) self.assertNoMessages() @test.create_stubs({cinder: ('volume_encryption_type_get', 'volume_type_list',)}) def test_type_encryption_detail_view_encrypted(self): volume_type = self.volume_types.first() volume_type.id = u'1' volume_type.name = "An Encrypted Volume Name" volume_type_list = [volume_type] vol_enc_type = self.cinder_volume_encryption_types.list()[0] cinder.volume_encryption_type_get(IsA(http.HttpRequest), volume_type.id)\ .AndReturn(vol_enc_type) cinder.volume_type_list(IsA(http.HttpRequest))\ .AndReturn(volume_type_list) self.mox.ReplayAll() url = reverse('horizon:admin:volumes' ':volume_types:type_encryption_detail', args=[volume_type.id]) res = self.client.get(url) self.assertTemplateUsed( res, 'admin/volumes/volume_types/volume_encryption_type_detail.html') self.assertContains(res, "<h3>Volume Type Encryption Overview</h3>", 1, 200) self.assertContains(res, "<dd>%s</dd>" % volume_type.name, 1, 200) self.assertContains(res, "<dd>%s</dd>" % vol_enc_type.control_location, 1, 200) 
self.assertContains(res, "<dd>%s</dd>" % vol_enc_type.key_size, 1, 200) self.assertContains(res, "<dd>%s</dd>" % vol_enc_type.provider, 1, 200) self.assertContains(res, "<dd>%s</dd>" % vol_enc_type.cipher, 1, 200) self.assertNoMessages() @test.create_stubs({cinder: ('extension_supported', 'volume_type_list_with_qos_associations', 'qos_spec_list', 'volume_encryption_type_list', 'volume_encryption_type_delete',)}) def test_delete_volume_type_encryption(self): volume_type = self.volume_types.first() volume_type.id = u'1' formData = {'action': 'volume_types__delete_encryption__%s' % volume_type.id} encryption_list = (self.cinder_volume_encryption_types.list()[0], self.cinder_volume_encryption_types.list()[1]) cinder.extension_supported(IsA(http.HttpRequest), 'VolumeTypeEncryption')\ .AndReturn(True) cinder.volume_type_list_with_qos_associations( IsA(http.HttpRequest))\ .AndReturn(self.volume_types.list()) cinder.qos_spec_list(IsA(http.HttpRequest))\ .AndReturn(self.cinder_qos_specs.list()) cinder.volume_encryption_type_list(IsA(http.HttpRequest))\ .AndReturn(encryption_list) cinder.volume_encryption_type_delete(IsA(http.HttpRequest), volume_type.id) self.mox.ReplayAll() res = self.client.post( reverse('horizon:admin:volumes:volume_types_tab'), formData) redirect = reverse('horizon:admin:volumes:volume_types_tab') self.assertNoFormErrors(res) self.assertRedirectsNoFollow(res, redirect)
apache-2.0
venumech/cookiecutter
tests/test_generate_hooks.py
27
4438
#!/usr/bin/env python # -*- coding: utf-8 -*- """ test_generate_hooks ------------------- Tests formerly known from a unittest residing in test_generate.py named TestHooks.test_ignore_hooks_dirs TestHooks.test_run_python_hooks TestHooks.test_run_python_hooks_cwd TestHooks.test_run_shell_hooks """ from __future__ import unicode_literals import os import sys import stat import pytest from cookiecutter import generate from cookiecutter import utils @pytest.fixture(scope='function') def remove_additional_folders(request): """ Remove some special folders which are created by the tests. """ def fin_remove_additional_folders(): if os.path.exists('tests/test-pyhooks/inputpyhooks'): utils.rmtree('tests/test-pyhooks/inputpyhooks') if os.path.exists('inputpyhooks'): utils.rmtree('inputpyhooks') if os.path.exists('tests/test-shellhooks'): utils.rmtree('tests/test-shellhooks') request.addfinalizer(fin_remove_additional_folders) @pytest.mark.usefixtures('clean_system', 'remove_additional_folders') def test_ignore_hooks_dirs(): generate.generate_files( context={ 'cookiecutter': {'pyhooks': 'pyhooks'} }, repo_dir='tests/test-pyhooks/', output_dir='tests/test-pyhooks/' ) assert not os.path.exists('tests/test-pyhooks/inputpyhooks/hooks') @pytest.mark.usefixtures('clean_system', 'remove_additional_folders') def test_run_python_hooks(): generate.generate_files( context={ 'cookiecutter': {'pyhooks': 'pyhooks'} }, repo_dir='tests/test-pyhooks/'.replace("/", os.sep), output_dir='tests/test-pyhooks/'.replace("/", os.sep) ) assert os.path.exists('tests/test-pyhooks/inputpyhooks/python_pre.txt') assert os.path.exists('tests/test-pyhooks/inputpyhooks/python_post.txt') @pytest.mark.usefixtures('clean_system', 'remove_additional_folders') def test_run_python_hooks_cwd(): generate.generate_files( context={ 'cookiecutter': {'pyhooks': 'pyhooks'} }, repo_dir='tests/test-pyhooks/' ) assert os.path.exists('inputpyhooks/python_pre.txt') assert os.path.exists('inputpyhooks/python_post.txt') def 
make_test_repo(name): hooks = os.path.join(name, 'hooks') template = os.path.join(name, 'input{{cookiecutter.shellhooks}}') os.mkdir(name) os.mkdir(hooks) os.mkdir(template) with open(os.path.join(template, 'README.rst'), 'w') as f: f.write("foo\n===\n\nbar\n") if sys.platform.startswith('win'): filename = os.path.join(hooks, 'pre_gen_project.bat') with open(filename, 'w') as f: f.write("@echo off\n") f.write("\n") f.write("echo pre generation hook\n") f.write("echo. >shell_pre.txt\n") filename = os.path.join(hooks, 'post_gen_project.bat') with open(filename, 'w') as f: f.write("@echo off\n") f.write("\n") f.write("echo post generation hook\n") f.write("echo. >shell_post.txt\n") else: filename = os.path.join(hooks, 'pre_gen_project.sh') with open(filename, 'w') as f: f.write("#!/bin/bash\n") f.write("\n") f.write("echo 'pre generation hook';\n") f.write("touch 'shell_pre.txt'\n") # Set the execute bit os.chmod(filename, os.stat(filename).st_mode | stat.S_IXUSR) filename = os.path.join(hooks, 'post_gen_project.sh') with open(filename, 'w') as f: f.write("#!/bin/bash\n") f.write("\n") f.write("echo 'post generation hook';\n") f.write("touch 'shell_post.txt'\n") # Set the execute bit os.chmod(filename, os.stat(filename).st_mode | stat.S_IXUSR) @pytest.mark.usefixtures('clean_system', 'remove_additional_folders') def test_run_shell_hooks(): make_test_repo('tests/test-shellhooks') generate.generate_files( context={ 'cookiecutter': {'shellhooks': 'shellhooks'} }, repo_dir='tests/test-shellhooks/', output_dir='tests/test-shellhooks/' ) shell_pre_file = 'tests/test-shellhooks/inputshellhooks/shell_pre.txt' shell_post_file = 'tests/test-shellhooks/inputshellhooks/shell_post.txt' assert os.path.exists(shell_pre_file) assert os.path.exists(shell_post_file)
bsd-3-clause
lepinkainen/pyfibot
pyfibot/modules/available/module_expl.py
1
4422
# -*- encoding: utf-8 -*- import os import os.path import sys import re import random import fnmatch def expl_parseterm(expl): expl = expl.split(" ") expl = expl[0] expl = expl.lower() expl = expl.strip() invalidchars = re.compile(r"[^a-z0-9\ :\.-]") expl = invalidchars.sub("_", expl) return expl def expl_getdir(channel): expldir = os.path.join(sys.path[0], "expl", channel) if not os.path.exists(expldir): return None return expldir def expl_getlist(expldir): list = os.listdir(expldir) dotfile = re.compile(r"(^[^\.])") list = filter(dotfile.match, list) return list def expl_getexpl(expldir, term): f = file(os.path.join(expldir, term)) expl = f.read() f.close() return expl def check_params(bot, args, channel): """Do some initial checking for the stuff we need for every subcommand""" if not args: return False expldir = expl_getdir(channel) if not expldir: bot.log( "No expldir for channel %s, create %s to enable expl." % (channel, os.path.join(sys.path[0], "expl", channel)) ) return False termlist = expl_getlist(expldir) return expldir, termlist def command_expl(bot, user, channel, args): """Explains terms. Usage: .expl <term> See also: .rexpl, .add, .del, .ls""" try: expldir, termlist = check_params(bot, args, channel) except TypeError: return term = expl_parseterm(args) if term in termlist: expl = expl_getexpl(expldir, term) return bot.say(channel, "'%s': %s" % (term, expl)) else: return bot.say(channel, "Term '%s' not found. Try .help ls" % term) def command_rexpl(bot, user, channel, args): """Returns random explanation. See also: .expl, .add, .del, .ls""" try: expldir, termlist = check_params(bot, "none", channel) except TypeError: return term = termlist[random.randrange(0, len(termlist) - 1)] expl = expl_getexpl(expldir, term) return bot.say(channel, "'%s': %s" % (term, expl)) def command_add(bot, user, channel, args): """Adds explanation for term. 
Usage: .add <term> <explanation> See also: .expl, .rexpl, .del, .ls""" try: expldir, termlist = check_params(bot, args, channel) except TypeError: bot.log("No expldir for channel %s" % channel) return args = args.split(" ", 1) if not args[1]: return bot.say(user, "No explanation given") term = expl_parseterm(args[0]) if term in termlist: return bot.say(user, "Term '%s' already exists." % term) expl = args[1] f = file(os.path.join(expldir, term), "w") f.write(expl) f.write("\n") # add a newline to make it easier to admin f.close() bot.log("Term '%s' for %s added by %s: %s" % (term, channel, user, term)) return bot.say(user, "Term '%s' added: %s" % (term, expl)) def command_del(bot, user, channel, args): """Deletes term from explanation database. Usage: .del <term> See also: .expl, .rexpl, .add, .ls""" if not isAdmin(user): return try: expldir, termlist = check_params(bot, args, channel) except TypeError: return term = expl_parseterm(args) if term not in termlist: return bot.say(user, "Term '%s' doesn't exist." % term) expl = expl_getexpl(expldir, term) os.unlink(os.path.join(expldir, term)) bot.log( "Term '%s' for %s deleted by %s (contained: %s)" % (term, channel, user, expl) ) return bot.say(user, "Term '%s' deleted (contained: %s)" % (term, expl)) def command_ls(bot, user, channel, args): """Lists commands matching your Unix-like search query. Usage: .ls <query> Example: .ls ex[cp]la[!i]* matches 'explanation' and 'exclamation' but not 'explain'. See also: .expl, .rexpl, .add, .del""" try: expldir, termlist = check_params(bot, args, channel) except TypeError: return pattern = args.strip() matchlist = fnmatch.filter(termlist, pattern) matches = len(matchlist) if matches == 0: return bot.say(user, "No term matched '%s'." % pattern) first = "" if matches > 20: first = "first 20 of " matchlist.sort() return bot.say( user, "Terms matching '%s' (%stotal %d): %s" % (pattern, first, matches, ", ".join(matchlist[0:20])), )
bsd-3-clause
leansoft/edx-platform
lms/djangoapps/certificates/tests/tests.py
87
4494
""" Tests for the certificates models. """ from ddt import ddt, data, unpack from mock import patch from django.conf import settings from nose.plugins.attrib import attr from xmodule.modulestore.tests.factories import CourseFactory from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from student.tests.factories import UserFactory from certificates.models import ( CertificateStatuses, GeneratedCertificate, certificate_status_for_student, certificate_info_for_user ) from certificates.tests.factories import GeneratedCertificateFactory from util.milestones_helpers import ( set_prerequisite_courses, milestones_achieved_by_user, seed_milestone_relationship_types, ) @attr('shard_1') @ddt class CertificatesModelTest(ModuleStoreTestCase): """ Tests for the GeneratedCertificate model """ def test_certificate_status_for_student(self): student = UserFactory() course = CourseFactory.create(org='edx', number='verified', display_name='Verified Course') certificate_status = certificate_status_for_student(student, course.id) self.assertEqual(certificate_status['status'], CertificateStatuses.unavailable) self.assertEqual(certificate_status['mode'], GeneratedCertificate.MODES.honor) @unpack @data( {'allow_certificate': False, 'whitelisted': False, 'grade': None, 'output': ['N', 'N', 'N/A']}, {'allow_certificate': True, 'whitelisted': True, 'grade': None, 'output': ['Y', 'N', 'N/A']}, {'allow_certificate': True, 'whitelisted': False, 'grade': 0.9, 'output': ['Y', 'N', 'N/A']}, {'allow_certificate': False, 'whitelisted': True, 'grade': 0.8, 'output': ['N', 'N', 'N/A']}, {'allow_certificate': False, 'whitelisted': None, 'grade': 0.8, 'output': ['N', 'N', 'N/A']} ) def test_certificate_info_for_user(self, allow_certificate, whitelisted, grade, output): """ Verify that certificate_info_for_user works. 
""" student = UserFactory() course = CourseFactory.create(org='edx', number='verified', display_name='Verified Course') student.profile.allow_certificate = allow_certificate student.profile.save() certificate_info = certificate_info_for_user(student, course.id, grade, whitelisted) self.assertEqual(certificate_info, output) @patch.dict(settings.FEATURES, {'ENABLE_PREREQUISITE_COURSES': True, 'MILESTONES_APP': True}) def test_course_milestone_collected(self): seed_milestone_relationship_types() student = UserFactory() course = CourseFactory.create(org='edx', number='998', display_name='Test Course') pre_requisite_course = CourseFactory.create(org='edx', number='999', display_name='Pre requisite Course') # set pre-requisite course set_prerequisite_courses(course.id, [unicode(pre_requisite_course.id)]) # get milestones collected by user before completing the pre-requisite course completed_milestones = milestones_achieved_by_user(student, unicode(pre_requisite_course.id)) self.assertEqual(len(completed_milestones), 0) GeneratedCertificateFactory.create( user=student, course_id=pre_requisite_course.id, status=CertificateStatuses.generating, mode='verified' ) # get milestones collected by user after user has completed the pre-requisite course completed_milestones = milestones_achieved_by_user(student, unicode(pre_requisite_course.id)) self.assertEqual(len(completed_milestones), 1) self.assertEqual(completed_milestones[0]['namespace'], unicode(pre_requisite_course.id)) @patch.dict(settings.FEATURES, {'ENABLE_OPENBADGES': True}) @patch('certificates.badge_handler.BadgeHandler', spec=True) def test_badge_callback(self, handler): student = UserFactory() course = CourseFactory.create(org='edx', number='998', display_name='Test Course', issue_badges=True) cert = GeneratedCertificateFactory.create( user=student, course_id=course.id, status=CertificateStatuses.generating, mode='verified' ) # Check return value since class instance will be stored there. 
self.assertFalse(handler.return_value.award.called) cert.status = CertificateStatuses.downloadable cert.save() self.assertTrue(handler.return_value.award.called)
agpl-3.0
mancoast/CPythonPyc_test
cpython/253_test_rgbimg.py
19
1847
# Testing rgbimg module import warnings warnings.filterwarnings("ignore", "the rgbimg module is deprecated", DeprecationWarning, ".*test_rgbimg$") import rgbimg import os, uu from test.test_support import verbose, unlink, findfile class error(Exception): pass print 'RGBimg test suite:' def testimg(rgb_file, raw_file): rgb_file = findfile(rgb_file) raw_file = findfile(raw_file) width, height = rgbimg.sizeofimage(rgb_file) rgb = rgbimg.longimagedata(rgb_file) if len(rgb) != width * height * 4: raise error, 'bad image length' raw = open(raw_file, 'rb').read() if rgb != raw: raise error, \ 'images don\'t match for '+rgb_file+' and '+raw_file for depth in [1, 3, 4]: rgbimg.longstoimage(rgb, width, height, depth, '@.rgb') os.unlink('@.rgb') table = [ ('testrgb'+os.extsep+'uue', 'test'+os.extsep+'rgb'), ('testimg'+os.extsep+'uue', 'test'+os.extsep+'rawimg'), ('testimgr'+os.extsep+'uue', 'test'+os.extsep+'rawimg'+os.extsep+'rev'), ] for source, target in table: source = findfile(source) target = findfile(target) if verbose: print "uudecoding", source, "->", target, "..." uu.decode(source, target) if verbose: print "testing..." ttob = rgbimg.ttob(0) if ttob != 0: raise error, 'ttob should start out as zero' testimg('test'+os.extsep+'rgb', 'test'+os.extsep+'rawimg') ttob = rgbimg.ttob(1) if ttob != 0: raise error, 'ttob should be zero' testimg('test'+os.extsep+'rgb', 'test'+os.extsep+'rawimg'+os.extsep+'rev') ttob = rgbimg.ttob(0) if ttob != 1: raise error, 'ttob should be one' ttob = rgbimg.ttob(0) if ttob != 0: raise error, 'ttob should be zero' for source, target in table: unlink(findfile(target))
gpl-3.0
gylian/sickbeard
sickbeard/history.py
35
2768
# Author: Nic Wolfe <nic@wolfeden.ca> # URL: http://code.google.com/p/sickbeard/ # # This file is part of Sick Beard. # # Sick Beard is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Sick Beard is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Sick Beard. If not, see <http://www.gnu.org/licenses/>. import db import datetime from sickbeard.common import SNATCHED, SUBTITLED, Quality dateFormat = "%Y%m%d%H%M%S" def _logHistoryItem(action, showid, season, episode, quality, resource, provider): logDate = datetime.datetime.today().strftime(dateFormat) myDB = db.DBConnection() myDB.action("INSERT INTO history (action, date, showid, season, episode, quality, resource, provider) VALUES (?,?,?,?,?,?,?,?)", [action, logDate, showid, season, episode, quality, resource, provider]) def logSnatch(searchResult): for curEpObj in searchResult.episodes: showid = int(curEpObj.show.tvdbid) season = int(curEpObj.season) episode = int(curEpObj.episode) quality = searchResult.quality providerClass = searchResult.provider if providerClass != None: provider = providerClass.name else: provider = "unknown" action = Quality.compositeStatus(SNATCHED, searchResult.quality) resource = searchResult.name _logHistoryItem(action, showid, season, episode, quality, resource, provider) def logDownload(episode, filename, new_ep_quality, release_group=None): showid = int(episode.show.tvdbid) season = int(episode.season) epNum = int(episode.episode) quality = new_ep_quality # store the release group as the provider if possible if release_group: provider = release_group 
else: provider = -1 action = episode.status _logHistoryItem(action, showid, season, epNum, quality, filename, provider) def logSubtitle(showid, season, episode, status, subtitleResult): resource = subtitleResult.path provider = subtitleResult.service status, quality = Quality.splitCompositeStatus(status) action = Quality.compositeStatus(SUBTITLED, quality) _logHistoryItem(action, showid, season, episode, quality, resource, provider)
gpl-3.0
races1986/SafeLanguage
CEM/login.py
1
17748
#!/usr/bin/python # -*- coding: utf-8 -*- """ Script to log the robot in to a wiki account. Suggestion is to make a special account to use for robot use only. Make sure this robot account is well known on your home wiki before using. Parameters: -all Try to log in on all sites where a username is defined in user-config.py. -clean Use this option for logout. In combination with -all it will log out on all sites where a username is defined. -force Ignores if the user is already logged in, and tries to log in. -pass Useful in combination with -all when you have accounts for several sites and use the same password for all of them. Asks you for the password, then logs in on all given sites. -pass:XXXX Uses XXXX as password. Be careful if you use this parameter because your password will be shown on your screen, and will probably be saved in your command line history. This is NOT RECOMMENDED for use on computers where others have either physical or remote access. Use -pass instead. -sysop Log in with your sysop account. -test test whether you are logged-in -v -v (Doubly verbose) Shows http requests made when logging in. This might leak private data (password, session id), so make sure to check the output. Using -log is recommended: this will output a lot of data If not given as parameter, the script will ask for your username and password (password entry will be hidden), log in to your home wiki using this combination, and store the resulting cookies (containing your password hash, so keep it secured!) in a file in the login-data subdirectory. All scripts in this library will be looking for this cookie file and will use the login information if it is present. To log out, throw away the XX-login.data file that is created in the login-data subdirectory. """ # # (C) Rob W.W. Hooft, 2003 # (C) Pywikipedia bot team, 2003-2011 # # Distributed under the terms of the MIT license. 
# __version__='$Id: 49befc86f4b9e76e6c5d37e0bcdfa6647e8296f8 $' import re, os, query import urllib2 import wikipedia as pywikibot import config # On some wikis you are only allowed to run a bot if there is a link to # the bot's user page in a specific list. # If bots are listed in a template, the templates name must be given as # second parameter, otherwise it must be None botList = { 'wikipedia': { 'en': [u'Wikipedia:Bots/Status/active bots', 'BotS'], 'simple': [u'Wikipedia:Bots', '/links'] }, 'gentoo': { 'en': [u'Help:Bots', None], } } def show (mysite, sysop = False): if mysite.loggedInAs(sysop = sysop): pywikibot.output(u"You are logged in on %s as %s." % (repr(mysite), mysite.loggedInAs(sysop=sysop))) else: pywikibot.output(u"You are not logged in on %s." % repr(mysite)) class LoginManager: def __init__(self, password=None, sysop=False, site=None, username=None, verbose=False): self.site = site or pywikibot.getSite() self.sysop = sysop if username: self.username = username # perform writeback. 
if site.family.name not in config.usernames: config.usernames[site.family.name]={} config.usernames[site.family.name][self.site.lang]=username else: if sysop: try: self.username = config.sysopnames\ [self.site.family.name][self.site.lang] except: raise pywikibot.NoUsername(u'ERROR: Sysop username for %s:%s is undefined.\nIf you have a sysop account for that site, please add such a line to user-config.py:\n\nsysopnames[\'%s\'][\'%s\'] = \'myUsername\'' % (self.site.family.name, self.site.lang, self.site.family.name, self.site.lang)) else: try: self.username = config.usernames[self.site.family.name][self.site.lang] except: raise pywikibot.NoUsername(u'ERROR: Username for %s:%s is undefined.\nIf you have an account for that site, please add such a line to user-config.py:\n\nusernames[\'%s\'][\'%s\'] = \'myUsername\'' % (self.site.family.name, self.site.lang, self.site.family.name, self.site.lang)) self.password = password self.verbose = verbose if getattr(config, 'password_file', ''): self.readPassword() def botAllowed(self): """ Checks whether the bot is listed on a specific page to comply with the policy on the respective wiki. """ if self.site.family.name in botList \ and self.site.language() in botList[self.site.family.name]: botListPageTitle, botTemplate = botList[self.site.family.name][self.site.language()] botListPage = pywikibot.Page(self.site, botListPageTitle) if botTemplate: for template in botListPage.templatesWithParams(): if template[0] == botTemplate \ and template[1][0] == self.username: return True else: for linkedPage in botListPage.linkedPages(): if linkedPage.title(withNamespace=False) == self.username: return True return False else: # No bot policies on other sites return True def getCookie(self, api=config.use_api_login, remember=True, captcha = None): """ Login to the site. remember Remember login (default: True) captchaId A dictionary containing the captcha id and answer, if any Returns cookie data if succesful, None otherwise. 
""" if api: predata = { 'action': 'login', 'lgname': self.username, 'lgpassword': self.password, } if self.site.family.ldapDomain: predata['lgdomain'] = self.site.family.ldapDomain address = self.site.api_address() else: predata = { "wpName": self.username.encode(self.site.encoding()), "wpPassword": self.password, "wpLoginattempt": "Aanmelden & Inschrijven", # dutch button label seems to work for all wikis "wpRemember": str(int(bool(remember))), "wpSkipCookieCheck": '1' } if self.site.family.ldapDomain: # VistaPrint fix predata["wpDomain"] = self.site.family.ldapDomain if captcha: predata["wpCaptchaId"] = captcha['id'] predata["wpCaptchaWord"] = captcha['answer'] login_address = self.site.login_address() address = login_address + '&action=submit' if api: while True: # build the cookie L = {} L["cookieprefix"] = None index = self.site._userIndex(self.sysop) if self.site._cookies[index]: #if user is trying re-login, update the new information self.site.updateCookies(L, self.sysop) else: # clean type login, setup the new cookies files. 
self.site._setupCookies(L, self.sysop) response, data = query.GetData(predata, self.site, sysop=self.sysop, back_response = True) result = data['login']['result'] if result == "NeedToken": predata["lgtoken"] = data["login"]["token"] if ['lgtoken'] in data['login'].keys(): self.site._userName[index] = data['login']['lgusername'] self.site._token[index] = data['login']['lgtoken'] + "+\\" continue break if result != "Success": #if result == "NotExists": # #elif result == "WrongPass": # #elif result == "Throttled": # return False else: response, data = self.site.postData(address, self.site.urlEncode(predata), sysop=self.sysop) if self.verbose: fakepredata = predata fakepredata['wpPassword'] = u'XXXXX' pywikibot.output(u"self.site.postData(%s, %s)" % (address, self.site.urlEncode(fakepredata))) trans = config.transliterate config.transliterate = False #transliteration breaks for some reason #pywikibot.output(fakedata.decode(self.site.encoding())) config.transliterate = trans fakeresponsemsg = re.sub(r"(session|Token)=..........", r"session=XXXXXXXXXX", data) pywikibot.output(u"%s/%s\n%s" % (response.code, response.msg, fakeresponsemsg)) #pywikibot.cj.save(pywikibot.COOKIEFILE) Reat=re.compile(': (.*?)=(.*?);') L = {} if hasattr(response, 'sheaders'): ck = response.sheaders else: ck = response.info().getallmatchingheaders('set-cookie') for eat in ck: m = Reat.search(eat) if m: L[m.group(1)] = m.group(2) got_token = got_user = False for Ldata in L.keys(): if 'Token' in Ldata: got_token = True if 'User' in Ldata or 'UserName' in Ldata: got_user = True if got_token and got_user: #process the basic information to Site() index = self.site._userIndex(self.sysop) if api: #API result came back username, token and sessions. 
self.site._userName[index] = data['login']['lgusername'] self.site._token[index] = data['login']['lgtoken'] + "+\\" else: self.site._userName[index] = self.username if self.site._cookies[index]: #if user is trying re-login, update the new information self.site.updateCookies(L, self.sysop) else: # clean type login, setup the new cookies files. self.site._setupCookies(L, self.sysop) return True elif not captcha: solve = self.site.solveCaptcha(data) if solve: return self.getCookie(api = api, remember = remember, captcha = solve) return None def storecookiedata(self, filename, data): """ Store cookie data. The argument data is the raw data, as returned by getCookie(). Returns nothing. """ s = u'' for v, k in data.iteritems(): s += "%s=%s\n" % (v, k) f = open(pywikibot.config.datafilepath('login-data',filename), 'w') f.write(s) f.close() def readPassword(self): """ Read passwords from a file. DO NOT FORGET TO REMOVE READ ACCESS FOR OTHER USERS!!! Use chmod 600 password-file. All lines below should be valid Python tuples in the form (code, family, username, password) or (username, password) to set a default password for an username. Default usernames should occur above specific usernames. 
Example: ("my_username", "my_default_password") ("my_sysop_user", "my_sysop_password") ("en", "wikipedia", "my_en_user", "my_en_pass") """ password_f = open(pywikibot.config.datafilepath(config.password_file), 'r') for line in password_f: if not line.strip(): continue entry = eval(line) if len(entry) == 2: #for default userinfo if entry[0] == self.username: self.password = entry[1] elif len(entry) == 4: #for userinfo included code and family if entry[0] == self.site.lang and \ entry[1] == self.site.family.name and \ entry[2] == self.username: self.password = entry[3] password_f.close() def login(self, api=config.use_api_login, retry = False): if not self.password: # As we don't want the password to appear on the screen, we set # password = True self.password = pywikibot.input( u'Password for user %(name)s on %(site)s:' % {'name': self.username, 'site': self.site}, password = True) self.password = self.password.encode(self.site.encoding()) if api: pywikibot.output(u"Logging in to %(site)s as %(name)s via API." % {'name': self.username, 'site': self.site}) else: pywikibot.output(u"Logging in to %(site)s as %(name)s" % {'name': self.username, 'site': self.site}) try: cookiedata = self.getCookie(api) except NotImplementedError: pywikibot.output('API disabled because this site does not support.\nRetrying by ordinary way...') api = False return self.login(False, retry) if cookiedata: fn = '%s-%s-%s-login.data' % (self.site.family.name, self.site.lang, self.username) #self.storecookiedata(fn,cookiedata) pywikibot.output(u"Should be logged in now") # Show a warning according to the local bot policy if not self.botAllowed(): pywikibot.output(u'*** Your username is not listed on [[%s]].\n*** Please make sure you are allowed to use the robot before actually using it!' % botList[self.site.family.name][self.site.lang]) return True else: pywikibot.output(u"Login failed. 
Wrong password or CAPTCHA answer?") if api: pywikibot.output(u"API login failed, retrying using standard webpage.") return self.login(False, retry) if retry: self.password = None return self.login(api, True) else: return False def logout(self, api = config.use_api): flushCk = False if api and self.site.versionnumber() >= 12: if query.GetData({'action':'logout'}, self.site) == []: flushCk = True else: text = self.site.getUrl(self.site.get_address("Special:UserLogout")) if self.site.mediawiki_message('logouttext') in text: #confirm loggedout flushCk = True if flushCk: self.site._removeCookies(self.username) return True return False def showCaptchaWindow(self, url): pass def main(): username = password = None sysop = False logall = False forceLogin = False verbose = False clean = False testonly = False for arg in pywikibot.handleArgs(): if arg.startswith("-pass"): if len(arg) == 5: password = pywikibot.input(u'Password for all accounts:', password = True) else: password = arg[6:] elif arg == "-clean": clean = True elif arg == "-sysop": sysop = True elif arg == "-all": logall = True elif arg == "-force": forceLogin = True elif arg == "-test": testonly = True else: pywikibot.showHelp('login') return if pywikibot.verbose > 1: pywikibot.warning(u"Using -v -v on login.py might leak private data. 
When sharing, please double check your password is not readable and log out your bots session.") verbose = True # only use this verbose when running from login.py if logall: if sysop: namedict = config.sysopnames else: namedict = config.usernames for familyName in namedict.iterkeys(): for lang in namedict[familyName].iterkeys(): if testonly: show(pywikibot.getSite(lang, familyName), sysop) else: try: site = pywikibot.getSite(lang, familyName) loginMan = LoginManager(password, sysop=sysop, site=site, verbose=verbose) if clean: loginMan.logout() else: if not forceLogin and site.loggedInAs(sysop = sysop): pywikibot.output(u'Already logged in on %s' % site) else: loginMan.login() except pywikibot.NoSuchSite: pywikibot.output(lang+ u'.' + familyName + u' is not a valid site, please remove it from your config') elif testonly: show(pywikibot.getSite(), sysop) elif clean: try: site = pywikibot.getSite() lgm = LoginManager(site = site) lgm.logout() except pywikibot.NoSuchSite: pass else: loginMan = LoginManager(password, sysop = sysop, verbose=verbose) loginMan.login() if __name__ == "__main__": try: main() finally: pywikibot.stopme()
epl-1.0
anthkris/oppia
extensions/rich_text_components/Collapsible/Collapsible.py
16
1566
# coding: utf-8 # # Copyright 2014 The Oppia Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, softwar # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from extensions.rich_text_components import base class Collapsible(base.BaseRichTextComponent): """A rich-text component representing a collapsible block.""" name = 'Collapsible' category = 'Basic Input' description = 'A collapsible block of HTML.' frontend_name = 'collapsible' tooltip = 'Insert collapsible block' is_complex = True is_block_element = True _customization_arg_specs = [{ 'name': 'heading', 'description': 'The heading for the collapsible block', 'schema': { 'type': 'unicode', }, 'default_value': 'Sample Header', }, { 'name': 'content', 'description': 'The content of the collapsible block', 'schema': { 'type': 'html', 'ui_config': { 'hide_complex_extensions': True, } }, 'default_value': 'You have opened the collapsible block.' }]
apache-2.0
gvanrossum/asyncio
examples/fetch3.py
13
7670
"""Fetch one URL and write its content to stdout. This version adds a primitive connection pool, redirect following and chunked transfer-encoding. It also supports a --iocp flag. """ import sys import urllib.parse from http.client import BadStatusLine from asyncio import * class ConnectionPool: # TODO: Locking? Close idle connections? def __init__(self, verbose=False): self.verbose = verbose self.connections = {} # {(host, port, ssl): (reader, writer)} def close(self): for _, writer in self.connections.values(): writer.close() @coroutine def open_connection(self, host, port, ssl): port = port or (443 if ssl else 80) ipaddrs = yield from get_event_loop().getaddrinfo(host, port) if self.verbose: print('* %s resolves to %s' % (host, ', '.join(ip[4][0] for ip in ipaddrs)), file=sys.stderr) for _, _, _, _, (h, p, *_) in ipaddrs: key = h, p, ssl conn = self.connections.get(key) if conn: reader, writer = conn if reader._eof: self.connections.pop(key) continue if self.verbose: print('* Reusing pooled connection', key, file=sys.stderr) return conn reader, writer = yield from open_connection(host, port, ssl=ssl) host, port, *_ = writer.get_extra_info('peername') key = host, port, ssl self.connections[key] = reader, writer if self.verbose: print('* New connection', key, file=sys.stderr) return reader, writer class Request: def __init__(self, url, verbose=True): self.url = url self.verbose = verbose self.parts = urllib.parse.urlparse(self.url) self.scheme = self.parts.scheme assert self.scheme in ('http', 'https'), repr(url) self.ssl = self.parts.scheme == 'https' self.netloc = self.parts.netloc self.hostname = self.parts.hostname self.port = self.parts.port or (443 if self.ssl else 80) self.path = (self.parts.path or '/') self.query = self.parts.query if self.query: self.full_path = '%s?%s' % (self.path, self.query) else: self.full_path = self.path self.http_version = 'HTTP/1.1' self.method = 'GET' self.headers = [] self.reader = None self.writer = None def vprint(self, 
*args): if self.verbose: print(*args, file=sys.stderr) @coroutine def connect(self, pool): self.vprint('* Connecting to %s:%s using %s' % (self.hostname, self.port, 'ssl' if self.ssl else 'tcp')) self.reader, self.writer = \ yield from pool.open_connection(self.hostname, self.port, ssl=self.ssl) self.vprint('* Connected to %s' % (self.writer.get_extra_info('peername'),)) @coroutine def putline(self, line): self.vprint('>', line) self.writer.write(line.encode('latin-1') + b'\r\n') ##yield from self.writer.drain() @coroutine def send_request(self): request = '%s %s %s' % (self.method, self.full_path, self.http_version) yield from self.putline(request) if 'host' not in {key.lower() for key, _ in self.headers}: self.headers.insert(0, ('Host', self.netloc)) for key, value in self.headers: line = '%s: %s' % (key, value) yield from self.putline(line) yield from self.putline('') @coroutine def get_response(self): response = Response(self.reader, self.verbose) yield from response.read_headers() return response class Response: def __init__(self, reader, verbose=True): self.reader = reader self.verbose = verbose self.http_version = None # 'HTTP/1.1' self.status = None # 200 self.reason = None # 'Ok' self.headers = [] # [('Content-Type', 'text/html')] def vprint(self, *args): if self.verbose: print(*args, file=sys.stderr) @coroutine def getline(self): line = (yield from self.reader.readline()).decode('latin-1').rstrip() self.vprint('<', line) return line @coroutine def read_headers(self): status_line = yield from self.getline() status_parts = status_line.split(None, 2) if len(status_parts) != 3: raise BadStatusLine(status_line) self.http_version, status, self.reason = status_parts self.status = int(status) while True: header_line = yield from self.getline() if not header_line: break # TODO: Continuation lines. 
key, value = header_line.split(':', 1) self.headers.append((key, value.strip())) def get_redirect_url(self, default=None): if self.status not in (300, 301, 302, 303, 307): return default return self.get_header('Location', default) def get_header(self, key, default=None): key = key.lower() for k, v in self.headers: if k.lower() == key: return v return default @coroutine def read(self): nbytes = None for key, value in self.headers: if key.lower() == 'content-length': nbytes = int(value) break if nbytes is None: if self.get_header('transfer-encoding', '').lower() == 'chunked': blocks = [] size = -1 while size: size_header = yield from self.reader.readline() if not size_header: break parts = size_header.split(b';') size = int(parts[0], 16) if size: block = yield from self.reader.readexactly(size) assert len(block) == size, (len(block), size) blocks.append(block) crlf = yield from self.reader.readline() assert crlf == b'\r\n', repr(crlf) body = b''.join(blocks) else: body = yield from self.reader.read() else: body = yield from self.reader.readexactly(nbytes) return body @coroutine def fetch(url, verbose=True, max_redirect=10): pool = ConnectionPool(verbose) try: for _ in range(max_redirect): request = Request(url, verbose) yield from request.connect(pool) yield from request.send_request() response = yield from request.get_response() body = yield from response.read() next_url = response.get_redirect_url() if not next_url: break url = urllib.parse.urljoin(url, next_url) print('redirect to', url, file=sys.stderr) return body finally: pool.close() def main(): if '--iocp' in sys.argv: from asyncio.windows_events import ProactorEventLoop loop = ProactorEventLoop() set_event_loop(loop) else: loop = get_event_loop() try: body = loop.run_until_complete(fetch(sys.argv[1], '-v' in sys.argv)) finally: loop.close() sys.stdout.buffer.write(body) if __name__ == '__main__': main()
apache-2.0
liangazhou/django-rdp
packages/Django-1.8.6/django/utils/lorem_ipsum.py
505
4960
""" Utility functions for generating "lorem ipsum" Latin text. """ from __future__ import unicode_literals import random COMMON_P = ( 'Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod ' 'tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim ' 'veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea ' 'commodo consequat. Duis aute irure dolor in reprehenderit in voluptate ' 'velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint ' 'occaecat cupidatat non proident, sunt in culpa qui officia deserunt ' 'mollit anim id est laborum.' ) WORDS = ('exercitationem', 'perferendis', 'perspiciatis', 'laborum', 'eveniet', 'sunt', 'iure', 'nam', 'nobis', 'eum', 'cum', 'officiis', 'excepturi', 'odio', 'consectetur', 'quasi', 'aut', 'quisquam', 'vel', 'eligendi', 'itaque', 'non', 'odit', 'tempore', 'quaerat', 'dignissimos', 'facilis', 'neque', 'nihil', 'expedita', 'vitae', 'vero', 'ipsum', 'nisi', 'animi', 'cumque', 'pariatur', 'velit', 'modi', 'natus', 'iusto', 'eaque', 'sequi', 'illo', 'sed', 'ex', 'et', 'voluptatibus', 'tempora', 'veritatis', 'ratione', 'assumenda', 'incidunt', 'nostrum', 'placeat', 'aliquid', 'fuga', 'provident', 'praesentium', 'rem', 'necessitatibus', 'suscipit', 'adipisci', 'quidem', 'possimus', 'voluptas', 'debitis', 'sint', 'accusantium', 'unde', 'sapiente', 'voluptate', 'qui', 'aspernatur', 'laudantium', 'soluta', 'amet', 'quo', 'aliquam', 'saepe', 'culpa', 'libero', 'ipsa', 'dicta', 'reiciendis', 'nesciunt', 'doloribus', 'autem', 'impedit', 'minima', 'maiores', 'repudiandae', 'ipsam', 'obcaecati', 'ullam', 'enim', 'totam', 'delectus', 'ducimus', 'quis', 'voluptates', 'dolores', 'molestiae', 'harum', 'dolorem', 'quia', 'voluptatem', 'molestias', 'magni', 'distinctio', 'omnis', 'illum', 'dolorum', 'voluptatum', 'ea', 'quas', 'quam', 'corporis', 'quae', 'blanditiis', 'atque', 'deserunt', 'laboriosam', 'earum', 'consequuntur', 'hic', 'cupiditate', 'quibusdam', 'accusamus', 'ut', 'rerum', 
'error', 'minus', 'eius', 'ab', 'ad', 'nemo', 'fugit', 'officia', 'at', 'in', 'id', 'quos', 'reprehenderit', 'numquam', 'iste', 'fugiat', 'sit', 'inventore', 'beatae', 'repellendus', 'magnam', 'recusandae', 'quod', 'explicabo', 'doloremque', 'aperiam', 'consequatur', 'asperiores', 'commodi', 'optio', 'dolor', 'labore', 'temporibus', 'repellat', 'veniam', 'architecto', 'est', 'esse', 'mollitia', 'nulla', 'a', 'similique', 'eos', 'alias', 'dolore', 'tenetur', 'deleniti', 'porro', 'facere', 'maxime', 'corrupti') COMMON_WORDS = ('lorem', 'ipsum', 'dolor', 'sit', 'amet', 'consectetur', 'adipisicing', 'elit', 'sed', 'do', 'eiusmod', 'tempor', 'incididunt', 'ut', 'labore', 'et', 'dolore', 'magna', 'aliqua') def sentence(): """ Returns a randomly generated sentence of lorem ipsum text. The first word is capitalized, and the sentence ends in either a period or question mark. Commas are added at random. """ # Determine the number of comma-separated sections and number of words in # each section for this sentence. sections = [' '.join(random.sample(WORDS, random.randint(3, 12))) for i in range(random.randint(1, 5))] s = ', '.join(sections) # Convert to sentence case and add end punctuation. return '%s%s%s' % (s[0].upper(), s[1:], random.choice('?.')) def paragraph(): """ Returns a randomly generated paragraph of lorem ipsum text. The paragraph consists of between 1 and 4 sentences, inclusive. """ return ' '.join(sentence() for i in range(random.randint(1, 4))) def paragraphs(count, common=True): """ Returns a list of paragraphs as returned by paragraph(). If `common` is True, then the first paragraph will be the standard 'lorem ipsum' paragraph. Otherwise, the first paragraph will be random Latin text. Either way, subsequent paragraphs will be random Latin text. 
""" paras = [] for i in range(count): if common and i == 0: paras.append(COMMON_P) else: paras.append(paragraph()) return paras def words(count, common=True): """ Returns a string of `count` lorem ipsum words separated by a single space. If `common` is True, then the first 19 words will be the standard 'lorem ipsum' words. Otherwise, all words will be selected randomly. """ if common: word_list = list(COMMON_WORDS) else: word_list = [] c = len(word_list) if count > c: count -= c while count > 0: c = min(count, len(WORDS)) count -= c word_list += random.sample(WORDS, c) else: word_list = word_list[:count] return ' '.join(word_list)
apache-2.0
Changaco/oh-mainline
vendor/packages/PyYaml/lib/yaml/cyaml.py
537
3290
__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', 'CBaseDumper', 'CSafeDumper', 'CDumper'] from _yaml import CParser, CEmitter from constructor import * from serializer import * from representer import * from resolver import * class CBaseLoader(CParser, BaseConstructor, BaseResolver): def __init__(self, stream): CParser.__init__(self, stream) BaseConstructor.__init__(self) BaseResolver.__init__(self) class CSafeLoader(CParser, SafeConstructor, Resolver): def __init__(self, stream): CParser.__init__(self, stream) SafeConstructor.__init__(self) Resolver.__init__(self) class CLoader(CParser, Constructor, Resolver): def __init__(self, stream): CParser.__init__(self, stream) Constructor.__init__(self) Resolver.__init__(self) class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): def __init__(self, stream, default_style=None, default_flow_style=None, canonical=None, indent=None, width=None, allow_unicode=None, line_break=None, encoding=None, explicit_start=None, explicit_end=None, version=None, tags=None): CEmitter.__init__(self, stream, canonical=canonical, indent=indent, width=width, encoding=encoding, allow_unicode=allow_unicode, line_break=line_break, explicit_start=explicit_start, explicit_end=explicit_end, version=version, tags=tags) Representer.__init__(self, default_style=default_style, default_flow_style=default_flow_style) Resolver.__init__(self) class CSafeDumper(CEmitter, SafeRepresenter, Resolver): def __init__(self, stream, default_style=None, default_flow_style=None, canonical=None, indent=None, width=None, allow_unicode=None, line_break=None, encoding=None, explicit_start=None, explicit_end=None, version=None, tags=None): CEmitter.__init__(self, stream, canonical=canonical, indent=indent, width=width, encoding=encoding, allow_unicode=allow_unicode, line_break=line_break, explicit_start=explicit_start, explicit_end=explicit_end, version=version, tags=tags) SafeRepresenter.__init__(self, default_style=default_style, 
default_flow_style=default_flow_style) Resolver.__init__(self) class CDumper(CEmitter, Serializer, Representer, Resolver): def __init__(self, stream, default_style=None, default_flow_style=None, canonical=None, indent=None, width=None, allow_unicode=None, line_break=None, encoding=None, explicit_start=None, explicit_end=None, version=None, tags=None): CEmitter.__init__(self, stream, canonical=canonical, indent=indent, width=width, encoding=encoding, allow_unicode=allow_unicode, line_break=line_break, explicit_start=explicit_start, explicit_end=explicit_end, version=version, tags=tags) Representer.__init__(self, default_style=default_style, default_flow_style=default_flow_style) Resolver.__init__(self)
agpl-3.0
vipins/ccccms
env/Lib/site-packages/django/contrib/gis/tests/inspectapp/tests.py
57
4848
from __future__ import absolute_import import os from django.db import connections from django.test import TestCase from django.contrib.gis.gdal import Driver from django.contrib.gis.geometry.test_data import TEST_DATA from django.contrib.gis.utils.ogrinspect import ogrinspect from .models import AllOGRFields class OGRInspectTest(TestCase): def test_poly(self): shp_file = os.path.join(TEST_DATA, 'test_poly', 'test_poly.shp') model_def = ogrinspect(shp_file, 'MyModel') expected = [ '# This is an auto-generated Django model module created by ogrinspect.', 'from django.contrib.gis.db import models', '', 'class MyModel(models.Model):', ' float = models.FloatField()', ' int = models.FloatField()', ' str = models.CharField(max_length=80)', ' geom = models.PolygonField(srid=-1)', ' objects = models.GeoManager()', ] self.assertEqual(model_def, '\n'.join(expected)) def test_date_field(self): shp_file = os.path.join(TEST_DATA, 'cities', 'cities.shp') model_def = ogrinspect(shp_file, 'City') expected = [ '# This is an auto-generated Django model module created by ogrinspect.', 'from django.contrib.gis.db import models', '', 'class City(models.Model):', ' name = models.CharField(max_length=80)', ' population = models.FloatField()', ' density = models.FloatField()', ' created = models.DateField()', ' geom = models.PointField(srid=-1)', ' objects = models.GeoManager()', ] self.assertEqual(model_def, '\n'.join(expected)) def test_time_field(self): # Only possible to test this on PostGIS at the momemnt. MySQL # complains about permissions, and SpatiaLite/Oracle are # insanely difficult to get support compiled in for in GDAL. if not connections['default'].ops.postgis: return # Getting the database identifier used by OGR, if None returned # GDAL does not have the support compiled in. 
ogr_db = get_ogr_db_string() if not ogr_db: return # writing shapefules via GDAL currently does not support writing OGRTime # fields, so we need to actually use a database model_def = ogrinspect(ogr_db, 'Measurement', layer_key=AllOGRFields._meta.db_table, decimal=['f_decimal']) expected = [ '# This is an auto-generated Django model module created by ogrinspect.', 'from django.contrib.gis.db import models', '', 'class Measurement(models.Model):', ' f_decimal = models.DecimalField(max_digits=0, decimal_places=0)', ' f_int = models.IntegerField()', ' f_datetime = models.DateTimeField()', ' f_time = models.TimeField()', ' f_float = models.FloatField()', ' f_char = models.CharField(max_length=10)', ' f_date = models.DateField()', ' geom = models.PolygonField()', ' objects = models.GeoManager()', ] self.assertEqual(model_def, '\n'.join(expected)) def get_ogr_db_string(): # Construct the DB string that GDAL will use to inspect the database. # GDAL will create its own connection to the database, so we re-use the # connection settings from the Django test. This approach is a bit fragile # and cannot work on any other database other than PostgreSQL at the moment. db = connections.databases['default'] # Map from the django backend into the OGR driver name and database identifier # http://www.gdal.org/ogr/ogr_formats.html # # TODO: Support Oracle (OCI), MySQL, and SpatiaLite. drivers = { 'django.contrib.gis.db.backends.postgis': ('PostgreSQL', 'PG'), } drv_name, db_str = drivers[db['ENGINE']] # Ensure that GDAL library has driver support for the database. try: Driver(drv_name) except: return None # Build the params of the OGR database connection string # TODO: connection strings are database-dependent, thus if # we ever test other backends, this will need to change. 
params = ["dbname='%s'" % db['NAME']] def add(key, template): value = db.get(key, None) # Don't add the parameter if it is not in django's settings if value: params.append(template % value) add('HOST', "host='%s'") add('PORT', "port='%s'") add('USER', "user='%s'") add('PASSWORD', "password='%s'") return '%s:%s' % (db_str, ' '.join(params))
bsd-3-clause
wwj718/murp-edx
common/djangoapps/third_party_auth/tests/test_pipeline.py
78
1656
"""Unit tests for third_party_auth/pipeline.py.""" import random from third_party_auth import pipeline, provider from third_party_auth.tests import testutil # Allow tests access to protected methods (or module-protected methods) under # test. pylint: disable-msg=protected-access class MakeRandomPasswordTest(testutil.TestCase): """Tests formation of random placeholder passwords.""" def setUp(self): super(MakeRandomPasswordTest, self).setUp() self.seed = 1 def test_default_args(self): self.assertEqual(pipeline._DEFAULT_RANDOM_PASSWORD_LENGTH, len(pipeline.make_random_password())) def test_probably_only_uses_charset(self): # This is ultimately probablistic since we could randomly select a good character 100000 consecutive times. for char in pipeline.make_random_password(length=100000): self.assertIn(char, pipeline._PASSWORD_CHARSET) def test_pseudorandomly_picks_chars_from_charset(self): random_instance = random.Random(self.seed) expected = ''.join( random_instance.choice(pipeline._PASSWORD_CHARSET) for _ in xrange(pipeline._DEFAULT_RANDOM_PASSWORD_LENGTH)) random_instance.seed(self.seed) self.assertEqual(expected, pipeline.make_random_password(choice_fn=random_instance.choice)) class ProviderUserStateTestCase(testutil.TestCase): """Tests ProviderUserState behavior.""" def test_get_unlink_form_name(self): state = pipeline.ProviderUserState(provider.GoogleOauth2, object(), False) self.assertEqual(provider.GoogleOauth2.NAME + '_unlink_form', state.get_unlink_form_name())
agpl-3.0
isse-augsburg/ssharp
Models/Small Models/DegradedMode/createGraphFigure.py
2
3466
#!/usr/local/bin/python3
# The MIT License (MIT)
#
# Copyright (c) 2014-2017, Institute for Software & Systems Engineering
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.

# Render graph.csv (row 2 = x values, later rows = series) as an SVG plot.
#
# Run under Windows using Python 3.6
#    c:\Python36\Scripts\pip.exe install numpy
#    c:\Python36\Scripts\pip.exe install matplotlib
#    c:\Python36\python.exe createGraphFigure.py

import csv
import numpy
import matplotlib.pyplot as pyplot
from matplotlib.ticker import FuncFormatter

fileToRead = 'graph.csv'
csvDelimiter = ','
floatDelimiter = '.'

# FIX: the file handle was opened and never closed; the context manager
# guarantees it is released even if parsing raises.
with open(fileToRead, 'r') as csvRawDataFile:
    csvRawData = csv.reader(csvRawDataFile, delimiter=csvDelimiter)
    csvRows = list(csvRawData)

# Read fromValues (the x axis) from the second CSV row, skipping the
# leading label cell.
fromValues = []
for entry in (csvRows[1])[1:]:
    newValue = float(entry.replace(floatDelimiter, '.'))
    fromValues.append(newValue)

# see http://matplotlib.org/api/matplotlib_configuration_api.html#matplotlib.rc
titleFont = {'fontname': 'Garamond', 'fontsize': 16}
labelFont = {'fontname': 'Garamond', 'fontsize': 14}
#standardFont = {'family':'serif','serif':['Garamond'],'sans-serif':['Garamond']}
standardFont = {'family': 'serif', 'serif': ['Garamond']}
standardFigure = {'figsize': (10, 5)}
#pyplot.rcParams['figure.figsize'] = 10,5
pyplot.rc('font', **standardFont)
pyplot.rc('figure', **standardFigure)


def createCustomFormatter(scaleY):
    """Return a tick formatter rendering values as `v*10^scaleY x 10^-scaleY`.

    Keeps the printed mantissa readable while labelling the actual scale.
    """
    # https://matplotlib.org/2.0.0/examples/pylab_examples/custom_ticker1.html
    # http://matplotlib.org/api/ticker_api.html#tick-formatting
    # http://matplotlib.org/api/ticker_api.html#matplotlib.ticker.FormatStrFormatter
    factor = 10.0 ** scaleY
    # FIX: removed stray debug `print(factor)` left over from development.

    def formatMoreBeautiful(value, tickPos):
        return '$ %.2f \\times\ 10^{-%s}$' % (value * factor, str(scaleY))
    return FuncFormatter(formatMoreBeautiful)


def printRow(rowToRead, yLabelName, fileName, scaleY):
    """Plot CSV row `rowToRead` against `fromValues` and save it as SVG.

    scaleY != 1 switches the y axis to the custom scientific formatter.
    """
    # Read resultValues (the y axis) for this row; the first cell is a label.
    # FIX: removed stray debug `print(len(rowData))`.
    resultValues = []
    row = csvRows[rowToRead]
    rowData = row[1:]
    for entry in rowData:
        newValue = float(entry.replace(floatDelimiter, '.'))
        resultValues.append(newValue)

    # Start from a clean figure so repeated calls don't overlay plots.
    pyplot.cla()
    pyplot.clf()
    fig, ax = pyplot.subplots()
    if scaleY != 1:
        ax.yaxis.set_major_formatter(createCustomFormatter(scaleY))
    pyplot.plot(fromValues, resultValues, 'o-')
    pyplot.title('', **titleFont)
    pyplot.xlabel('Pr(F1)', **labelFont)
    pyplot.xticks([0.0, 1.0])
    pyplot.ylabel(yLabelName, **labelFont)
    pyplot.savefig(fileName, format="svg")
    #pyplot.show()


printRow(2, "Pr(Hazard)", "graph.svg", 1)
mit
Nirvedh/CoarseCoherence
tests/configs/tsunami-switcheroo-full.py
52
2444
# Copyright (c) 2012 ARM Limited # All rights reserved. # # The license below extends only to copyright in the software and shall # not be construed as granting a license to any other intellectual # property including but not limited to intellectual property relating # to a hardware implementation of the functionality of the software # licensed hereunder. You may use the software subject to the license # terms below provided that you ensure that this notice is replicated # unmodified and in its entirety in all distributions of the software, # modified or unmodified, in source code or in binary form. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Sandberg

from m5.objects import *
from alpha_generic import *
import switcheroo

# Full-system Alpha (Tsunami) configuration with DDR3-1600 memory whose
# CPU model is switched at runtime between the listed CPU classes.
root = LinuxAlphaFSSwitcheroo(
    mem_class=DDR3_1600_x64,
    cpu_classes=(AtomicSimpleCPU, TimingSimpleCPU, DerivO3CPU)
).create_root()

# Setup a custom test method that uses the switcheroo tester that
# switches between CPU models.
run_test = switcheroo.run_test
bsd-3-clause
trondhindenes/ansible-modules-extras
windows/win_scheduled_task.py
49
2407
#!/usr/bin/python
# -*- coding: utf-8 -*-

# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# this is a windows documentation stub.  actual code lives in the .ps1
# file of the same name

DOCUMENTATION = '''
---
module: win_scheduled_task
version_added: "2.0"
short_description: Manage scheduled tasks
description:
    - Manage scheduled tasks
notes:
    - This module requires Windows Server 2012 or later.
options:
  name:
    description:
      - Name of the scheduled task
    required: true
  description:
    description:
      - The description for the scheduled task
    required: false
  enabled:
    description:
      - Enable/disable the task
    choices:
      - yes
      - no
    default: yes
  state:
    description:
      - State that the task should become
    required: true
    choices:
      - present
      - absent
  user:
    description:
      - User to run scheduled task as
    required: false
  execute:
    description:
      - Command the scheduled task should execute
    required: false
  argument:
    description:
      - Arguments to provide scheduled task action
    required: false
  frequency:
    description:
      - The frequency of the command, not idempotent
    required: false
    choices:
      - daily
      - weekly
  time:
    description:
      - Time to execute scheduled task, not idempotent
    required: false
  days_of_week:
    description:
      - Days of the week to run a weekly task, not idempotent
    required: false
  path:
    description:
      - Folder path of scheduled task
    default: '\'
'''

# FIX: the example used `enable=yes`, but the option declared above (and
# implemented by the .ps1 module) is `enabled` — copying the example as
# written would silently pass an unknown parameter.
EXAMPLES = '''
# Create a scheduled task to open a command prompt
win_scheduled_task: name="TaskName" execute="cmd" frequency="daily"
  time="9am" description="open command prompt"
  path="example" enabled=yes
  state=present user=SYSTEM
'''
gpl-3.0
wangjun/pyload
module/lib/thrift/server/TNonblockingServer.py
83
10950
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""Implementation of non-blocking server.

The main idea of the server is reciving and sending requests
only from main thread.

It also makes thread pool server in tasks terms, not connections.
"""
# Python 2 module: uses Queue, xrange, and str-as-bytes socket I/O.
import threading
import socket
import Queue
import select
import struct
import logging

from thrift.transport import TTransport
from thrift.protocol.TBinaryProtocol import TBinaryProtocolFactory

__all__ = ['TNonblockingServer']

class Worker(threading.Thread):
    """Worker is a small helper to process incoming connection."""
    def __init__(self, queue):
        threading.Thread.__init__(self)
        self.queue = queue

    def run(self):
        """Process queries from task queue, stop if processor is None."""
        while True:
            try:
                # A (None, ...) tuple is the poison pill posted by close().
                processor, iprot, oprot, otrans, callback = self.queue.get()
                if processor is None:
                    break
                processor.process(iprot, oprot)
                callback(True, otrans.getvalue())
            except Exception:
                logging.exception("Exception while processing request")
                callback(False, '')

# Connection state machine values (see Connection docstring).
WAIT_LEN = 0
WAIT_MESSAGE = 1
WAIT_PROCESS = 2
SEND_ANSWER = 3
CLOSED = 4

def locked(func):
    "Decorator which locks self.lock."
    def nested(self, *args, **kwargs):
        self.lock.acquire()
        try:
            return func(self, *args, **kwargs)
        finally:
            self.lock.release()
    return nested

def socket_exception(func):
    "Decorator close object on socket.error."
    def read(self, *args, **kwargs):
        try:
            return func(self, *args, **kwargs)
        except socket.error:
            self.close()
    return read

class Connection:
    """Basic class is represented connection.

    It can be in state:
        WAIT_LEN --- connection is reading request len.
        WAIT_MESSAGE --- connection is reading request.
        WAIT_PROCESS --- connection has just read whole request and
            waits for call ready routine.
        SEND_ANSWER --- connection is sending answer string (including length
            of answer).
        CLOSED --- socket was closed and connection should be deleted.
    """
    def __init__(self, new_socket, wake_up):
        self.socket = new_socket
        self.socket.setblocking(False)
        self.status = WAIT_LEN
        self.len = 0            # expected frame length once the header is read
        self.message = ''       # partial frame (or partial answer) buffer
        self.lock = threading.Lock()
        self.wake_up = wake_up  # callback that wakes the server's select loop

    def _read_len(self):
        """Reads length of request.

        It's really paranoic routine and it may be replaced by
        self.socket.recv(4)."""
        read = self.socket.recv(4 - len(self.message))
        if len(read) == 0:
            # if we read 0 bytes and self.message is empty, it means client
            # close connection
            if len(self.message) != 0:
                logging.error("can't read frame size from socket")
            self.close()
            return
        self.message += read
        if len(self.message) == 4:
            # '!i' = big-endian signed 32-bit frame length (framed transport).
            self.len, = struct.unpack('!i', self.message)
            if self.len < 0:
                logging.error("negative frame size, it seems client"
                              " doesn't use FramedTransport")
                self.close()
            elif self.len == 0:
                logging.error("empty frame, it's really strange")
                self.close()
            else:
                self.message = ''
                self.status = WAIT_MESSAGE

    @socket_exception
    def read(self):
        """Reads data from stream and switch state."""
        assert self.status in (WAIT_LEN, WAIT_MESSAGE)
        if self.status == WAIT_LEN:
            self._read_len()
            # go back to the main loop here for simplicity instead of
            # falling through, even though there is a good chance that
            # the message is already available
        elif self.status == WAIT_MESSAGE:
            read = self.socket.recv(self.len - len(self.message))
            if len(read) == 0:
                logging.error("can't read frame from socket (get %d of %d bytes)" %
                              (len(self.message), self.len))
                self.close()
                return
            self.message += read
            if len(self.message) == self.len:
                self.status = WAIT_PROCESS

    @socket_exception
    def write(self):
        """Writes data from socket and switch state."""
        assert self.status == SEND_ANSWER
        sent = self.socket.send(self.message)
        if sent == len(self.message):
            self.status = WAIT_LEN
            self.message = ''
            self.len = 0
        else:
            # Partial send: keep the unsent tail for the next writable event.
            self.message = self.message[sent:]

    @locked
    def ready(self, all_ok, message):
        """Callback function for switching state and waking up main thread.

        This function is the only function witch can be called asynchronous.

        The ready can switch Connection to three states:
            WAIT_LEN if request was oneway.
            SEND_ANSWER if request was processed in normal way.
            CLOSED if request throws unexpected exception.

        The one wakes up main thread.
        """
        assert self.status == WAIT_PROCESS
        if not all_ok:
            self.close()
            self.wake_up()
            return
        # NOTE(review): resets len to '' rather than 0; harmless because
        # _read_len reassigns it before use, but looks unintentional.
        self.len = ''
        if len(message) == 0:
            # it was a oneway request, do not write answer
            self.message = ''
            self.status = WAIT_LEN
        else:
            self.message = struct.pack('!i', len(message)) + message
            self.status = SEND_ANSWER
        self.wake_up()

    @locked
    def is_writeable(self):
        "Returns True if connection should be added to write list of select."
        return self.status == SEND_ANSWER

    # it's not necessary, but...
    @locked
    def is_readable(self):
        "Returns True if connection should be added to read list of select."
        return self.status in (WAIT_LEN, WAIT_MESSAGE)

    @locked
    def is_closed(self):
        "Returns True if connection is closed."
        return self.status == CLOSED

    def fileno(self):
        "Returns the file descriptor of the associated socket."
        return self.socket.fileno()

    def close(self):
        "Closes connection"
        self.status = CLOSED
        self.socket.close()

class TNonblockingServer:
    """Non-blocking server."""
    def __init__(self, processor, lsocket, inputProtocolFactory=None,
                 outputProtocolFactory=None, threads=10):
        self.processor = processor
        self.socket = lsocket
        self.in_protocol = inputProtocolFactory or TBinaryProtocolFactory()
        self.out_protocol = outputProtocolFactory or self.in_protocol
        self.threads = int(threads)
        self.clients = {}           # fileno -> Connection
        self.tasks = Queue.Queue()  # work items consumed by Worker threads
        # socketpair used solely to interrupt the blocking select() call.
        self._read, self._write = socket.socketpair()
        self.prepared = False

    def setNumThreads(self, num):
        """Set the number of worker threads that should be created."""
        # implement ThreadPool interface
        assert not self.prepared, "You can't change number of threads for working server"
        self.threads = num

    def prepare(self):
        """Prepares server for serve requests."""
        self.socket.listen()
        for _ in xrange(self.threads):
            thread = Worker(self.tasks)
            thread.setDaemon(True)
            thread.start()
        self.prepared = True

    def wake_up(self):
        """Wake up main thread.

        The server usualy waits in select call in we should terminate one.
        The simplest way is using socketpair.

        Select always wait to read from the first socket of socketpair.

        In this case, we can just write anything to the second socket from
        socketpair."""
        self._write.send('1')

    def _select(self):
        """Does select on open connections."""
        readable = [self.socket.handle.fileno(), self._read.fileno()]
        writable = []
        # Python 2: items() returns a list, so deleting from self.clients
        # while iterating here is safe.
        for i, connection in self.clients.items():
            if connection.is_readable():
                readable.append(connection.fileno())
            if connection.is_writeable():
                writable.append(connection.fileno())
            if connection.is_closed():
                del self.clients[i]
        return select.select(readable, writable, readable)

    def handle(self):
        """Handle requests.

        WARNING! You must call prepare BEFORE calling handle.
        """
        assert self.prepared, "You have to call prepare before handle"
        rset, wset, xset = self._select()
        for readable in rset:
            if readable == self._read.fileno():
                # don't care i just need to clean readable flag
                self._read.recv(1024)
            elif readable == self.socket.handle.fileno():
                client = self.socket.accept().handle
                self.clients[client.fileno()] = Connection(client, self.wake_up)
            else:
                connection = self.clients[readable]
                connection.read()
                if connection.status == WAIT_PROCESS:
                    # Frame fully read: hand it to the worker pool.
                    itransport = TTransport.TMemoryBuffer(connection.message)
                    otransport = TTransport.TMemoryBuffer()
                    iprot = self.in_protocol.getProtocol(itransport)
                    oprot = self.out_protocol.getProtocol(otransport)
                    self.tasks.put([self.processor, iprot, oprot,
                                    otransport, connection.ready])
        for writeable in wset:
            self.clients[writeable].write()
        for oob in xset:
            self.clients[oob].close()
            del self.clients[oob]

    def close(self):
        """Closes the server."""
        # One poison pill per worker thread (see Worker.run).
        for _ in xrange(self.threads):
            self.tasks.put([None, None, None, None, None])
        self.socket.close()
        self.prepared = False

    def serve(self):
        """Serve forever."""
        self.prepare()
        while True:
            self.handle()
gpl-3.0
vineethguna/heroku-buildpack-libsandbox
vendor/distribute-0.6.34/setuptools/command/build_py.py
147
10457
"""setuptools' enhanced ``build_py`` command (distribute 0.6.x, Python 2 era).

Adds package_data/exclude_package_data handling and optional 2to3 conversion
on top of the stock distutils ``build_py``.
"""
import os.path, sys, fnmatch
from distutils.command.build_py import build_py as _build_py
from distutils.util import convert_path
from glob import glob

try:
    # Mixin2to3 only exists on Python 3's distutils; on Python 2 the
    # ImportError branch below installs a no-op replacement.
    from distutils.util import Mixin2to3 as _Mixin2to3
    # add support for converting doctests that is missing in 3.1 distutils
    from distutils import log
    from lib2to3.refactor import RefactoringTool, get_fixers_from_package
    import setuptools

    class DistutilsRefactoringTool(RefactoringTool):
        # Route lib2to3 logging through distutils' log facility.
        def log_error(self, msg, *args, **kw):
            log.error(msg, *args)

        def log_message(self, msg, *args):
            log.info(msg, *args)

        def log_debug(self, msg, *args):
            log.debug(msg, *args)

    class Mixin2to3(_Mixin2to3):
        def run_2to3(self, files, doctests = False):
            # See of the distribution option has been set, otherwise check the
            # setuptools default.
            if self.distribution.use_2to3 is not True:
                return
            if not files:
                return
            log.info("Fixing "+" ".join(files))
            self.__build_fixer_names()
            self.__exclude_fixers()
            if doctests:
                if setuptools.run_2to3_on_doctests:
                    r = DistutilsRefactoringTool(self.fixer_names)
                    r.refactor(files, write=True, doctests_only=True)
            else:
                _Mixin2to3.run_2to3(self, files)

        def __build_fixer_names(self):
            # Collect fixers once; cached on self.fixer_names thereafter.
            if self.fixer_names: return
            self.fixer_names = []
            for p in setuptools.lib2to3_fixer_packages:
                self.fixer_names.extend(get_fixers_from_package(p))
            if self.distribution.use_2to3_fixers is not None:
                for p in self.distribution.use_2to3_fixers:
                    self.fixer_names.extend(get_fixers_from_package(p))

        def __exclude_fixers(self):
            excluded_fixers = getattr(self, 'exclude_fixers', [])
            if self.distribution.use_2to3_exclude_fixers is not None:
                excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers)
            for fixer_name in excluded_fixers:
                if fixer_name in self.fixer_names:
                    self.fixer_names.remove(fixer_name)

except ImportError:
    class Mixin2to3:
        def run_2to3(self, files, doctests=True):
            # Nothing done in 2.x
            pass

class build_py(_build_py, Mixin2to3):
    """Enhanced 'build_py' command that includes data files with packages

    The data files are specified via a 'package_data' argument to 'setup()'.
    See 'setuptools.dist.Distribution' for more details.

    Also, this version of the 'build_py' command allows you to specify both
    'py_modules' and 'packages' in the same setup operation.
    """
    def finalize_options(self):
        _build_py.finalize_options(self)
        self.package_data = self.distribution.package_data
        self.exclude_package_data = self.distribution.exclude_package_data or {}
        # Drop any cached data_files so __getattr__ recomputes it lazily.
        if 'data_files' in self.__dict__: del self.__dict__['data_files']
        # Name-mangled (per-class) bookkeeping for 2to3 conversion.
        self.__updated_files = []
        self.__doctests_2to3 = []

    def run(self):
        """Build modules, packages, and copy data files to build directory"""
        if not self.py_modules and not self.packages:
            return

        if self.py_modules:
            self.build_modules()

        if self.packages:
            self.build_packages()
            self.build_package_data()

        # NOTE(review): __updated_files is passed twice (doctests=False then
        # True) — matches upstream; the second call converts doctests inside
        # the already-copied modules.
        self.run_2to3(self.__updated_files, False)
        self.run_2to3(self.__updated_files, True)
        self.run_2to3(self.__doctests_2to3, True)

        # Only compile actual .py files, using our base class' idea of what our
        # output files are.
        self.byte_compile(_build_py.get_outputs(self, include_bytecode=0))

    def __getattr__(self, attr):
        if attr == 'data_files':    # lazily compute data files
            self.data_files = files = self._get_data_files(); return files
        return _build_py.__getattr__(self, attr)

    def build_module(self, module, module_file, package):
        outfile, copied = _build_py.build_module(self, module, module_file, package)
        if copied:
            self.__updated_files.append(outfile)
        return outfile, copied

    def _get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        self.analyze_manifest()
        data = []
        for package in self.packages or ():
            # Locate package source directory
            src_dir = self.get_package_dir(package)

            # Compute package build directory
            build_dir = os.path.join(*([self.build_lib] + package.split('.')))

            # Length of path to strip from found files
            plen = len(src_dir)+1

            # Strip directory from globbed filenames
            filenames = [
                file[plen:] for file in self.find_data_files(package, src_dir)
                ]
            data.append( (package, src_dir, build_dir, filenames) )
        return data

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        globs = (self.package_data.get('', [])
                 + self.package_data.get(package, []))
        files = self.manifest_files.get(package, [])[:]
        for pattern in globs:
            # Each pattern has to be converted to a platform-specific path
            files.extend(glob(os.path.join(src_dir, convert_path(pattern))))
        return self.exclude_data_files(package, src_dir, files)

    def build_package_data(self):
        """Copy data files into build directory"""
        lastdir = None
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                self.mkpath(os.path.dirname(target))
                srcfile = os.path.join(src_dir, filename)
                outf, copied = self.copy_file(srcfile, target)
                srcfile = os.path.abspath(srcfile)
                # Data files listed in convert_2to3_doctests get doctest
                # conversion in run() above.
                if copied and srcfile in self.distribution.convert_2to3_doctests:
                    self.__doctests_2to3.append(outf)

    def analyze_manifest(self):
        # Build manifest_files: package -> data file paths found via egg_info.
        self.manifest_files = mf = {}
        if not self.distribution.include_package_data:
            return
        src_dirs = {}
        for package in self.packages or ():
            # Locate package source directory
            src_dirs[assert_relative(self.get_package_dir(package))] = package

        self.run_command('egg_info')
        ei_cmd = self.get_finalized_command('egg_info')
        for path in ei_cmd.filelist.files:
            d, f = os.path.split(assert_relative(path))
            prev = None
            oldf = f
            # Walk up the directory tree until we hit a known package dir.
            while d and d != prev and d not in src_dirs:
                prev = d
                d, df = os.path.split(d)
                f = os.path.join(df, f)
            if d in src_dirs:
                if path.endswith('.py') and f == oldf:
                    continue    # it's a module, not data
                mf.setdefault(src_dirs[d], []).append(path)

    def get_data_files(self): pass   # kludge 2.4 for lazy computation

    if sys.version < "2.4":    # Python 2.4 already has this code
        def get_outputs(self, include_bytecode=1):
            """Return complete list of files copied to the build directory

            This includes both '.py' files and data files, as well as '.pyc'
            and '.pyo' files if 'include_bytecode' is true.  (This method is
            needed for the 'install_lib' command to do its job properly, and to
            generate a correct installation manifest.)
            """
            return _build_py.get_outputs(self, include_bytecode) + [
                os.path.join(build_dir, filename)
                for package, src_dir, build_dir, filenames in self.data_files
                for filename in filenames
                ]

    def check_package(self, package, package_dir):
        """Check namespace packages' __init__ for declare_namespace"""
        try:
            # Memoized per package in packages_checked.
            return self.packages_checked[package]
        except KeyError:
            pass

        init_py = _build_py.check_package(self, package, package_dir)
        self.packages_checked[package] = init_py

        if not init_py or not self.distribution.namespace_packages:
            return init_py

        for pkg in self.distribution.namespace_packages:
            if pkg == package or pkg.startswith(package + '.'):
                break
        else:
            return init_py

        # NOTE(review): 'rbU' mixes binary and universal-newline modes —
        # accepted by Python 2, removed in Python 3; kept as-is for this
        # legacy codebase.
        f = open(init_py, 'rbU')
        if 'declare_namespace'.encode() not in f.read():
            from distutils import log
            log.warn(
               "WARNING: %s is a namespace package, but its __init__.py does\n"
               "not declare_namespace(); setuptools 0.7 will REQUIRE this!\n"
               '(See the setuptools manual under "Namespace Packages" for '
               "details.)\n", package
            )
        f.close()
        return init_py

    def initialize_options(self):
        self.packages_checked = {}
        _build_py.initialize_options(self)

    def get_package_dir(self, package):
        res = _build_py.get_package_dir(self, package)
        # Honor src_root (used when setup.py lives above the source tree).
        if self.distribution.src_root is not None:
            return os.path.join(self.distribution.src_root, res)
        return res

    def exclude_data_files(self, package, src_dir, files):
        """Filter filenames for package's data files in 'src_dir'"""
        globs = (self.exclude_package_data.get('', [])
                 + self.exclude_package_data.get(package, []))
        bad = []
        for pattern in globs:
            bad.extend(
                fnmatch.filter(
                    files, os.path.join(src_dir, convert_path(pattern))
                )
            )
        bad = dict.fromkeys(bad)
        seen = {}
        return [
            f for f in files if f not in bad
                and f not in seen and seen.setdefault(f, 1)   # ditch dupes
        ]


def assert_relative(path):
    # Reject absolute paths in setup() arguments with a helpful message.
    if not os.path.isabs(path):
        return path
    from distutils.errors import DistutilsSetupError
    raise DistutilsSetupError(
"""Error: setup script specifies an absolute path:

    %s

setup() arguments must *always* be /-separated paths relative to the
setup.py directory, *never* absolute paths.
""" % path
    )
mit
AnselCmy/ARPS
report_spider/report_spider/spiders/USTC006.py
3
3094
# -*- coding:utf-8 -*-
"""Scrapy spider scraping seminar announcements from USTC's School of Life
Sciences (biox.ustc.edu.cn), stopping at items older than today."""
import sys
reload(sys)
sys.setdefaultencoding('utf-8')

import time
import scrapy
from Global_function import get_localtime, print_new_number, save_messages

# Cut-off: only announcements dated today or later are followed.
now_time = get_localtime(time.strftime("%Y-%m-%d", time.localtime()))
# now_time = 20170401

class USTC006_Spider(scrapy.Spider):
    name = 'USTC006'
    start_urls = ['http://biox.ustc.edu.cn/xsbg/']
    domain = 'http://biox.ustc.edu.cn/xsbg/'
    counts = 0   # running number of reports saved so far

    def parse(self, response):
        # Each <li> in the listing is one announcement (link + date span).
        messages = response.xpath("//ul[@class='list-none metlist']/li")
        print_new_number(self.counts, 'USTC', self.name)
        for i in xrange(len(messages)):
            # href starts with './'; strip it before joining with the domain.
            report_url = self.domain + messages[i].xpath(".//a/@href").extract()[0][2:]
            report_time = get_localtime(messages[i].xpath(".//span/text()").extract()[0].strip())
            # The listing is newest-first, so the first stale item ends the crawl.
            if report_time < now_time:
                return
            yield scrapy.Request(report_url, callback=self.parse_pages, meta={'link': report_url, 'number': i + 1})
        # return

    def parse_pages(self, response):
        # Detail-page paragraphs; fields are labelled either with a fullwidth
        # colon (:) or an ASCII colon (:), so both spellings are checked and
        # the matching separator is passed to connect_messages.
        messages = response.xpath("//div[@class='justify']").xpath(".//p").xpath(".//text()").extract()
        # sign tracks which multi-line field is being accumulated:
        # 0 = none, 1 = abstract (content), 2 = speaker bio (person_introduce).
        sign = 0
        title, time, address, speaker, person_introduce, content = '', '', '', '', '', ''
        for message in messages:
            if u'题目:' in message or u'题目:' in message:
                title = self.connect_messages(message, ':') if u'题目:' in message else self.connect_messages(message, ':')
            elif u'时间:' in message or u'时间:' in message:
                # NOTE(review): local `time` shadows the imported time module
                # within this method; harmless here but easy to trip over.
                time = self.connect_messages(message, ':') if u'时间:' in message else self.connect_messages(message, ':')
            elif u'地点:' in message or u'地点:' in message:
                address = self.connect_messages(message, ':') if u'地点:' in message else self.connect_messages(message, ':')
            elif u'报告人:' in message or u'报告人:' in message:
                speaker = self.connect_messages(message, ':') if u'报告人:' in message else self.connect_messages(message, ':')
            elif u'摘要:' in message or u'摘要:' in message:
                sign = 1
                content = self.connect_messages(message, ':') if u'摘要:' in message else self.connect_messages(message, ':')
            elif u'简介:' in message or u'简介:' in message:
                sign = 2
                person_introduce = self.connect_messages(message, ':') if u'简介:' in message else self.connect_messages(message, ':')
            else:
                # Unlabelled paragraph: skip boilerplate, otherwise append to
                # whichever multi-line field is currently open.
                if u'联系人' in message or u'欢迎老师' in message:
                    continue
                if sign == 1:
                    content += '\n' + message.strip()
                elif sign == 2:
                    person_introduce += '\n' + message.strip()
                else:
                    pass
        # Only persist pages where a title was actually found.
        if title != '':
            self.counts += 1
            print_new_number(self.counts, 'USTC', self.name)
            all_messages = save_messages('USTC', self.name, title, time, address, speaker, person_introduce,
                                         content, '', response.meta['link'], response.meta['number'],
                                         u'中国科学技术大学', u'生命科学学院')
            return all_messages

    def connect_messages(self, messages, sign):
        # Re-join everything after the first `sign` separator, trimming each
        # piece and restoring fullwidth colons between the parts.
        text = ''
        message = messages.split(sign)[1:]
        for i in xrange(len(message)):
            if i > 0:
                text += ':'
            text += message[i].strip()
        return text
mit
jackjennings/Mechanic
src/lib/mechanic/github/request.py
4
1993
import requests from mechanic import logger class GithubRequest(object): __cache = {} def __init__(self, url): self.url = url def get(self): logger.debug('Requesting {}'.format(self.url)) return self.cache_response(self.url, self.get_with_etag_cache(self.url)) def get_with_etag_cache(self, url): headers = {} cached_response = self.cache.get(url, None) if cached_response is not None: etag = self.get_etag(cached_response) headers['If-None-Match'] = etag logger.debug('Headers: %s', headers) response = requests.get(url, headers=headers, auth=NullAuth()) self.log_header(response, 'x-ratelimit-limit') self.log_header(response, 'x-ratelimit-remaining') if response.status_code == 304: logger.info('Using cached response for {}'.format(self.url)) response = cached_response response.raise_for_status() return response def cache_response(self, url, response): if self.get_etag(response): self.cache[url] = response return response def get_etag(self, response): return response.headers['ETag'] def log_header(self, response, key): if key in response.headers: logger.debug('%s: %s', key, response.headers[key]) @property def cache(self): return self.__class__.__cache class NullAuth(requests.auth.AuthBase): '''force requests to ignore the ``.netrc`` Copied from: https://github.com/kennethreitz/requests/issues/2773#issuecomment-174312831 Some sites do not support regular authentication, but we still want to store credentials in the ``.netrc`` file and submit them as form elements. Without this, requests would otherwise use the .netrc which leads, on some sites, to a 401 error. Use with:: requests.get(url, auth=NullAuth()) ''' def __call__(self, r): return r
mit
neverchanje/parser
tests/lib/gtest-1.7.0/test/gtest_env_var_test.py
2408
3487
#!/usr/bin/env python # # Copyright 2008, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
"""Verifies that Google Test correctly parses environment variables.""" __author__ = 'wan@google.com (Zhanyong Wan)' import os import gtest_test_utils IS_WINDOWS = os.name == 'nt' IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux' COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_') environ = os.environ.copy() def AssertEq(expected, actual): if expected != actual: print 'Expected: %s' % (expected,) print ' Actual: %s' % (actual,) raise AssertionError def SetEnvVar(env_var, value): """Sets the env variable to 'value'; unsets it when 'value' is None.""" if value is not None: environ[env_var] = value elif env_var in environ: del environ[env_var] def GetFlag(flag): """Runs gtest_env_var_test_ and returns its output.""" args = [COMMAND] if flag is not None: args += [flag] return gtest_test_utils.Subprocess(args, env=environ).output def TestFlag(flag, test_val, default_val): """Verifies that the given flag is affected by the corresponding env var.""" env_var = 'GTEST_' + flag.upper() SetEnvVar(env_var, test_val) AssertEq(test_val, GetFlag(flag)) SetEnvVar(env_var, None) AssertEq(default_val, GetFlag(flag)) class GTestEnvVarTest(gtest_test_utils.TestCase): def testEnvVarAffectsFlag(self): """Tests that environment variable should affect the corresponding flag.""" TestFlag('break_on_failure', '1', '0') TestFlag('color', 'yes', 'auto') TestFlag('filter', 'FooTest.Bar', '*') TestFlag('output', 'xml:tmp/foo.xml', '') TestFlag('print_time', '0', '1') TestFlag('repeat', '999', '1') TestFlag('throw_on_failure', '1', '0') TestFlag('death_test_style', 'threadsafe', 'fast') TestFlag('catch_exceptions', '0', '1') if IS_LINUX: TestFlag('death_test_use_fork', '1', '0') TestFlag('stack_trace_depth', '0', '100') if __name__ == '__main__': gtest_test_utils.Main()
mit
paurosello/frappe
frappe/email/doctype/auto_email_report/auto_email_report.py
1
5905
# -*- coding: utf-8 -*- # Copyright (c) 2015, Frappe Technologies and contributors # For license information, please see license.txt from __future__ import unicode_literals import frappe, json from frappe import _ from frappe.model.document import Document from datetime import timedelta import frappe.utils from frappe.utils import now, global_date_format, format_time from frappe.utils.xlsxutils import make_xlsx from frappe.utils.csvutils import to_csv max_reports_per_user = 3 class AutoEmailReport(Document): def autoname(self): self.name = _(self.report) def validate(self): self.validate_report_count() self.validate_emails() self.validate_report_format() def validate_emails(self): '''Cleanup list of emails''' if ',' in self.email_to: self.email_to.replace(',', '\n') valid = [] for email in self.email_to.split(): if email: frappe.utils.validate_email_add(email, True) valid.append(email) self.email_to = '\n'.join(valid) def validate_report_count(self): '''check that there are only 3 enabled reports per user''' count = frappe.db.sql('select count(*) from `tabAuto Email Report` where user=%s and enabled=1', self.user)[0][0] if count > max_reports_per_user + (-1 if self.flags.in_insert else 0): frappe.throw(_('Only {0} emailed reports are allowed per user').format(max_reports_per_user)) def validate_report_format(self): """ check if user has select correct report format """ valid_report_formats = ["HTML", "XLSX", "CSV"] if self.format not in valid_report_formats: frappe.throw(_("%s is not a valid report format. 
Report format should \ one of the following %s"%(frappe.bold(self.format), frappe.bold(", ".join(valid_report_formats))))) def get_report_content(self): '''Returns file in for the report in given format''' report = frappe.get_doc('Report', self.report) if self.report_type=='Report Builder' and self.data_modified_till: self.filters = json.loads(self.filters) if self.filters else {} self.filters['modified'] = ('>', frappe.utils.now_datetime() - timedelta(hours=self.data_modified_till)) columns, data = report.get_data(limit=self.no_of_rows or 100, user = self.user, filters = self.filters, as_dict=True) # add serial numbers columns.insert(0, frappe._dict(fieldname='idx', label='', width='30px')) for i in range(len(data)): data[i]['idx'] = i+1 if len(data)==0 and self.send_if_data: return None if self.format == 'HTML': return self.get_html_table(columns, data) elif self.format == 'XLSX': spreadsheet_data = self.get_spreadsheet_data(columns, data) xlsx_file = make_xlsx(spreadsheet_data, "Auto Email Report") return xlsx_file.getvalue() elif self.format == 'CSV': spreadsheet_data = self.get_spreadsheet_data(columns, data) return to_csv(spreadsheet_data) else: frappe.throw(_('Invalid Output Format')) def get_html_table(self, columns=None, data=None): date_time = global_date_format(now()) + ' ' + format_time(now()) report_doctype = frappe.db.get_value('Report', self.report, 'ref_doctype') return frappe.render_template('frappe/templates/emails/auto_email_report.html', { 'title': self.name, 'description': self.description, 'date_time': date_time, 'columns': columns, 'data': data, 'report_url': frappe.utils.get_url_to_report(self.report, self.report_type, report_doctype), 'report_name': self.report, 'edit_report_settings': frappe.utils.get_link_to_form('Auto Email Report', self.name) }) @staticmethod def get_spreadsheet_data(columns, data): out = [[_(df.label) for df in columns], ] for row in data: new_row = [] out.append(new_row) for df in columns: 
new_row.append(frappe.format(row[df.fieldname], df, row)) return out def get_file_name(self): return "{0}.{1}".format(self.report.replace(" ", "-").replace("/", "-"), self.format.lower()) def send(self): if self.filter_meta and not self.filters: frappe.throw(_("Please set filters value in Report Filter table.")) data = self.get_report_content() if not data: return attachments = None if self.format == "HTML": message = data else: message = self.get_html_table() if not self.format=='HTML': attachments = [{ 'fname': self.get_file_name(), 'fcontent': data }] frappe.sendmail( recipients = self.email_to.split(), subject = self.name, message = message, attachments = attachments, reference_doctype = self.doctype, reference_name = self.name ) @frappe.whitelist() def download(name): '''Download report locally''' auto_email_report = frappe.get_doc('Auto Email Report', name) auto_email_report.check_permission() data = auto_email_report.get_report_content() if not data: frappe.msgprint(_('No Data')) return frappe.local.response.filecontent = data frappe.local.response.type = "download" frappe.local.response.filename = auto_email_report.get_file_name() @frappe.whitelist() def send_now(name): '''Send Auto Email report now''' auto_email_report = frappe.get_doc('Auto Email Report', name) auto_email_report.check_permission() auto_email_report.send() def send_daily(): '''Check reports to be sent daily''' now = frappe.utils.now_datetime() for report in frappe.get_all('Auto Email Report', {'enabled': 1, 'frequency': ('in', ('Daily', 'Weekly'))}): auto_email_report = frappe.get_doc('Auto Email Report', report.name) # if not correct weekday, skip if auto_email_report.frequency=='Weekly': if now.weekday()!={'Monday':0,'Tuesday':1,'Wednesday':2, 'Thursday':3,'Friday':4,'Saturday':5,'Sunday':6}[auto_email_report.day_of_week]: continue auto_email_report.send() def send_monthly(): '''Check reports to be sent monthly''' for report in frappe.get_all('Auto Email Report', {'enabled': 1, 
'frequency': 'Monthly'}): frappe.get_doc('Auto Email Report', report.name).send()
mit
stacywsmith/ansible
lib/ansible/modules/cloud/vmware/vmware_vsan_cluster.py
70
4093
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2015, Russell Teague <rteague2 () csc.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: vmware_vsan_cluster short_description: Configure VSAN clustering on an ESXi host description: - This module can be used to configure VSAN clustering on an ESXi host version_added: 2.0 author: "Russell Teague (@mtnbikenc)" notes: - Tested on vSphere 5.5 requirements: - "python >= 2.6" - PyVmomi options: cluster_uuid: description: - Desired cluster UUID required: False extends_documentation_fragment: vmware.documentation ''' EXAMPLES = ''' # Example command from Ansible Playbook - name: Configure VMware VSAN Cluster hosts: deploy_node gather_facts: False tags: - vsan tasks: - name: Configure VSAN on first host vmware_vsan_cluster: hostname: "{{ groups['esxi'][0] }}" username: "{{ esxi_username }}" password: "{{ site_password }}" register: vsan_cluster - name: Configure VSAN on remaining hosts vmware_vsan_cluster: hostname: "{{ item }}" username: "{{ esxi_username }}" password: "{{ site_password }}" cluster_uuid: "{{ vsan_cluster.cluster_uuid }}" with_items: "{{ groups['esxi'][1:] }}" ''' try: from pyVmomi import vim, vmodl HAS_PYVMOMI = True except ImportError: HAS_PYVMOMI = False def 
create_vsan_cluster(host_system, new_cluster_uuid): host_config_manager = host_system.configManager vsan_system = host_config_manager.vsanSystem vsan_config = vim.vsan.host.ConfigInfo() vsan_config.enabled = True if new_cluster_uuid is not None: vsan_config.clusterInfo = vim.vsan.host.ConfigInfo.ClusterInfo() vsan_config.clusterInfo.uuid = new_cluster_uuid vsan_config.storageInfo = vim.vsan.host.ConfigInfo.StorageInfo() vsan_config.storageInfo.autoClaimStorage = True task = vsan_system.UpdateVsan_Task(vsan_config) changed, result = wait_for_task(task) host_status = vsan_system.QueryHostStatus() cluster_uuid = host_status.uuid return changed, result, cluster_uuid def main(): argument_spec = vmware_argument_spec() argument_spec.update(dict(cluster_uuid=dict(required=False, type='str'))) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False) if not HAS_PYVMOMI: module.fail_json(msg='pyvmomi is required for this module') new_cluster_uuid = module.params['cluster_uuid'] try: content = connect_to_api(module, False) host = get_all_objs(content, [vim.HostSystem]) if not host: module.fail_json(msg="Unable to locate Physical Host.") host_system = host.keys()[0] changed, result, cluster_uuid = create_vsan_cluster(host_system, new_cluster_uuid) module.exit_json(changed=changed, result=result, cluster_uuid=cluster_uuid) except vmodl.RuntimeFault as runtime_fault: module.fail_json(msg=runtime_fault.msg) except vmodl.MethodFault as method_fault: module.fail_json(msg=method_fault.msg) except Exception as e: module.fail_json(msg=str(e)) from ansible.module_utils.vmware import * from ansible.module_utils.basic import * if __name__ == '__main__': main()
gpl-3.0
bnkr/suds
suds/mx/encoded.py
211
4651
# This program is free software; you can redistribute it and/or modify # it under the terms of the (LGPL) GNU Lesser General Public License as # published by the Free Software Foundation; either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Library Lesser General Public License for more details at # ( http://www.gnu.org/licenses/lgpl.html ). # # You should have received a copy of the GNU Lesser General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. # written by: Jeff Ortel ( jortel@redhat.com ) """ Provides encoded I{marshaller} classes. """ from logging import getLogger from suds import * from suds.mx import * from suds.mx.literal import Literal from suds.mx.typer import Typer from suds.sudsobject import Factory, Object from suds.xsd.query import TypeQuery log = getLogger(__name__) # # Add encoded extensions # aty = The soap (section 5) encoded array type. # Content.extensions.append('aty') class Encoded(Literal): """ A SOAP section (5) encoding marshaller. This marshaller supports rpc/encoded soap styles. """ def start(self, content): # # For soap encoded arrays, the 'aty' (array type) information # is extracted and added to the 'content'. Then, the content.value # is replaced with an object containing an 'item=[]' attribute # containing values that are 'typed' suds objects. 
# start = Literal.start(self, content) if start and isinstance(content.value, (list,tuple)): resolved = content.type.resolve() for c in resolved: if hasattr(c[0], 'aty'): content.aty = (content.tag, c[0].aty) self.cast(content) break return start def end(self, parent, content): # # For soap encoded arrays, the soapenc:arrayType attribute is # added with proper type and size information. # Eg: soapenc:arrayType="xs:int[3]" # Literal.end(self, parent, content) if content.aty is None: return tag, aty = content.aty ns0 = ('at0', aty[1]) ns1 = ('at1', 'http://schemas.xmlsoap.org/soap/encoding/') array = content.value.item child = parent.getChild(tag) child.addPrefix(ns0[0], ns0[1]) child.addPrefix(ns1[0], ns1[1]) name = '%s:arrayType' % ns1[0] value = '%s:%s[%d]' % (ns0[0], aty[0], len(array)) child.set(name, value) def encode(self, node, content): if content.type.any(): Typer.auto(node, content.value) return if content.real.any(): Typer.auto(node, content.value) return ns = None name = content.real.name if self.xstq: ns = content.real.namespace() Typer.manual(node, name, ns) def cast(self, content): """ Cast the I{untyped} list items found in content I{value}. Each items contained in the list is checked for XSD type information. Items (values) that are I{untyped}, are replaced with suds objects and type I{metadata} is added. @param content: The content holding the collection. 
@type content: L{Content} @return: self @rtype: L{Encoded} """ aty = content.aty[1] resolved = content.type.resolve() array = Factory.object(resolved.name) array.item = [] query = TypeQuery(aty) ref = query.execute(self.schema) if ref is None: raise TypeNotFound(qref) for x in content.value: if isinstance(x, (list, tuple)): array.item.append(x) continue if isinstance(x, Object): md = x.__metadata__ md.sxtype = ref array.item.append(x) continue if isinstance(x, dict): x = Factory.object(ref.name, x) md = x.__metadata__ md.sxtype = ref array.item.append(x) continue x = Factory.property(ref.name, x) md = x.__metadata__ md.sxtype = ref array.item.append(x) content.value = array return self
lgpl-3.0
mancoast/CPythonPyc_test
fail/322_test_fractions.py
51
24261
"""Tests for Lib/fractions.py.""" from decimal import Decimal from test.support import run_unittest, requires_IEEE_754 import math import numbers import operator import fractions import unittest from copy import copy, deepcopy from pickle import dumps, loads F = fractions.Fraction gcd = fractions.gcd class DummyFloat(object): """Dummy float class for testing comparisons with Fractions""" def __init__(self, value): if not isinstance(value, float): raise TypeError("DummyFloat can only be initialized from float") self.value = value def _richcmp(self, other, op): if isinstance(other, numbers.Rational): return op(F.from_float(self.value), other) elif isinstance(other, DummyFloat): return op(self.value, other.value) else: return NotImplemented def __eq__(self, other): return self._richcmp(other, operator.eq) def __le__(self, other): return self._richcmp(other, operator.le) def __lt__(self, other): return self._richcmp(other, operator.lt) def __ge__(self, other): return self._richcmp(other, operator.ge) def __gt__(self, other): return self._richcmp(other, operator.gt) # shouldn't be calling __float__ at all when doing comparisons def __float__(self): assert False, "__float__ should not be invoked for comparisons" # same goes for subtraction def __sub__(self, other): assert False, "__sub__ should not be invoked for comparisons" __rsub__ = __sub__ class DummyRational(object): """Test comparison of Fraction with a naive rational implementation.""" def __init__(self, num, den): g = gcd(num, den) self.num = num // g self.den = den // g def __eq__(self, other): if isinstance(other, fractions.Fraction): return (self.num == other._numerator and self.den == other._denominator) else: return NotImplemented def __lt__(self, other): return(self.num * other._denominator < self.den * other._numerator) def __gt__(self, other): return(self.num * other._denominator > self.den * other._numerator) def __le__(self, other): return(self.num * other._denominator <= self.den * other._numerator) 
def __ge__(self, other): return(self.num * other._denominator >= self.den * other._numerator) # this class is for testing comparisons; conversion to float # should never be used for a comparison, since it loses accuracy def __float__(self): assert False, "__float__ should not be invoked" class GcdTest(unittest.TestCase): def testMisc(self): self.assertEqual(0, gcd(0, 0)) self.assertEqual(1, gcd(1, 0)) self.assertEqual(-1, gcd(-1, 0)) self.assertEqual(1, gcd(0, 1)) self.assertEqual(-1, gcd(0, -1)) self.assertEqual(1, gcd(7, 1)) self.assertEqual(-1, gcd(7, -1)) self.assertEqual(1, gcd(-23, 15)) self.assertEqual(12, gcd(120, 84)) self.assertEqual(-12, gcd(84, -120)) def _components(r): return (r.numerator, r.denominator) class FractionTest(unittest.TestCase): def assertTypedEquals(self, expected, actual): """Asserts that both the types and values are the same.""" self.assertEqual(type(expected), type(actual)) self.assertEqual(expected, actual) def assertRaisesMessage(self, exc_type, message, callable, *args, **kwargs): """Asserts that callable(*args, **kwargs) raises exc_type(message).""" try: callable(*args, **kwargs) except exc_type as e: self.assertEqual(message, str(e)) else: self.fail("%s not raised" % exc_type.__name__) def testInit(self): self.assertEqual((0, 1), _components(F())) self.assertEqual((7, 1), _components(F(7))) self.assertEqual((7, 3), _components(F(F(7, 3)))) self.assertEqual((-1, 1), _components(F(-1, 1))) self.assertEqual((-1, 1), _components(F(1, -1))) self.assertEqual((1, 1), _components(F(-2, -2))) self.assertEqual((1, 2), _components(F(5, 10))) self.assertEqual((7, 15), _components(F(7, 15))) self.assertEqual((10**23, 1), _components(F(10**23))) self.assertEqual((3, 77), _components(F(F(3, 7), 11))) self.assertEqual((-9, 5), _components(F(2, F(-10, 9)))) self.assertEqual((2486, 2485), _components(F(F(22, 7), F(355, 113)))) self.assertRaisesMessage(ZeroDivisionError, "Fraction(12, 0)", F, 12, 0) self.assertRaises(TypeError, F, 1.5 + 3j) 
self.assertRaises(TypeError, F, "3/2", 3) self.assertRaises(TypeError, F, 3, 0j) self.assertRaises(TypeError, F, 3, 1j) @requires_IEEE_754 def testInitFromFloat(self): self.assertEqual((5, 2), _components(F(2.5))) self.assertEqual((0, 1), _components(F(-0.0))) self.assertEqual((3602879701896397, 36028797018963968), _components(F(0.1))) self.assertRaises(TypeError, F, float('nan')) self.assertRaises(TypeError, F, float('inf')) self.assertRaises(TypeError, F, float('-inf')) def testInitFromDecimal(self): self.assertEqual((11, 10), _components(F(Decimal('1.1')))) self.assertEqual((7, 200), _components(F(Decimal('3.5e-2')))) self.assertEqual((0, 1), _components(F(Decimal('.000e20')))) self.assertRaises(TypeError, F, Decimal('nan')) self.assertRaises(TypeError, F, Decimal('snan')) self.assertRaises(TypeError, F, Decimal('inf')) self.assertRaises(TypeError, F, Decimal('-inf')) def testFromString(self): self.assertEqual((5, 1), _components(F("5"))) self.assertEqual((3, 2), _components(F("3/2"))) self.assertEqual((3, 2), _components(F(" \n +3/2"))) self.assertEqual((-3, 2), _components(F("-3/2 "))) self.assertEqual((13, 2), _components(F(" 013/02 \n "))) self.assertEqual((16, 5), _components(F(" 3.2 "))) self.assertEqual((-16, 5), _components(F(" -3.2 "))) self.assertEqual((-3, 1), _components(F(" -3. 
"))) self.assertEqual((3, 5), _components(F(" .6 "))) self.assertEqual((1, 3125), _components(F("32.e-5"))) self.assertEqual((1000000, 1), _components(F("1E+06"))) self.assertEqual((-12300, 1), _components(F("-1.23e4"))) self.assertEqual((0, 1), _components(F(" .0e+0\t"))) self.assertEqual((0, 1), _components(F("-0.000e0"))) self.assertRaisesMessage( ZeroDivisionError, "Fraction(3, 0)", F, "3/0") self.assertRaisesMessage( ValueError, "Invalid literal for Fraction: '3/'", F, "3/") self.assertRaisesMessage( ValueError, "Invalid literal for Fraction: '/2'", F, "/2") self.assertRaisesMessage( ValueError, "Invalid literal for Fraction: '3 /2'", F, "3 /2") self.assertRaisesMessage( # Denominators don't need a sign. ValueError, "Invalid literal for Fraction: '3/+2'", F, "3/+2") self.assertRaisesMessage( # Imitate float's parsing. ValueError, "Invalid literal for Fraction: '+ 3/2'", F, "+ 3/2") self.assertRaisesMessage( # Avoid treating '.' as a regex special character. ValueError, "Invalid literal for Fraction: '3a2'", F, "3a2") self.assertRaisesMessage( # Don't accept combinations of decimals and rationals. ValueError, "Invalid literal for Fraction: '3/7.2'", F, "3/7.2") self.assertRaisesMessage( # Don't accept combinations of decimals and rationals. ValueError, "Invalid literal for Fraction: '3.2/7'", F, "3.2/7") self.assertRaisesMessage( # Allow 3. and .3, but not . 
ValueError, "Invalid literal for Fraction: '.'", F, ".") def testImmutable(self): r = F(7, 3) r.__init__(2, 15) self.assertEqual((7, 3), _components(r)) self.assertRaises(AttributeError, setattr, r, 'numerator', 12) self.assertRaises(AttributeError, setattr, r, 'denominator', 6) self.assertEqual((7, 3), _components(r)) # But if you _really_ need to: r._numerator = 4 r._denominator = 2 self.assertEqual((4, 2), _components(r)) # Which breaks some important operations: self.assertNotEqual(F(4, 2), r) def testFromFloat(self): self.assertRaises(TypeError, F.from_float, 3+4j) self.assertEqual((10, 1), _components(F.from_float(10))) bigint = 1234567890123456789 self.assertEqual((bigint, 1), _components(F.from_float(bigint))) self.assertEqual((0, 1), _components(F.from_float(-0.0))) self.assertEqual((10, 1), _components(F.from_float(10.0))) self.assertEqual((-5, 2), _components(F.from_float(-2.5))) self.assertEqual((99999999999999991611392, 1), _components(F.from_float(1e23))) self.assertEqual(float(10**23), float(F.from_float(1e23))) self.assertEqual((3602879701896397, 1125899906842624), _components(F.from_float(3.2))) self.assertEqual(3.2, float(F.from_float(3.2))) inf = 1e1000 nan = inf - inf self.assertRaisesMessage( TypeError, "Cannot convert inf to Fraction.", F.from_float, inf) self.assertRaisesMessage( TypeError, "Cannot convert -inf to Fraction.", F.from_float, -inf) self.assertRaisesMessage( TypeError, "Cannot convert nan to Fraction.", F.from_float, nan) def testFromDecimal(self): self.assertRaises(TypeError, F.from_decimal, 3+4j) self.assertEqual(F(10, 1), F.from_decimal(10)) self.assertEqual(F(0), F.from_decimal(Decimal("-0"))) self.assertEqual(F(5, 10), F.from_decimal(Decimal("0.5"))) self.assertEqual(F(5, 1000), F.from_decimal(Decimal("5e-3"))) self.assertEqual(F(5000), F.from_decimal(Decimal("5e3"))) self.assertEqual(1 - F(1, 10**30), F.from_decimal(Decimal("0." 
+ "9" * 30))) self.assertRaisesMessage( TypeError, "Cannot convert Infinity to Fraction.", F.from_decimal, Decimal("inf")) self.assertRaisesMessage( TypeError, "Cannot convert -Infinity to Fraction.", F.from_decimal, Decimal("-inf")) self.assertRaisesMessage( TypeError, "Cannot convert NaN to Fraction.", F.from_decimal, Decimal("nan")) self.assertRaisesMessage( TypeError, "Cannot convert sNaN to Fraction.", F.from_decimal, Decimal("snan")) def testLimitDenominator(self): rpi = F('3.1415926535897932') self.assertEqual(rpi.limit_denominator(10000), F(355, 113)) self.assertEqual(-rpi.limit_denominator(10000), F(-355, 113)) self.assertEqual(rpi.limit_denominator(113), F(355, 113)) self.assertEqual(rpi.limit_denominator(112), F(333, 106)) self.assertEqual(F(201, 200).limit_denominator(100), F(1)) self.assertEqual(F(201, 200).limit_denominator(101), F(102, 101)) self.assertEqual(F(0).limit_denominator(10000), F(0)) def testConversions(self): self.assertTypedEquals(-1, math.trunc(F(-11, 10))) self.assertTypedEquals(-2, math.floor(F(-11, 10))) self.assertTypedEquals(-1, math.ceil(F(-11, 10))) self.assertTypedEquals(-1, math.ceil(F(-10, 10))) self.assertTypedEquals(-1, int(F(-11, 10))) self.assertTypedEquals(0, round(F(-1, 10))) self.assertTypedEquals(0, round(F(-5, 10))) self.assertTypedEquals(-2, round(F(-15, 10))) self.assertTypedEquals(-1, round(F(-7, 10))) self.assertEqual(False, bool(F(0, 1))) self.assertEqual(True, bool(F(3, 2))) self.assertTypedEquals(0.1, float(F(1, 10))) # Check that __float__ isn't implemented by converting the # numerator and denominator to float before dividing. 
self.assertRaises(OverflowError, float, int('2'*400+'7')) self.assertAlmostEqual(2.0/3, float(F(int('2'*400+'7'), int('3'*400+'1')))) self.assertTypedEquals(0.1+0j, complex(F(1,10))) def testRound(self): self.assertTypedEquals(F(-200), round(F(-150), -2)) self.assertTypedEquals(F(-200), round(F(-250), -2)) self.assertTypedEquals(F(30), round(F(26), -1)) self.assertTypedEquals(F(-2, 10), round(F(-15, 100), 1)) self.assertTypedEquals(F(-2, 10), round(F(-25, 100), 1)) def testArithmetic(self): self.assertEqual(F(1, 2), F(1, 10) + F(2, 5)) self.assertEqual(F(-3, 10), F(1, 10) - F(2, 5)) self.assertEqual(F(1, 25), F(1, 10) * F(2, 5)) self.assertEqual(F(1, 4), F(1, 10) / F(2, 5)) self.assertTypedEquals(2, F(9, 10) // F(2, 5)) self.assertTypedEquals(10**23, F(10**23, 1) // F(1)) self.assertEqual(F(2, 3), F(-7, 3) % F(3, 2)) self.assertEqual(F(8, 27), F(2, 3) ** F(3)) self.assertEqual(F(27, 8), F(2, 3) ** F(-3)) self.assertTypedEquals(2.0, F(4) ** F(1, 2)) z = pow(F(-1), F(1, 2)) self.assertAlmostEqual(z.real, 0) self.assertEqual(z.imag, 1) def testMixedArithmetic(self): self.assertTypedEquals(F(11, 10), F(1, 10) + 1) self.assertTypedEquals(1.1, F(1, 10) + 1.0) self.assertTypedEquals(1.1 + 0j, F(1, 10) + (1.0 + 0j)) self.assertTypedEquals(F(11, 10), 1 + F(1, 10)) self.assertTypedEquals(1.1, 1.0 + F(1, 10)) self.assertTypedEquals(1.1 + 0j, (1.0 + 0j) + F(1, 10)) self.assertTypedEquals(F(-9, 10), F(1, 10) - 1) self.assertTypedEquals(-0.9, F(1, 10) - 1.0) self.assertTypedEquals(-0.9 + 0j, F(1, 10) - (1.0 + 0j)) self.assertTypedEquals(F(9, 10), 1 - F(1, 10)) self.assertTypedEquals(0.9, 1.0 - F(1, 10)) self.assertTypedEquals(0.9 + 0j, (1.0 + 0j) - F(1, 10)) self.assertTypedEquals(F(1, 10), F(1, 10) * 1) self.assertTypedEquals(0.1, F(1, 10) * 1.0) self.assertTypedEquals(0.1 + 0j, F(1, 10) * (1.0 + 0j)) self.assertTypedEquals(F(1, 10), 1 * F(1, 10)) self.assertTypedEquals(0.1, 1.0 * F(1, 10)) self.assertTypedEquals(0.1 + 0j, (1.0 + 0j) * F(1, 10)) self.assertTypedEquals(F(1, 10), 
F(1, 10) / 1) self.assertTypedEquals(0.1, F(1, 10) / 1.0) self.assertTypedEquals(0.1 + 0j, F(1, 10) / (1.0 + 0j)) self.assertTypedEquals(F(10, 1), 1 / F(1, 10)) self.assertTypedEquals(10.0, 1.0 / F(1, 10)) self.assertTypedEquals(10.0 + 0j, (1.0 + 0j) / F(1, 10)) self.assertTypedEquals(0, F(1, 10) // 1) self.assertTypedEquals(0, F(1, 10) // 1.0) self.assertTypedEquals(10, 1 // F(1, 10)) self.assertTypedEquals(10**23, 10**22 // F(1, 10)) self.assertTypedEquals(10, 1.0 // F(1, 10)) self.assertTypedEquals(F(1, 10), F(1, 10) % 1) self.assertTypedEquals(0.1, F(1, 10) % 1.0) self.assertTypedEquals(F(0, 1), 1 % F(1, 10)) self.assertTypedEquals(0.0, 1.0 % F(1, 10)) # No need for divmod since we don't override it. # ** has more interesting conversion rules. self.assertTypedEquals(F(100, 1), F(1, 10) ** -2) self.assertTypedEquals(F(100, 1), F(10, 1) ** 2) self.assertTypedEquals(0.1, F(1, 10) ** 1.0) self.assertTypedEquals(0.1 + 0j, F(1, 10) ** (1.0 + 0j)) self.assertTypedEquals(4 , 2 ** F(2, 1)) z = pow(-1, F(1, 2)) self.assertAlmostEqual(0, z.real) self.assertEqual(1, z.imag) self.assertTypedEquals(F(1, 4) , 2 ** F(-2, 1)) self.assertTypedEquals(2.0 , 4 ** F(1, 2)) self.assertTypedEquals(0.25, 2.0 ** F(-2, 1)) self.assertTypedEquals(1.0 + 0j, (1.0 + 0j) ** F(1, 10)) def testMixingWithDecimal(self): # Decimal refuses mixed arithmetic (but not mixed comparisons) self.assertRaisesMessage( TypeError, "unsupported operand type(s) for +: 'Fraction' and 'Decimal'", operator.add, F(3,11), Decimal('3.1415926')) def testComparisons(self): self.assertTrue(F(1, 2) < F(2, 3)) self.assertFalse(F(1, 2) < F(1, 2)) self.assertTrue(F(1, 2) <= F(2, 3)) self.assertTrue(F(1, 2) <= F(1, 2)) self.assertFalse(F(2, 3) <= F(1, 2)) self.assertTrue(F(1, 2) == F(1, 2)) self.assertFalse(F(1, 2) == F(1, 3)) self.assertFalse(F(1, 2) != F(1, 2)) self.assertTrue(F(1, 2) != F(1, 3)) def testComparisonsDummyRational(self): self.assertTrue(F(1, 2) == DummyRational(1, 2)) self.assertTrue(DummyRational(1, 2) == 
F(1, 2)) self.assertFalse(F(1, 2) == DummyRational(3, 4)) self.assertFalse(DummyRational(3, 4) == F(1, 2)) self.assertTrue(F(1, 2) < DummyRational(3, 4)) self.assertFalse(F(1, 2) < DummyRational(1, 2)) self.assertFalse(F(1, 2) < DummyRational(1, 7)) self.assertFalse(F(1, 2) > DummyRational(3, 4)) self.assertFalse(F(1, 2) > DummyRational(1, 2)) self.assertTrue(F(1, 2) > DummyRational(1, 7)) self.assertTrue(F(1, 2) <= DummyRational(3, 4)) self.assertTrue(F(1, 2) <= DummyRational(1, 2)) self.assertFalse(F(1, 2) <= DummyRational(1, 7)) self.assertFalse(F(1, 2) >= DummyRational(3, 4)) self.assertTrue(F(1, 2) >= DummyRational(1, 2)) self.assertTrue(F(1, 2) >= DummyRational(1, 7)) self.assertTrue(DummyRational(1, 2) < F(3, 4)) self.assertFalse(DummyRational(1, 2) < F(1, 2)) self.assertFalse(DummyRational(1, 2) < F(1, 7)) self.assertFalse(DummyRational(1, 2) > F(3, 4)) self.assertFalse(DummyRational(1, 2) > F(1, 2)) self.assertTrue(DummyRational(1, 2) > F(1, 7)) self.assertTrue(DummyRational(1, 2) <= F(3, 4)) self.assertTrue(DummyRational(1, 2) <= F(1, 2)) self.assertFalse(DummyRational(1, 2) <= F(1, 7)) self.assertFalse(DummyRational(1, 2) >= F(3, 4)) self.assertTrue(DummyRational(1, 2) >= F(1, 2)) self.assertTrue(DummyRational(1, 2) >= F(1, 7)) def testComparisonsDummyFloat(self): x = DummyFloat(1./3.) 
y = F(1, 3) self.assertTrue(x != y) self.assertTrue(x < y or x > y) self.assertFalse(x == y) self.assertFalse(x <= y and x >= y) self.assertTrue(y != x) self.assertTrue(y < x or y > x) self.assertFalse(y == x) self.assertFalse(y <= x and y >= x) def testMixedLess(self): self.assertTrue(2 < F(5, 2)) self.assertFalse(2 < F(4, 2)) self.assertTrue(F(5, 2) < 3) self.assertFalse(F(4, 2) < 2) self.assertTrue(F(1, 2) < 0.6) self.assertFalse(F(1, 2) < 0.4) self.assertTrue(0.4 < F(1, 2)) self.assertFalse(0.5 < F(1, 2)) self.assertFalse(float('inf') < F(1, 2)) self.assertTrue(float('-inf') < F(0, 10)) self.assertFalse(float('nan') < F(-3, 7)) self.assertTrue(F(1, 2) < float('inf')) self.assertFalse(F(17, 12) < float('-inf')) self.assertFalse(F(144, -89) < float('nan')) def testMixedLessEqual(self): self.assertTrue(0.5 <= F(1, 2)) self.assertFalse(0.6 <= F(1, 2)) self.assertTrue(F(1, 2) <= 0.5) self.assertFalse(F(1, 2) <= 0.4) self.assertTrue(2 <= F(4, 2)) self.assertFalse(2 <= F(3, 2)) self.assertTrue(F(4, 2) <= 2) self.assertFalse(F(5, 2) <= 2) self.assertFalse(float('inf') <= F(1, 2)) self.assertTrue(float('-inf') <= F(0, 10)) self.assertFalse(float('nan') <= F(-3, 7)) self.assertTrue(F(1, 2) <= float('inf')) self.assertFalse(F(17, 12) <= float('-inf')) self.assertFalse(F(144, -89) <= float('nan')) def testBigFloatComparisons(self): # Because 10**23 can't be represented exactly as a float: self.assertFalse(F(10**23) == float(10**23)) # The first test demonstrates why these are important. 
self.assertFalse(1e23 < float(F(math.trunc(1e23) + 1))) self.assertTrue(1e23 < F(math.trunc(1e23) + 1)) self.assertFalse(1e23 <= F(math.trunc(1e23) - 1)) self.assertTrue(1e23 > F(math.trunc(1e23) - 1)) self.assertFalse(1e23 >= F(math.trunc(1e23) + 1)) def testBigComplexComparisons(self): self.assertFalse(F(10**23) == complex(10**23)) self.assertRaises(TypeError, operator.gt, F(10**23), complex(10**23)) self.assertRaises(TypeError, operator.le, F(10**23), complex(10**23)) x = F(3, 8) z = complex(0.375, 0.0) w = complex(0.375, 0.2) self.assertTrue(x == z) self.assertFalse(x != z) self.assertFalse(x == w) self.assertTrue(x != w) for op in operator.lt, operator.le, operator.gt, operator.ge: self.assertRaises(TypeError, op, x, z) self.assertRaises(TypeError, op, z, x) self.assertRaises(TypeError, op, x, w) self.assertRaises(TypeError, op, w, x) def testMixedEqual(self): self.assertTrue(0.5 == F(1, 2)) self.assertFalse(0.6 == F(1, 2)) self.assertTrue(F(1, 2) == 0.5) self.assertFalse(F(1, 2) == 0.4) self.assertTrue(2 == F(4, 2)) self.assertFalse(2 == F(3, 2)) self.assertTrue(F(4, 2) == 2) self.assertFalse(F(5, 2) == 2) self.assertFalse(F(5, 2) == float('nan')) self.assertFalse(float('nan') == F(3, 7)) self.assertFalse(F(5, 2) == float('inf')) self.assertFalse(float('-inf') == F(2, 5)) def testStringification(self): self.assertEqual("Fraction(7, 3)", repr(F(7, 3))) self.assertEqual("Fraction(6283185307, 2000000000)", repr(F('3.1415926535'))) self.assertEqual("Fraction(-1, 100000000000000000000)", repr(F(1, -10**20))) self.assertEqual("7/3", str(F(7, 3))) self.assertEqual("7", str(F(7, 1))) def testHash(self): self.assertEqual(hash(2.5), hash(F(5, 2))) self.assertEqual(hash(10**50), hash(F(10**50))) self.assertNotEqual(hash(float(10**23)), hash(F(10**23))) # Check that __hash__ produces the same value as hash(), for # consistency with int and Decimal. (See issue #10356.) 
self.assertEqual(hash(F(-1)), F(-1).__hash__()) def testApproximatePi(self): # Algorithm borrowed from # http://docs.python.org/lib/decimal-recipes.html three = F(3) lasts, t, s, n, na, d, da = 0, three, 3, 1, 0, 0, 24 while abs(s - lasts) > F(1, 10**9): lasts = s n, na = n+na, na+8 d, da = d+da, da+32 t = (t * n) / d s += t self.assertAlmostEqual(math.pi, s) def testApproximateCos1(self): # Algorithm borrowed from # http://docs.python.org/lib/decimal-recipes.html x = F(1) i, lasts, s, fact, num, sign = 0, 0, F(1), 1, 1, 1 while abs(s - lasts) > F(1, 10**9): lasts = s i += 2 fact *= i * (i-1) num *= x * x sign *= -1 s += num / fact * sign self.assertAlmostEqual(math.cos(1), s) def test_copy_deepcopy_pickle(self): r = F(13, 7) self.assertEqual(r, loads(dumps(r))) self.assertEqual(id(r), id(copy(r))) self.assertEqual(id(r), id(deepcopy(r))) def test_slots(self): # Issue 4998 r = F(13, 7) self.assertRaises(AttributeError, setattr, r, 'a', 10) def test_main(): run_unittest(FractionTest, GcdTest) if __name__ == '__main__': test_main()
gpl-3.0
davidbrown3/reinforcement-learning
DP/PolicyIteration.py
1
1776
import numpy as np from PolicyEvaluation import policy_eval def policy_improvement(env, discount_factor=1.0): """ Policy Improvement Algorithm. Iteratively evaluates and improves a policy until an optimal policy is found. Args: env: The OpenAI envrionment. policy_eval_fn: Policy Evaluation function that takes 3 arguments: policy, env, discount_factor. discount_factor: Lambda discount factor. Returns: A tuple (policy, V). policy is the optimal policy, a matrix of shape [S, A] where each state s contains a valid probability distribution over actions. V is the value function for the optimal policy. """ # Start with a random policy Policy = np.ones([env.nS, env.nA]) / env.nA V = policy_eval(Policy, env, theta=0.01) while True: for StateIdx, StateName in enumerate(env.P.keys()): StateInfo = env.P[StateName] ActionValues = np.zeros(env.nA) for ActionIdx, ActionName in enumerate(StateInfo.keys()): # For now assume that all probabilities are 1 ActionInfo = StateInfo[ActionName][0] Reward = ActionInfo[2] NextState = ActionInfo[1] NextStateValue = V[NextState] ActionValues[ActionIdx] = Reward + discount_factor*NextStateValue MaxValueIdx = np.argmax(ActionValues) Policy[StateIdx,:] = 0 Policy[StateIdx,MaxValueIdx] = 1 VNew = policy_eval(Policy, env, theta=0.01) if np.all(VNew==V): V = VNew break else: V = VNew return Policy, V
mit
tszym/ansible
lib/ansible/modules/cloud/vmware/vsphere_guest.py
8
71539
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: vsphere_guest short_description: Create/delete/manage a guest VM through VMware vSphere. description: - Create/delete/reconfigure a guest VM through VMware vSphere. This module has a dependency on pysphere >= 1.7 version_added: "1.6" options: vcenter_hostname: description: - The hostname of the vcenter server the module will connect to, to create the guest. required: true default: null aliases: [] validate_certs: description: - Validate SSL certs. Note, if running on python without SSLContext support (typically, python < 2.7.9) you will have to set this to C(no) as pysphere does not support validating certificates on older python. Prior to 2.1, this module would always validate on python >= 2.7.9 and never validate on python <= 2.7.8. required: false default: yes choices: ['yes', 'no'] version_added: 2.1 guest: description: - The virtual server name you wish to manage. required: true username: description: - Username to connect to vcenter as. required: true default: null password: description: - Password of the user to connect to vcenter as. required: true default: null resource_pool: description: - The name of the resource_pool to create the VM in. required: false default: None cluster: description: - The name of the cluster to create the VM in. By default this is derived from the host you tell the module to build the guest on. required: false default: None esxi: description: - Dictionary which includes datacenter and hostname on which the VM should be created. 
For standalone ESXi hosts, ha-datacenter should be used as the datacenter name required: false default: null state: description: - Indicate desired state of the vm. 'reconfigured' only applies changes to 'vm_cdrom', 'memory_mb', and 'num_cpus' in vm_hardware parameter. The 'memory_mb' and 'num_cpus' changes are applied to powered-on vms when hot-plugging is enabled for the guest. default: present choices: ['present', 'powered_off', 'absent', 'powered_on', 'restarted', 'reconfigured'] from_template: version_added: "1.9" description: - Specifies if the VM should be deployed from a template (mutually exclusive with 'state' parameter). No guest customization changes to hardware such as CPU, RAM, NICs or Disks can be applied when launching from template. default: no choices: ['yes', 'no'] template_src: version_added: "1.9" description: - Name of the source template to deploy from default: None snapshot_to_clone: description: - A string that when specified, will create a linked clone copy of the VM. Snapshot must already be taken in vCenter. version_added: "2.0" required: false default: none power_on_after_clone: description: - Specifies if the VM should be powered on after the clone. required: false default: yes choices: ['yes', 'no'] vm_disk: description: - A key, value list of disks and their sizes and which datastore to keep it in. required: false default: null vm_hardware: description: - A key, value list of VM config settings. Must include ['memory_mb', 'num_cpus', 'osid', 'scsi']. required: false default: null vm_nic: description: - A key, value list of nics, their types and what network to put them on. required: false default: null vm_extra_config: description: - A key, value pair of any extra values you want set or changed in the vmx file of the VM. Useful to set advanced options on the VM. required: false default: null vm_hw_version: description: - Desired hardware version identifier (for example, "vmx-08" for vms that needs to be managed with vSphere Client). 
Note that changing hardware version of existing vm is not supported. required: false default: null version_added: "1.7" vmware_guest_facts: description: - Gather facts from vCenter on a particular VM required: false default: null force: description: - Boolean. Allows you to run commands which may alter the running state of a guest. Also used to reconfigure and destroy. default: "no" choices: [ "yes", "no" ] notes: - This module should run from a system that can access vSphere directly. Either by using local_action, or using delegate_to. author: "Richard Hoop (@rhoop) <wrhoop@gmail.com>" requirements: - "python >= 2.6" - pysphere ''' EXAMPLES = ''' --- # Create a new VM on an ESX server # Returns changed = False when the VM already exists # Returns changed = True and a adds ansible_facts from the new VM # State will set the power status of a guest upon creation. Use powered_on to create and boot. # Options ['state', 'vm_extra_config', 'vm_disk', 'vm_nic', 'vm_hardware', 'esxi'] are required together # Note: vm_floppy support added in 2.0 - vsphere_guest: vcenter_hostname: vcenter.mydomain.local username: myuser password: mypass guest: newvm001 state: powered_on vm_extra_config: vcpu.hotadd: yes mem.hotadd: yes notes: This is a test VM folder: MyFolder vm_disk: disk1: size_gb: 10 type: thin datastore: storage001 # VMs can be put into folders. The value given here is either the full path # to the folder (e.g. production/customerA/lamp) or just the last component # of the path (e.g. 
lamp): folder: production/customerA/lamp vm_nic: nic1: type: vmxnet3 network: VM Network network_type: standard nic2: type: vmxnet3 network: dvSwitch Network network_type: dvs vm_hardware: memory_mb: 2048 num_cpus: 2 osid: centos64Guest scsi: paravirtual vm_cdrom: type: "iso" iso_path: "DatastoreName/cd-image.iso" vm_floppy: type: "image" image_path: "DatastoreName/floppy-image.flp" esxi: datacenter: MyDatacenter hostname: esx001.mydomain.local # Reconfigure the CPU and Memory on the newly created VM # Will return the changes made - vsphere_guest: vcenter_hostname: vcenter.mydomain.local username: myuser password: mypass guest: newvm001 state: reconfigured vm_extra_config: vcpu.hotadd: yes mem.hotadd: yes notes: This is a test VM vm_disk: disk1: size_gb: 10 type: thin datastore: storage001 vm_nic: nic1: type: vmxnet3 network: VM Network network_type: standard vm_hardware: memory_mb: 4096 num_cpus: 4 osid: centos64Guest scsi: paravirtual esxi: datacenter: MyDatacenter hostname: esx001.mydomain.local # Deploy a guest from a template - vsphere_guest: vcenter_hostname: vcenter.mydomain.local username: myuser password: mypass guest: newvm001 from_template: yes template_src: centosTemplate cluster: MainCluster resource_pool: "/Resources" vm_extra_config: folder: MyFolder # Task to gather facts from a vSphere cluster only if the system is a VMware guest - vsphere_guest: vcenter_hostname: vcenter.mydomain.local username: myuser password: mypass guest: newvm001 vmware_guest_facts: yes --- # Typical output of a vsphere_facts run on a guest # If vmware tools is not installed, ipadresses with return None - hw_eth0: - addresstype: "assigned" label: "Network adapter 1" macaddress: "00:22:33:33:44:55" macaddress_dash: "00-22-33-33-44-55" ipaddresses: ['192.0.2.100', '2001:DB8:56ff:feac:4d8a'] summary: "VM Network" hw_guest_full_name: "newvm001" hw_guest_id: "rhel6_64Guest" hw_memtotal_mb: 2048 hw_name: "centos64Guest" hw_power_status: "POWERED ON" hw_processor_count: 2 
hw_product_uuid: "ef50bac8-2845-40ff-81d9-675315501dac" # hw_power_status will be one of the following values: # - POWERED ON # - POWERED OFF # - SUSPENDED # - POWERING ON # - POWERING OFF # - SUSPENDING # - RESETTING # - BLOCKED ON MSG # - REVERTING TO SNAPSHOT # - UNKNOWN # as seen in the VMPowerState-Class of PySphere: http://git.io/vlwOq --- # Remove a vm from vSphere # The VM must be powered_off or you need to use force to force a shutdown - vsphere_guest: vcenter_hostname: vcenter.mydomain.local username: myuser password: mypass guest: newvm001 state: absent force: yes ''' import os import re import ssl import traceback HAS_PYSPHERE = False try: from pysphere import VIServer, VIProperty, MORTypes from pysphere.resources import VimService_services as VI from pysphere.vi_task import VITask from pysphere import VIApiException HAS_PYSPHERE = True except ImportError: pass from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.six import string_types from ansible.module_utils._text import to_native # TODO: # Ability to set CPU/Memory reservations def add_scsi_controller(module, s, config, devices, type="paravirtual", bus_num=0, disk_ctrl_key=1): # add a scsi controller scsi_ctrl_spec = config.new_deviceChange() scsi_ctrl_spec.set_element_operation('add') if type == "lsi": # For RHEL5 scsi_ctrl = VI.ns0.VirtualLsiLogicController_Def("scsi_ctrl").pyclass() elif type == "paravirtual": # For RHEL6 scsi_ctrl = VI.ns0.ParaVirtualSCSIController_Def("scsi_ctrl").pyclass() elif type == "lsi_sas": scsi_ctrl = VI.ns0.VirtualLsiLogicSASController_Def( "scsi_ctrl").pyclass() elif type == "bus_logic": scsi_ctrl = VI.ns0.VirtualBusLogicController_Def("scsi_ctrl").pyclass() else: s.disconnect() module.fail_json( msg="Error adding scsi controller to vm spec. 
No scsi controller" " type of: %s" % (type)) scsi_ctrl.set_element_busNumber(int(bus_num)) scsi_ctrl.set_element_key(int(disk_ctrl_key)) scsi_ctrl.set_element_sharedBus("noSharing") scsi_ctrl_spec.set_element_device(scsi_ctrl) # Add the scsi controller to the VM spec. devices.append(scsi_ctrl_spec) return disk_ctrl_key def add_disk(module, s, config_target, config, devices, datastore, type="thin", size=200000, disk_ctrl_key=1, disk_number=0, key=0): # add a vmdk disk # Verify the datastore exists datastore_name, ds = find_datastore(module, s, datastore, config_target) # create a new disk - file based - for the vm disk_spec = config.new_deviceChange() disk_spec.set_element_fileOperation("create") disk_spec.set_element_operation("add") disk_ctlr = VI.ns0.VirtualDisk_Def("disk_ctlr").pyclass() disk_backing = VI.ns0.VirtualDiskFlatVer2BackingInfo_Def( "disk_backing").pyclass() disk_backing.set_element_fileName(datastore_name) disk_backing.set_element_diskMode("persistent") if type != "thick": disk_backing.set_element_thinProvisioned(1) disk_ctlr.set_element_key(key) disk_ctlr.set_element_controllerKey(int(disk_ctrl_key)) disk_ctlr.set_element_unitNumber(int(disk_number)) disk_ctlr.set_element_backing(disk_backing) disk_ctlr.set_element_capacityInKB(int(size)) disk_spec.set_element_device(disk_ctlr) devices.append(disk_spec) def add_cdrom(module, s, config_target, config, devices, default_devs, type="client", vm_cd_iso_path=None): # Add a cd-rom # Make sure the datastore exists. 
if vm_cd_iso_path: iso_location = vm_cd_iso_path.split('/', 1) datastore, ds = find_datastore( module, s, iso_location[0], config_target) iso_path = iso_location[1] # find ide controller ide_ctlr = None for dev in default_devs: if dev.typecode.type[1] == "VirtualIDEController": ide_ctlr = dev # add a cdrom based on a physical device if ide_ctlr: cd_spec = config.new_deviceChange() cd_spec.set_element_operation('add') cd_ctrl = VI.ns0.VirtualCdrom_Def("cd_ctrl").pyclass() if type == "iso": iso = VI.ns0.VirtualCdromIsoBackingInfo_Def("iso").pyclass() ds_ref = iso.new_datastore(ds) ds_ref.set_attribute_type(ds.get_attribute_type()) iso.set_element_datastore(ds_ref) iso.set_element_fileName("%s %s" % (datastore, iso_path)) cd_ctrl.set_element_backing(iso) cd_ctrl.set_element_key(20) cd_ctrl.set_element_controllerKey(ide_ctlr.get_element_key()) cd_ctrl.set_element_unitNumber(0) cd_spec.set_element_device(cd_ctrl) elif type == "client": client = VI.ns0.VirtualCdromRemoteAtapiBackingInfo_Def( "client").pyclass() client.set_element_deviceName("") cd_ctrl.set_element_backing(client) cd_ctrl.set_element_key(20) cd_ctrl.set_element_controllerKey(ide_ctlr.get_element_key()) cd_ctrl.set_element_unitNumber(0) cd_spec.set_element_device(cd_ctrl) else: s.disconnect() module.fail_json( msg="Error adding cdrom of type %s to vm spec. " " cdrom type can either be iso or client" % (type)) devices.append(cd_spec) def add_floppy(module, s, config_target, config, devices, default_devs, type="image", vm_floppy_image_path=None): # Add a floppy # Make sure the datastore exists. 
if vm_floppy_image_path: image_location = vm_floppy_image_path.split('/', 1) datastore, ds = find_datastore( module, s, image_location[0], config_target) image_path = image_location[1] floppy_spec = config.new_deviceChange() floppy_spec.set_element_operation('add') floppy_ctrl = VI.ns0.VirtualFloppy_Def("floppy_ctrl").pyclass() if type == "image": image = VI.ns0.VirtualFloppyImageBackingInfo_Def("image").pyclass() ds_ref = image.new_datastore(ds) ds_ref.set_attribute_type(ds.get_attribute_type()) image.set_element_datastore(ds_ref) image.set_element_fileName("%s %s" % (datastore, image_path)) floppy_ctrl.set_element_backing(image) floppy_ctrl.set_element_key(3) floppy_spec.set_element_device(floppy_ctrl) elif type == "client": client = VI.ns0.VirtualFloppyRemoteDeviceBackingInfo_Def( "client").pyclass() client.set_element_deviceName("/dev/fd0") floppy_ctrl.set_element_backing(client) floppy_ctrl.set_element_key(3) floppy_spec.set_element_device(floppy_ctrl) else: s.disconnect() module.fail_json( msg="Error adding floppy of type %s to vm spec. 
" " floppy type can either be image or client" % (type)) devices.append(floppy_spec) def add_nic(module, s, nfmor, config, devices, nic_type="vmxnet3", network_name="VM Network", network_type="standard"): # add a NIC # Different network card types are: "VirtualE1000", # "VirtualE1000e","VirtualPCNet32", "VirtualVmxnet", "VirtualNmxnet2", # "VirtualVmxnet3" nic_spec = config.new_deviceChange() nic_spec.set_element_operation("add") if nic_type == "e1000": nic_ctlr = VI.ns0.VirtualE1000_Def("nic_ctlr").pyclass() elif nic_type == "e1000e": nic_ctlr = VI.ns0.VirtualE1000e_Def("nic_ctlr").pyclass() elif nic_type == "pcnet32": nic_ctlr = VI.ns0.VirtualPCNet32_Def("nic_ctlr").pyclass() elif nic_type == "vmxnet": nic_ctlr = VI.ns0.VirtualVmxnet_Def("nic_ctlr").pyclass() elif nic_type == "vmxnet2": nic_ctlr = VI.ns0.VirtualVmxnet2_Def("nic_ctlr").pyclass() elif nic_type == "vmxnet3": nic_ctlr = VI.ns0.VirtualVmxnet3_Def("nic_ctlr").pyclass() else: s.disconnect() module.fail_json( msg="Error adding nic to vm spec. No nic type of: %s" % (nic_type)) if network_type == "standard": nic_backing = VI.ns0.VirtualEthernetCardNetworkBackingInfo_Def( "nic_backing").pyclass() nic_backing.set_element_deviceName(network_name) elif network_type == "dvs": # Get the portgroup key portgroupKey = find_portgroup_key(module, s, nfmor, network_name) # Get the dvswitch uuid dvswitch_uuid = find_dvswitch_uuid(module, s, nfmor, portgroupKey) nic_backing_port = VI.ns0.DistributedVirtualSwitchPortConnection_Def( "nic_backing_port").pyclass() nic_backing_port.set_element_switchUuid(dvswitch_uuid) nic_backing_port.set_element_portgroupKey(portgroupKey) nic_backing = VI.ns0.VirtualEthernetCardDistributedVirtualPortBackingInfo_Def( "nic_backing").pyclass() nic_backing.set_element_port(nic_backing_port) else: s.disconnect() module.fail_json( msg="Error adding nic backing to vm spec. 
No network type of:" " %s" % (network_type)) nic_ctlr.set_element_addressType("generated") nic_ctlr.set_element_backing(nic_backing) nic_ctlr.set_element_key(4) nic_spec.set_element_device(nic_ctlr) devices.append(nic_spec) def find_datastore(module, s, datastore, config_target): # Verify the datastore exists and put it in brackets if it does. ds = None if config_target: for d in config_target.Datastore: if (d.Datastore.Accessible and (datastore and d.Datastore.Name == datastore) or (not datastore)): ds = d.Datastore.Datastore datastore = d.Datastore.Name break else: for ds_mor, ds_name in s.get_datastores().items(): ds_props = VIProperty(s, ds_mor) if (ds_props.summary.accessible and (datastore and ds_name == datastore) or (not datastore)): ds = ds_mor datastore = ds_name if not ds: s.disconnect() module.fail_json(msg="Datastore: %s does not appear to exist" % (datastore)) datastore_name = "[%s]" % datastore return datastore_name, ds def find_portgroup_key(module, s, nfmor, network_name): # Find a portgroups key given the portgroup name. # Grab all the distributed virtual portgroup's names and key's. dvpg_mors = s._retrieve_properties_traversal( property_names=['name', 'key'], from_node=nfmor, obj_type='DistributedVirtualPortgroup') # Get the correct portgroup managed object. dvpg_mor = None for dvpg in dvpg_mors: if dvpg_mor: break for p in dvpg.PropSet: if p.Name == "name" and p.Val == network_name: dvpg_mor = dvpg if dvpg_mor: break # If dvpg_mor is empty we didn't find the named portgroup. if dvpg_mor is None: s.disconnect() module.fail_json( msg="Could not find the distributed virtual portgroup named" " %s" % network_name) # Get the portgroup key portgroupKey = None for p in dvpg_mor.PropSet: if p.Name == "key": portgroupKey = p.Val return portgroupKey def find_dvswitch_uuid(module, s, nfmor, portgroupKey): # Find a dvswitch's uuid given a portgroup key. # Function searches all dvswitches in the datacenter to find the switch # that has the portgroup key. 
# Grab the dvswitch uuid and portgroup properties dvswitch_mors = s._retrieve_properties_traversal( property_names=['uuid', 'portgroup'], from_node=nfmor, obj_type='DistributedVirtualSwitch') dvswitch_mor = None # Get the dvswitches managed object for dvswitch in dvswitch_mors: if dvswitch_mor: break for p in dvswitch.PropSet: if p.Name == "portgroup": pg_mors = p.Val.ManagedObjectReference for pg_mor in pg_mors: if dvswitch_mor: break key_mor = s._get_object_properties( pg_mor, property_names=['key']) for key in key_mor.PropSet: if key.Val == portgroupKey: dvswitch_mor = dvswitch # Get the switches uuid dvswitch_uuid = None for p in dvswitch_mor.PropSet: if p.Name == "uuid": dvswitch_uuid = p.Val return dvswitch_uuid def spec_singleton(spec, request, vm): if not spec: _this = request.new__this(vm._mor) _this.set_attribute_type(vm._mor.get_attribute_type()) request.set_element__this(_this) spec = request.new_spec() return spec def get_cdrom_params(module, s, vm_cdrom): cdrom_type = None cdrom_iso_path = None try: cdrom_type = vm_cdrom['type'] except KeyError: s.disconnect() module.fail_json( msg="Error on %s definition. cdrom type needs to be" " specified." % vm_cdrom) if cdrom_type == 'iso': try: cdrom_iso_path = vm_cdrom['iso_path'] except KeyError: s.disconnect() module.fail_json( msg="Error on %s definition. cdrom iso_path needs" " to be specified." 
% vm_cdrom) return cdrom_type, cdrom_iso_path def vmdisk_id(vm, current_datastore_name): id_list = [] for vm_disk in vm._disks: if current_datastore_name in vm_disk['descriptor']: id_list.append(vm_disk['device']['key']) return id_list def deploy_template(vsphere_client, guest, resource_pool, template_src, esxi, module, cluster_name, snapshot_to_clone, power_on_after_clone, vm_extra_config): vmTemplate = vsphere_client.get_vm_by_name(template_src) vmTarget = None if esxi: datacenter = esxi['datacenter'] esxi_hostname = esxi['hostname'] # Datacenter managed object reference dclist = [k for k, v in vsphere_client.get_datacenters().items() if v == datacenter] if dclist: dcmor=dclist[0] else: vsphere_client.disconnect() module.fail_json(msg="Cannot find datacenter named: %s" % datacenter) dcprops = VIProperty(vsphere_client, dcmor) # hostFolder managed reference hfmor = dcprops.hostFolder._obj # Grab the computerResource name and host properties crmors = vsphere_client._retrieve_properties_traversal( property_names=['name', 'host'], from_node=hfmor, obj_type='ComputeResource') # Grab the host managed object reference of the esxi_hostname try: hostmor = [k for k, v in vsphere_client.get_hosts().items() if v == esxi_hostname][0] except IndexError: vsphere_client.disconnect() module.fail_json(msg="Cannot find esx host named: %s" % esxi_hostname) # Grab the computeResource managed object reference of the host we are # creating the VM on. 
crmor = None for cr in crmors: if crmor: break for p in cr.PropSet: if p.Name == "host": for h in p.Val.get_element_ManagedObjectReference(): if h == hostmor: crmor = cr.Obj break if crmor: break crprops = VIProperty(vsphere_client, crmor) rpmor = crprops.resourcePool._obj elif resource_pool: try: cluster = [k for k, v in vsphere_client.get_clusters().items() if v == cluster_name][0] if cluster_name else None except IndexError: vsphere_client.disconnect() module.fail_json(msg="Cannot find Cluster named: %s" % cluster_name) try: rpmor = [k for k, v in vsphere_client.get_resource_pools( from_mor=cluster).items() if v == resource_pool][0] except IndexError: vsphere_client.disconnect() module.fail_json(msg="Cannot find Resource Pool named: %s" % resource_pool) else: module.fail_json(msg="You need to specify either esxi:[datacenter,hostname] or [cluster,resource_pool]") try: vmTarget = vsphere_client.get_vm_by_name(guest) except Exception: pass if not vmTemplate.is_powered_off(): module.fail_json( msg="Source %s must be powered off" % template_src ) try: if not vmTarget: cloneArgs = dict(resourcepool=rpmor, power_on=False) if snapshot_to_clone is not None: #check if snapshot_to_clone is specified, Create a Linked Clone instead of a full clone. cloneArgs["linked"] = True cloneArgs["snapshot"] = snapshot_to_clone if vm_extra_config.get("folder") is not None: # if a folder is specified, clone the VM into it cloneArgs["folder"] = vm_extra_config.get("folder") vmTemplate.clone(guest, **cloneArgs) vm = vsphere_client.get_vm_by_name(guest) # VM was created. 
If there is any extra config options specified, set if vm_extra_config: vm.set_extra_config(vm_extra_config) # Power on if asked if power_on_after_clone is True: state = 'powered_on' power_state(vm, state, True) changed = True else: changed = False vsphere_client.disconnect() module.exit_json(changed=changed) except Exception as e: module.fail_json( msg="Could not clone selected machine: %s" % e ) # example from https://github.com/kalazzerx/pysphere/blob/master/examples/pysphere_create_disk_and_add_to_vm.py # was used. def update_disks(vsphere_client, vm, module, vm_disk, changes): request = VI.ReconfigVM_TaskRequestMsg() changed = False for cnf_disk in vm_disk: disk_id = re.sub("disk", "", cnf_disk) disk_type = vm_disk[cnf_disk]['type'] found = False for dev_key in vm._devices: if vm._devices[dev_key]['type'] == 'VirtualDisk': hdd_id = vm._devices[dev_key]['label'].split()[2] if disk_id == hdd_id: found = True continue if not found: VI.ReconfigVM_TaskRequestMsg() _this = request.new__this(vm._mor) _this.set_attribute_type(vm._mor.get_attribute_type()) request.set_element__this(_this) spec = request.new_spec() dc = spec.new_deviceChange() dc.Operation = "add" dc.FileOperation = "create" hd = VI.ns0.VirtualDisk_Def("hd").pyclass() hd.Key = -100 hd.UnitNumber = int(disk_id) hd.CapacityInKB = int(vm_disk[cnf_disk]['size_gb']) * 1024 * 1024 hd.ControllerKey = 1000 # module.fail_json(msg="peos : %s" % vm_disk[cnf_disk]) backing = VI.ns0.VirtualDiskFlatVer2BackingInfo_Def("backing").pyclass() backing.FileName = "[%s]" % vm_disk[cnf_disk]['datastore'] backing.DiskMode = "persistent" backing.Split = False backing.WriteThrough = False if disk_type == 'thin': backing.ThinProvisioned = True else: backing.ThinProvisioned = False backing.EagerlyScrub = False hd.Backing = backing dc.Device = hd spec.DeviceChange = [dc] request.set_element_spec(spec) ret = vsphere_client._proxy.ReconfigVM_Task(request)._returnval # Wait for the task to finish task = VITask(ret, vsphere_client) 
status = task.wait_for_state([task.STATE_SUCCESS, task.STATE_ERROR]) if status == task.STATE_SUCCESS: changed = True changes[cnf_disk] = vm_disk[cnf_disk] elif status == task.STATE_ERROR: module.fail_json( msg="Error reconfiguring vm: %s, [%s]" % ( task.get_error_message(), vm_disk[cnf_disk])) return changed, changes def reconfigure_vm(vsphere_client, vm, module, esxi, resource_pool, cluster_name, guest, vm_extra_config, vm_hardware, vm_disk, vm_nic, state, force): spec = None changed = False changes = {} request = None shutdown = False poweron = vm.is_powered_on() devices = [] memoryHotAddEnabled = bool(vm.properties.config.memoryHotAddEnabled) cpuHotAddEnabled = bool(vm.properties.config.cpuHotAddEnabled) cpuHotRemoveEnabled = bool(vm.properties.config.cpuHotRemoveEnabled) changed, changes = update_disks(vsphere_client, vm, module, vm_disk, changes) vm.properties._flush_cache() request = VI.ReconfigVM_TaskRequestMsg() # Change extra config if vm_extra_config: spec = spec_singleton(spec, request, vm) extra_config = [] for k,v in vm_extra_config.items(): ec = spec.new_extraConfig() ec.set_element_key(str(k)) ec.set_element_value(str(v)) extra_config.append(ec) spec.set_element_extraConfig(extra_config) changes["extra_config"] = vm_extra_config # Change Memory if 'memory_mb' in vm_hardware: if int(vm_hardware['memory_mb']) != vm.properties.config.hardware.memoryMB: spec = spec_singleton(spec, request, vm) if vm.is_powered_on(): if force: # No hot add but force if not memoryHotAddEnabled: shutdown = True elif int(vm_hardware['memory_mb']) < vm.properties.config.hardware.memoryMB: shutdown = True else: # Fail on no hot add and no force if not memoryHotAddEnabled: module.fail_json( msg="memoryHotAdd is not enabled. force is " "required for shutdown") # Fail on no force and memory shrink elif int(vm_hardware['memory_mb']) < vm.properties.config.hardware.memoryMB: module.fail_json( msg="Cannot lower memory on a live VM. 
force is " "required for shutdown") # set the new RAM size spec.set_element_memoryMB(int(vm_hardware['memory_mb'])) changes['memory'] = vm_hardware['memory_mb'] # ===( Reconfigure Network )====# if vm_nic: changed = reconfigure_net(vsphere_client, vm, module, esxi, resource_pool, guest, vm_nic, cluster_name) # Change Num CPUs if 'num_cpus' in vm_hardware: if int(vm_hardware['num_cpus']) != vm.properties.config.hardware.numCPU: spec = spec_singleton(spec, request, vm) if vm.is_powered_on(): if force: # No hot add but force if not cpuHotAddEnabled: shutdown = True elif int(vm_hardware['num_cpus']) < vm.properties.config.hardware.numCPU: if not cpuHotRemoveEnabled: shutdown = True else: # Fail on no hot add and no force if not cpuHotAddEnabled: module.fail_json( msg="cpuHotAdd is not enabled. force is " "required for shutdown") # Fail on no force and cpu shrink without hot remove elif int(vm_hardware['num_cpus']) < vm.properties.config.hardware.numCPU: if not cpuHotRemoveEnabled: module.fail_json( msg="Cannot lower CPU on a live VM without " "cpuHotRemove. 
force is required for shutdown") spec.set_element_numCPUs(int(vm_hardware['num_cpus'])) changes['cpu'] = vm_hardware['num_cpus'] # Change CDROM if 'vm_cdrom' in vm_hardware: spec = spec_singleton(spec, request, vm) cdrom_type, cdrom_iso_path = get_cdrom_params(module, vsphere_client, vm_hardware['vm_cdrom']) cdrom = None current_devices = vm.properties.config.hardware.device for dev in current_devices: if dev._type == 'VirtualCdrom': cdrom = dev._obj break if cdrom_type == 'iso': iso_location = cdrom_iso_path.split('/', 1) datastore, ds = find_datastore( module, vsphere_client, iso_location[0], None) iso_path = iso_location[1] iso = VI.ns0.VirtualCdromIsoBackingInfo_Def('iso').pyclass() iso.set_element_fileName('%s %s' % (datastore, iso_path)) cdrom.set_element_backing(iso) cdrom.Connectable.set_element_connected(True) cdrom.Connectable.set_element_startConnected(True) elif cdrom_type == 'client': client = VI.ns0.VirtualCdromRemoteAtapiBackingInfo_Def('client').pyclass() client.set_element_deviceName("") cdrom.set_element_backing(client) cdrom.Connectable.set_element_connected(True) cdrom.Connectable.set_element_startConnected(True) else: vsphere_client.disconnect() module.fail_json( msg="Error adding cdrom of type %s to vm spec. " " cdrom type can either be iso or client" % (cdrom_type)) dev_change = spec.new_deviceChange() dev_change.set_element_device(cdrom) dev_change.set_element_operation('edit') devices.append(dev_change) changes['cdrom'] = vm_hardware['vm_cdrom'] # Resize hard drives if vm_disk: spec = spec_singleton(spec, request, vm) # Get a list of the VM's hard drives dev_list = [d for d in vm.properties.config.hardware.device if d._type=='VirtualDisk'] if len(vm_disk) > len(dev_list): vsphere_client.disconnect() module.fail_json(msg="Error in vm_disk definition. 
Too many disks defined in comparison to the VM's disk profile.") disk_num = 0 dev_changes = [] disks_changed = {} for disk in sorted(vm_disk): try: disksize = int(vm_disk[disk]['size_gb']) # Convert the disk size to kilobytes disksize = disksize * 1024 * 1024 except (KeyError, ValueError): vsphere_client.disconnect() module.fail_json(msg="Error in '%s' definition. Size needs to be specified as an integer." % disk) # Make sure the new disk size is higher than the current value dev = dev_list[disk_num] if disksize < int(dev.capacityInKB): vsphere_client.disconnect() module.fail_json(msg="Error in '%s' definition. New size needs to be higher than the current value (%s GB)." % (disk, int(dev.capacityInKB) / 1024 / 1024)) # Set the new disk size elif disksize > int(dev.capacityInKB): dev_obj = dev._obj dev_obj.set_element_capacityInKB(disksize) dev_change = spec.new_deviceChange() dev_change.set_element_operation("edit") dev_change.set_element_device(dev_obj) dev_changes.append(dev_change) disks_changed[disk] = {'size_gb': int(vm_disk[disk]['size_gb'])} disk_num = disk_num + 1 if dev_changes: spec.set_element_deviceChange(dev_changes) changes['disks'] = disks_changed if len(changes): if shutdown and vm.is_powered_on(): try: vm.power_off(sync_run=True) vm.get_status() except Exception as e: module.fail_json(msg='Failed to shutdown vm %s: %s' % (guest, to_native(e)), exception=traceback.format_exc()) if len(devices): spec.set_element_deviceChange(devices) request.set_element_spec(spec) ret = vsphere_client._proxy.ReconfigVM_Task(request)._returnval # Wait for the task to finish task = VITask(ret, vsphere_client) status = task.wait_for_state([task.STATE_SUCCESS, task.STATE_ERROR]) if status == task.STATE_SUCCESS: changed = True elif status == task.STATE_ERROR: module.fail_json( msg="Error reconfiguring vm: %s" % task.get_error_message()) if vm.is_powered_off() and poweron: try: vm.power_on(sync_run=True) except Exception as e: module.fail_json( msg='Failed to power on vm 
%s : %s' % (guest, to_native(e)), exception=traceback.format_exc() ) vsphere_client.disconnect() if changed: module.exit_json(changed=True, changes=changes) module.exit_json(changed=False) def reconfigure_net(vsphere_client, vm, module, esxi, resource_pool, guest, vm_nic, cluster_name=None): s = vsphere_client nics = {} request = VI.ReconfigVM_TaskRequestMsg() _this = request.new__this(vm._mor) _this.set_attribute_type(vm._mor.get_attribute_type()) request.set_element__this(_this) nic_changes = [] datacenter = esxi['datacenter'] # Datacenter managed object reference dclist = [k for k, v in vsphere_client.get_datacenters().items() if v == datacenter] if dclist: dcmor=dclist[0] else: vsphere_client.disconnect() module.fail_json(msg="Cannot find datacenter named: %s" % datacenter) dcprops = VIProperty(vsphere_client, dcmor) nfmor = dcprops.networkFolder._obj for k,v in vm_nic.items(): nicNum = k[len(k) -1] if vm_nic[k]['network_type'] == 'dvs': portgroupKey = find_portgroup_key(module, s, nfmor, vm_nic[k]['network']) todvs = True elif vm_nic[k]['network_type'] == 'standard': todvs = False # Detect cards that need to be changed and network type (and act accordingly) for dev in vm.properties.config.hardware.device: if dev._type in ["VirtualE1000", "VirtualE1000e", "VirtualPCNet32", "VirtualVmxnet", "VirtualNmxnet2", "VirtualVmxnet3"]: devNum = dev.deviceInfo.label[len(dev.deviceInfo.label) - 1] if devNum == nicNum: fromdvs = dev.deviceInfo.summary.split(':')[0] == 'DVSwitch' if todvs and fromdvs: if dev.backing.port._obj.get_element_portgroupKey() != portgroupKey: nics[k] = (dev, portgroupKey, 1) elif fromdvs and not todvs: nics[k] = (dev, '', 2) elif not fromdvs and todvs: nics[k] = (dev, portgroupKey, 3) elif not fromdvs and not todvs: if dev.backing._obj.get_element_deviceName() != vm_nic[k]['network']: nics[k] = (dev, '', 2) else: pass else: module.exit_json() if len(nics) > 0: for nic, obj in nics.items(): """ 1,2 and 3 are used to mark which action should be taken 
1 = from a distributed switch to a distributed switch 2 = to a standard switch 3 = to a distributed switch """ dev = obj[0] pgKey = obj[1] dvsKey = obj[2] if dvsKey == 1: dev.backing.port._obj.set_element_portgroupKey(pgKey) dev.backing.port._obj.set_element_portKey('') if dvsKey == 3: dvswitch_uuid = find_dvswitch_uuid(module, s, nfmor, pgKey) nic_backing_port = VI.ns0.DistributedVirtualSwitchPortConnection_Def( "nic_backing_port").pyclass() nic_backing_port.set_element_switchUuid(dvswitch_uuid) nic_backing_port.set_element_portgroupKey(pgKey) nic_backing_port.set_element_portKey('') nic_backing = VI.ns0.VirtualEthernetCardDistributedVirtualPortBackingInfo_Def( "nic_backing").pyclass() nic_backing.set_element_port(nic_backing_port) dev._obj.set_element_backing(nic_backing) if dvsKey == 2: nic_backing = VI.ns0.VirtualEthernetCardNetworkBackingInfo_Def( "nic_backing").pyclass() nic_backing.set_element_deviceName(vm_nic[nic]['network']) dev._obj.set_element_backing(nic_backing) for nic, obj in nics.items(): dev = obj[0] spec = request.new_spec() nic_change = spec.new_deviceChange() nic_change.set_element_device(dev._obj) nic_change.set_element_operation("edit") nic_changes.append(nic_change) spec.set_element_deviceChange(nic_changes) request.set_element_spec(spec) ret = vsphere_client._proxy.ReconfigVM_Task(request)._returnval task = VITask(ret, vsphere_client) status = task.wait_for_state([task.STATE_SUCCESS, task.STATE_ERROR]) if status == task.STATE_SUCCESS: return(True) elif status == task.STATE_ERROR: module.fail_json(msg="Could not change network %s" % task.get_error_message()) elif len(nics) == 0: return(False) def _build_folder_tree(nodes, parent): tree = {} for node in nodes: if node['parent'] == parent: tree[node['name']] = dict.copy(node) tree[node['name']]['subfolders'] = _build_folder_tree(nodes, node['id']) del tree[node['name']]['parent'] return tree def _find_path_in_tree(tree, path): for name, o in tree.items(): if name == path[0]: if len(path) == 1: 
return o else: return _find_path_in_tree(o['subfolders'], path[1:]) return None def _get_folderid_for_path(vsphere_client, datacenter, path): content = vsphere_client._retrieve_properties_traversal(property_names=['name', 'parent'], obj_type=MORTypes.Folder) if not content: return {} node_list = [ { 'id': o.Obj, 'name': o.PropSet[0].Val, 'parent': (o.PropSet[1].Val if len(o.PropSet) > 1 else None) } for o in content ] tree = _build_folder_tree(node_list, datacenter) tree = _find_path_in_tree(tree, ['vm'])['subfolders'] folder = _find_path_in_tree(tree, path.split('/')) return folder['id'] if folder else None def create_vm(vsphere_client, module, esxi, resource_pool, cluster_name, guest, vm_extra_config, vm_hardware, vm_disk, vm_nic, vm_hw_version, state): datacenter = esxi['datacenter'] esxi_hostname = esxi['hostname'] # Datacenter managed object reference dclist = [k for k, v in vsphere_client.get_datacenters().items() if v == datacenter] if dclist: dcmor=dclist[0] else: vsphere_client.disconnect() module.fail_json(msg="Cannot find datacenter named: %s" % datacenter) dcprops = VIProperty(vsphere_client, dcmor) # hostFolder managed reference hfmor = dcprops.hostFolder._obj # virtualmachineFolder managed object reference if vm_extra_config.get('folder'): # try to find the folder by its full path, e.g. 
'production/customerA/lamp' vmfmor = _get_folderid_for_path(vsphere_client, dcmor, vm_extra_config.get('folder')) # try the legacy behaviour of just matching the folder name, so 'lamp' alone matches 'production/customerA/lamp' if vmfmor is None: for mor, name in vsphere_client._get_managed_objects(MORTypes.Folder).items(): if name == vm_extra_config['folder']: vmfmor = mor # if neither of strategies worked, bail out if vmfmor is None: vsphere_client.disconnect() module.fail_json(msg="Cannot find folder named: %s" % vm_extra_config['folder']) else: vmfmor = dcprops.vmFolder._obj # networkFolder managed object reference nfmor = dcprops.networkFolder._obj # Grab the computerResource name and host properties crmors = vsphere_client._retrieve_properties_traversal( property_names=['name', 'host'], from_node=hfmor, obj_type='ComputeResource') # Grab the host managed object reference of the esxi_hostname try: hostmor = [k for k, v in vsphere_client.get_hosts().items() if v == esxi_hostname][0] except IndexError: vsphere_client.disconnect() module.fail_json(msg="Cannot find esx host named: %s" % esxi_hostname) # Grab the computerResource managed object reference of the host we are # creating the VM on. crmor = None for cr in crmors: if crmor: break for p in cr.PropSet: if p.Name == "host": for h in p.Val.get_element_ManagedObjectReference(): if h == hostmor: crmor = cr.Obj break if crmor: break crprops = VIProperty(vsphere_client, crmor) # Get resource pool managed reference # Requires that a cluster name be specified. 
if resource_pool: try: cluster = [k for k, v in vsphere_client.get_clusters().items() if v == cluster_name][0] if cluster_name else None except IndexError: vsphere_client.disconnect() module.fail_json(msg="Cannot find Cluster named: %s" % cluster_name) try: rpmor = [k for k, v in vsphere_client.get_resource_pools( from_mor=cluster).items() if v == resource_pool][0] except IndexError: vsphere_client.disconnect() module.fail_json(msg="Cannot find Resource Pool named: %s" % resource_pool) else: rpmor = crprops.resourcePool._obj # CREATE VM CONFIGURATION # get config target request = VI.QueryConfigTargetRequestMsg() _this = request.new__this(crprops.environmentBrowser._obj) _this.set_attribute_type( crprops.environmentBrowser._obj.get_attribute_type()) request.set_element__this(_this) h = request.new_host(hostmor) h.set_attribute_type(hostmor.get_attribute_type()) request.set_element_host(h) config_target = vsphere_client._proxy.QueryConfigTarget(request)._returnval # get default devices request = VI.QueryConfigOptionRequestMsg() _this = request.new__this(crprops.environmentBrowser._obj) _this.set_attribute_type( crprops.environmentBrowser._obj.get_attribute_type()) request.set_element__this(_this) h = request.new_host(hostmor) h.set_attribute_type(hostmor.get_attribute_type()) request.set_element_host(h) config_option = vsphere_client._proxy.QueryConfigOption(request)._returnval default_devs = config_option.DefaultDevice # add parameters to the create vm task create_vm_request = VI.CreateVM_TaskRequestMsg() config = create_vm_request.new_config() if vm_hw_version: config.set_element_version(vm_hw_version) vmfiles = config.new_files() datastore_name, ds = find_datastore( module, vsphere_client, vm_disk['disk1']['datastore'], config_target) vmfiles.set_element_vmPathName(datastore_name) config.set_element_files(vmfiles) config.set_element_name(guest) if 'notes' in vm_extra_config: config.set_element_annotation(vm_extra_config['notes']) 
config.set_element_memoryMB(int(vm_hardware['memory_mb'])) config.set_element_numCPUs(int(vm_hardware['num_cpus'])) config.set_element_guestId(vm_hardware['osid']) devices = [] # Attach all the hardware we want to the VM spec. # Add a scsi controller to the VM spec. disk_ctrl_key = add_scsi_controller( module, vsphere_client, config, devices, vm_hardware['scsi']) if vm_disk: disk_num = 0 disk_key = 0 bus_num = 0 disk_ctrl = 1 for disk in sorted(vm_disk): try: datastore = vm_disk[disk]['datastore'] except KeyError: vsphere_client.disconnect() module.fail_json( msg="Error on %s definition. datastore needs to be" " specified." % disk) try: disksize = int(vm_disk[disk]['size_gb']) # Convert the disk size to kiloboytes disksize = disksize * 1024 * 1024 except (KeyError, ValueError): vsphere_client.disconnect() module.fail_json(msg="Error on %s definition. size needs to be specified as an integer." % disk) try: disktype = vm_disk[disk]['type'] except KeyError: vsphere_client.disconnect() module.fail_json( msg="Error on %s definition. type needs to be" " specified." % disk) if disk_num == 7: disk_num = disk_num + 1 disk_key = disk_key + 1 elif disk_num > 15: bus_num = bus_num + 1 disk_ctrl = disk_ctrl + 1 disk_ctrl_key = add_scsi_controller( module, vsphere_client, config, devices, type=vm_hardware['scsi'], bus_num=bus_num, disk_ctrl_key=disk_ctrl) disk_num = 0 disk_key = 0 # Add the disk to the VM spec. add_disk( module, vsphere_client, config_target, config, devices, datastore, disktype, disksize, disk_ctrl_key, disk_num, disk_key) disk_num = disk_num + 1 disk_key = disk_key + 1 if 'vm_cdrom' in vm_hardware: cdrom_type, cdrom_iso_path = get_cdrom_params(module, vsphere_client, vm_hardware['vm_cdrom']) # Add a CD-ROM device to the VM. 
add_cdrom(module, vsphere_client, config_target, config, devices, default_devs, cdrom_type, cdrom_iso_path) if 'vm_floppy' in vm_hardware: floppy_image_path = None floppy_type = None try: floppy_type = vm_hardware['vm_floppy']['type'] except KeyError: vsphere_client.disconnect() module.fail_json( msg="Error on %s definition. floppy type needs to be" " specified." % vm_hardware['vm_floppy']) if floppy_type == 'image': try: floppy_image_path = vm_hardware['vm_floppy']['image_path'] except KeyError: vsphere_client.disconnect() module.fail_json( msg="Error on %s definition. floppy image_path needs" " to be specified." % vm_hardware['vm_floppy']) # Add a floppy to the VM. add_floppy(module, vsphere_client, config_target, config, devices, default_devs, floppy_type, floppy_image_path) if vm_nic: for nic in sorted(vm_nic): try: nictype = vm_nic[nic]['type'] except KeyError: vsphere_client.disconnect() module.fail_json( msg="Error on %s definition. type needs to be " " specified." % nic) try: network = vm_nic[nic]['network'] except KeyError: vsphere_client.disconnect() module.fail_json( msg="Error on %s definition. network needs to be " " specified." % nic) try: network_type = vm_nic[nic]['network_type'] except KeyError: vsphere_client.disconnect() module.fail_json( msg="Error on %s definition. network_type needs to be " " specified." % nic) # Add the nic to the VM spec. 
add_nic(module, vsphere_client, nfmor, config, devices, nictype, network, network_type) config.set_element_deviceChange(devices) create_vm_request.set_element_config(config) folder_mor = create_vm_request.new__this(vmfmor) folder_mor.set_attribute_type(vmfmor.get_attribute_type()) create_vm_request.set_element__this(folder_mor) rp_mor = create_vm_request.new_pool(rpmor) rp_mor.set_attribute_type(rpmor.get_attribute_type()) create_vm_request.set_element_pool(rp_mor) host_mor = create_vm_request.new_host(hostmor) host_mor.set_attribute_type(hostmor.get_attribute_type()) create_vm_request.set_element_host(host_mor) # CREATE THE VM taskmor = vsphere_client._proxy.CreateVM_Task(create_vm_request)._returnval task = VITask(taskmor, vsphere_client) task.wait_for_state([task.STATE_SUCCESS, task.STATE_ERROR]) if task.get_state() == task.STATE_ERROR: vsphere_client.disconnect() module.fail_json(msg="Error creating vm: %s" % task.get_error_message()) else: # We always need to get the vm because we are going to gather facts vm = vsphere_client.get_vm_by_name(guest) # VM was created. If there is any extra config options specified, set # them here , disconnect from vcenter, then exit. 
if vm_extra_config: vm.set_extra_config(vm_extra_config) # Power on the VM if it was requested power_state(vm, state, True) vmfacts=gather_facts(vm) vsphere_client.disconnect() module.exit_json( ansible_facts=vmfacts, changed=True, changes="Created VM %s" % guest) def delete_vm(vsphere_client, module, guest, vm, force): try: if vm.is_powered_on(): if force: try: vm.power_off(sync_run=True) vm.get_status() except Exception as e: module.fail_json( msg='Failed to shutdown vm %s: %s' % (guest, to_native(e)), exception=traceback.format_exc()) else: module.fail_json( msg='You must use either shut the vm down first or ' 'use force ') # Invoke Destroy_Task request = VI.Destroy_TaskRequestMsg() _this = request.new__this(vm._mor) _this.set_attribute_type(vm._mor.get_attribute_type()) request.set_element__this(_this) ret = vsphere_client._proxy.Destroy_Task(request)._returnval task = VITask(ret, vsphere_client) # Wait for the task to finish status = task.wait_for_state( [task.STATE_SUCCESS, task.STATE_ERROR]) if status == task.STATE_ERROR: vsphere_client.disconnect() module.fail_json(msg="Error removing vm: %s %s" % task.get_error_message()) module.exit_json(changed=True, changes="VM %s deleted" % guest) except Exception as e: module.fail_json( msg='Failed to delete vm %s : %s' % (guest, to_native(e)), exception=traceback.format_exc()) def power_state(vm, state, force): """ Correctly set the power status for a VM determined by the current and requested states. force is forceful """ power_status = vm.get_status() check_status = ' '.join(state.split("_")).upper() # Need Force if not force and power_status in [ 'SUSPENDED', 'POWERING ON', 'RESETTING', 'BLOCKED ON MSG' ]: return "VM is in %s power state. Force is required!" 
% power_status # State is already true if power_status == check_status: return False else: try: if state == 'powered_off': vm.power_off(sync_run=True) elif state == 'powered_on': vm.power_on(sync_run=True) elif state == 'restarted': if power_status in ('POWERED ON', 'POWERING ON', 'RESETTING'): vm.reset(sync_run=False) else: return "Cannot restart VM in the current state %s" \ % power_status return True except Exception as e: return e return False def gather_facts(vm): """ Gather facts for VM directly from vsphere. """ vm.get_properties() facts = { 'module_hw': True, 'hw_name': vm.properties.name, 'hw_power_status': vm.get_status(), 'hw_guest_full_name': vm.properties.config.guestFullName, 'hw_guest_id': vm.properties.config.guestId, 'hw_product_uuid': vm.properties.config.uuid, 'hw_instance_uuid': vm.properties.config.instanceUuid, 'hw_processor_count': vm.properties.config.hardware.numCPU, 'hw_memtotal_mb': vm.properties.config.hardware.memoryMB, 'hw_interfaces':[], } netInfo = vm.get_property('net') netDict = {} if netInfo: for net in netInfo: netDict[net['mac_address']] = net['ip_addresses'] ifidx = 0 for entry in vm.properties.config.hardware.device: if not hasattr(entry, 'macAddress'): continue factname = 'hw_eth' + str(ifidx) facts[factname] = { 'addresstype': entry.addressType, 'label': entry.deviceInfo.label, 'macaddress': entry.macAddress, 'ipaddresses': netDict.get(entry.macAddress, None), 'macaddress_dash': entry.macAddress.replace(':', '-'), 'summary': entry.deviceInfo.summary, } facts['hw_interfaces'].append('eth'+str(ifidx)) ifidx += 1 return facts class DefaultVMConfig(object): """ Shallow and deep dict comparison for interfaces """ def __init__(self, check_dict, interface_dict): self.check_dict, self.interface_dict = check_dict, interface_dict self.set_current, self.set_past = set( check_dict.keys()), set(interface_dict.keys()) self.intersect = self.set_current.intersection(self.set_past) self.recursive_missing = None def shallow_diff(self): return 
self.set_past - self.intersect def recursive_diff(self): if not self.recursive_missing: self.recursive_missing = [] for key, value in self.interface_dict.items(): if isinstance(value, dict): for k, v in value.items(): if k in self.check_dict[key]: if not isinstance(self.check_dict[key][k], v): try: if v == int: self.check_dict[key][k] = int(self.check_dict[key][k]) elif v == string_types: self.check_dict[key][k] = to_native(self.check_dict[key][k], errors='surrogate_or_strict') else: raise ValueError except ValueError: self.recursive_missing.append((k, v)) else: self.recursive_missing.append((k, v)) return self.recursive_missing def config_check(name, passed, default, module): """ Checks that the dict passed for VM configuration matches the required interface declared at the top of __main__ """ diff = DefaultVMConfig(passed, default) if len(diff.shallow_diff()): module.fail_json( msg="Missing required key/pair [%s]. %s must contain %s" % (', '.join(diff.shallow_diff()), name, default)) if diff.recursive_diff(): module.fail_json( msg="Config mismatch for %s on %s" % (name, diff.recursive_diff())) return True def main(): vm = None proto_vm_hardware = { 'memory_mb': int, 'num_cpus': int, 'scsi': string_types, 'osid': string_types } proto_vm_disk = { 'disk1': { 'datastore': string_types, 'size_gb': int, 'type': string_types } } proto_vm_nic = { 'nic1': { 'type': string_types, 'network': string_types, 'network_type': string_types } } proto_esxi = { 'datacenter': string_types, 'hostname': string_types } module = AnsibleModule( argument_spec=dict( vcenter_hostname=dict( type='str', default=os.environ.get('VMWARE_HOST') ), username=dict( type='str', default=os.environ.get('VMWARE_USER') ), password=dict( type='str', no_log=True, default=os.environ.get('VMWARE_PASSWORD') ), state=dict( required=False, choices=[ 'powered_on', 'powered_off', 'present', 'absent', 'restarted', 'reconfigured' ], default='present'), vmware_guest_facts=dict(required=False, type='bool'), 
from_template=dict(required=False, type='bool'), template_src=dict(required=False, type='str'), snapshot_to_clone=dict(required=False, default=None, type='str'), guest=dict(required=True, type='str'), vm_disk=dict(required=False, type='dict', default={}), vm_nic=dict(required=False, type='dict', default={}), vm_hardware=dict(required=False, type='dict', default={}), vm_extra_config=dict(required=False, type='dict', default={}), vm_hw_version=dict(required=False, default=None, type='str'), resource_pool=dict(required=False, default=None, type='str'), cluster=dict(required=False, default=None, type='str'), force=dict(required=False, type='bool', default=False), esxi=dict(required=False, type='dict', default={}), validate_certs=dict(required=False, type='bool', default=True), power_on_after_clone=dict(required=False, type='bool', default=True) ), supports_check_mode=False, mutually_exclusive=[['state', 'vmware_guest_facts'],['state', 'from_template']], required_together=[ ['state', 'force'], [ 'state', 'vm_disk', 'vm_nic', 'vm_hardware', 'esxi' ], ['from_template', 'template_src'], ], ) if not HAS_PYSPHERE: module.fail_json(msg='pysphere module required') vcenter_hostname = module.params['vcenter_hostname'] username = module.params['username'] password = module.params['password'] vmware_guest_facts = module.params['vmware_guest_facts'] state = module.params['state'] guest = module.params['guest'] force = module.params['force'] vm_disk = module.params['vm_disk'] vm_nic = module.params['vm_nic'] vm_hardware = module.params['vm_hardware'] vm_extra_config = module.params['vm_extra_config'] vm_hw_version = module.params['vm_hw_version'] esxi = module.params['esxi'] resource_pool = module.params['resource_pool'] cluster = module.params['cluster'] template_src = module.params['template_src'] from_template = module.params['from_template'] snapshot_to_clone = module.params['snapshot_to_clone'] power_on_after_clone = module.params['power_on_after_clone'] validate_certs = 
module.params['validate_certs'] # CONNECT TO THE SERVER viserver = VIServer() if validate_certs and not hasattr(ssl, 'SSLContext') and not vcenter_hostname.startswith('http://'): module.fail_json(msg='pysphere does not support verifying certificates with python < 2.7.9. Either update python or set ' 'validate_certs=False on the task') try: viserver.connect(vcenter_hostname, username, password) except ssl.SSLError as sslerr: if '[SSL: CERTIFICATE_VERIFY_FAILED]' in sslerr.strerror: if not validate_certs: ssl._create_default_https_context ssl._create_default_https_context = ssl._create_unverified_context viserver.connect(vcenter_hostname, username, password) else: module.fail_json(msg='Unable to validate the certificate of the vcenter host %s' % vcenter_hostname) else: raise except VIApiException as err: module.fail_json(msg="Cannot connect to %s: %s" % (vcenter_hostname, to_native(err)), exception=traceback.format_exc()) # Check if the VM exists before continuing try: vm = viserver.get_vm_by_name(guest) except Exception: pass if vm: # Run for facts only if vmware_guest_facts: try: module.exit_json(ansible_facts=gather_facts(vm)) except Exception as e: module.fail_json(msg="Fact gather failed with exception %s" % to_native(e), exception=traceback.format_exc()) # Power Changes elif state in ['powered_on', 'powered_off', 'restarted']: state_result = power_state(vm, state, force) # Failure if isinstance(state_result, string_types): module.fail_json(msg=state_result) else: module.exit_json(changed=state_result) # Just check if there elif state == 'present': module.exit_json(changed=False) # Fail on reconfig without params elif state == 'reconfigured': reconfigure_vm( vsphere_client=viserver, vm=vm, module=module, esxi=esxi, resource_pool=resource_pool, cluster_name=cluster, guest=guest, vm_extra_config=vm_extra_config, vm_hardware=vm_hardware, vm_disk=vm_disk, vm_nic=vm_nic, state=state, force=force ) elif state == 'absent': delete_vm( vsphere_client=viserver, 
module=module, guest=guest, vm=vm, force=force) # VM doesn't exist else: # Fail for fact gather task if vmware_guest_facts: module.fail_json( msg="No such VM %s. Fact gathering requires an existing vm" % guest) elif from_template: deploy_template( vsphere_client=viserver, esxi=esxi, resource_pool=resource_pool, guest=guest, template_src=template_src, module=module, cluster_name=cluster, snapshot_to_clone=snapshot_to_clone, power_on_after_clone=power_on_after_clone, vm_extra_config=vm_extra_config ) if state in ['restarted', 'reconfigured']: module.fail_json( msg="No such VM %s. States [" "restarted, reconfigured] required an existing VM" % guest) elif state == 'absent': module.exit_json(changed=False, msg="vm %s not present" % guest) # check if user is trying to perform state operation on a vm which doesn't exists elif state in ['present', 'powered_off', 'powered_on'] and not all((vm_extra_config, vm_hardware, vm_disk, vm_nic, esxi)): module.exit_json(changed=False, msg="vm %s not present" % guest) # Create the VM elif state in ['present', 'powered_off', 'powered_on']: # Check the guest_config config_check("vm_disk", vm_disk, proto_vm_disk, module) config_check("vm_nic", vm_nic, proto_vm_nic, module) config_check("vm_hardware", vm_hardware, proto_vm_hardware, module) config_check("esxi", esxi, proto_esxi, module) create_vm( vsphere_client=viserver, module=module, esxi=esxi, resource_pool=resource_pool, cluster_name=cluster, guest=guest, vm_extra_config=vm_extra_config, vm_hardware=vm_hardware, vm_disk=vm_disk, vm_nic=vm_nic, vm_hw_version=vm_hw_version, state=state ) viserver.disconnect() module.exit_json( changed=False, vcenter=vcenter_hostname) if __name__ == '__main__': main()
gpl-3.0
Tao-Ma/incubator-hawq
src/bin/gpupgrade/setcatversion.py
9
2397
import subprocess

# Marketing release number -> pg_controldata "Catalog version number".
releases = {"3.0": "200703112",
            "3.1": "200712072",
            "3.2": "200808253",
            "3.3": "200902041"}


def release2catverno(rno):
    """Return the catalog version number string for release *rno*.

    Raises:
        Exception: if *rno* is not a known release.
    """
    if rno not in releases:
        raise Exception("unknown version %s" % rno)
    return releases[rno]


def stop_cluster():
    """Stop the running cluster with ``gpstop -a``; raise on non-zero exit.

    BUG FIX: the original body read ``p.subprocess.Popen(...)``, which raised
    NameError at call time because ``p`` was never assigned.  The process
    handle is now correctly bound to ``p`` before ``communicate()`` is called.
    """
    p = subprocess.Popen(['gpstop', '-a'], shell=False, close_fds=True,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    result = p.communicate()
    if p.returncode != 0:
        raise Exception("could not stop cluster: " + result[0] + result[1])


def get_control_data(datadir):
    """Parse the output of ``pg_controldata`` run on *datadir*.

    Returns:
        list: ``[catalog_version, cluster_state]`` as stripped strings
        (empty strings if the corresponding line is absent).

    Raises:
        Exception: if ``pg_controldata`` exits non-zero.
    """
    cmd = ['pg_controldata', datadir]
    p = subprocess.Popen(cmd, shell=False, close_fds=True,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    result = p.communicate()
    if p.returncode != 0:
        raise Exception("error running " + ' '.join(cmd) + ": " +
                        result[0] + result[1])
    out = result[0].strip()
    ver = ""
    state = ""
    # pg_controldata output is "Label: value" per line; pick the two
    # fields we care about.
    for line in out.split('\n'):
        s = line.split(':')
        if s[0] == 'Catalog version number':
            ver = s[1].strip()
        elif s[0] == 'Database cluster state':
            state = s[1].strip()
    return [ver, state]


def setcatversion(datadir, frm, to):
    """Set catalog version of *datadir* to release *to*, expecting *frm*.

    Verifies via pg_controldata that the data directory currently carries
    the catalog version of release *frm* before invoking gpmodcatversion.

    Raises:
        Exception: if the current version does not match *frm*, or if
        gpmodcatversion exits non-zero.
    """
    (ver, state) = get_control_data(datadir)
    frmcatverno = release2catverno(frm)
    if ver != frmcatverno:
        raise Exception("Expected version %s but found %s" %
                        (frmcatverno, ver))
    # NOTE(review): hard-coded developer path carried over from the
    # original script — presumably meant to be parameterized; confirm.
    cmd = ['/Users/swm/greenplum-db-devel/bin/lib/gpmodcatversion',
           '--catversion', to, datadir]
    p = subprocess.Popen(cmd, shell=False, close_fds=True,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    result = p.communicate()
    if p.returncode != 0:
        raise Exception("could not update catalog to %s" % to)


if __name__ == '__main__':
    paths = ['/Users/swm/greenplum-db-devel/upg/upgradetest-1',
             '/Users/swm/greenplum-db-devel/upg/upgradetest1',
             '/Users/swm/greenplum-db-devel/upg/upgradetest0']
    for p in paths:
        setcatversion(p, '3.2', '3.3')
apache-2.0
evamwangi/bc-7-Todo_List
venv/Lib/site-packages/setuptools/command/install_egg_info.py
112
4035
from distutils import log, dir_util
import os

from setuptools.extern.six.moves import map

from setuptools import Command
from setuptools.archive_util import unpack_archive
import pkg_resources


class install_egg_info(Command):
    """Install an .egg-info directory for the package"""

    description = "Install an .egg-info directory for the package"

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        # Filled in by finalize_options from the install_lib command.
        self.install_dir = None

    def finalize_options(self):
        # Inherit the installation directory from install_lib, then derive
        # the target ".egg-info" directory name from the egg_info command's
        # project name and version.
        self.set_undefined_options('install_lib',
                                   ('install_dir', 'install_dir'))
        ei_cmd = self.get_finalized_command("egg_info")
        basename = pkg_resources.Distribution(
            None, None, ei_cmd.egg_name, ei_cmd.egg_version
        ).egg_name() + '.egg-info'
        self.source = ei_cmd.egg_info
        self.target = os.path.join(self.install_dir, basename)
        self.outputs = []

    def run(self):
        """Regenerate egg metadata, clear any stale target, copy it over."""
        self.run_command('egg_info')
        # Remove any pre-existing metadata at the target (tree or file),
        # but never follow a symlink into someone else's tree.
        if os.path.isdir(self.target) and not os.path.islink(self.target):
            dir_util.remove_tree(self.target, dry_run=self.dry_run)
        elif os.path.exists(self.target):
            self.execute(os.unlink, (self.target,), "Removing " + self.target)
        if not self.dry_run:
            pkg_resources.ensure_directory(self.target)
        self.execute(
            self.copytree, (), "Copying %s to %s" % (self.source, self.target)
        )
        self.install_namespaces()

    def get_outputs(self):
        """Return the list of files written so far (for --record support)."""
        return self.outputs

    def copytree(self):
        # Copy the .egg-info tree to site-packages
        def skimmer(src, dst):
            # filter out source-control directories; note that 'src' is always
            # a '/'-separated path, regardless of platform.  'dst' is a
            # platform-specific path.
            for skip in '.svn/', 'CVS/':
                if src.startswith(skip) or '/' + skip in src:
                    return None
            self.outputs.append(dst)
            log.debug("Copying %s to %s", src, dst)
            return dst

        unpack_archive(self.source, self.target, skimmer)

    def install_namespaces(self):
        """Write a ``<dist>-nspkg.pth`` file for declared namespace packages."""
        nsp = self._get_all_ns_packages()
        if not nsp:
            return
        filename, ext = os.path.splitext(self.target)
        filename += '-nspkg.pth'
        self.outputs.append(filename)
        log.info("Installing %s", filename)
        lines = map(self._gen_nspkg_line, nsp)

        if self.dry_run:
            # always generate the lines, even in dry run
            list(lines)
            return

        with open(filename, 'wt') as f:
            f.writelines(lines)

    # Template statements executed by site.py when the .pth file is read;
    # they register a stub module for each namespace package.
    _nspkg_tmpl = (
        "import sys, types, os",
        "p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)",
        "ie = os.path.exists(os.path.join(p,'__init__.py'))",
        "m = not ie and "
        "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
        "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
        "(p not in mp) and mp.append(p)",
    )
    "lines for the namespace installer"

    _nspkg_tmpl_multi = (
        'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
    )
    "additional line(s) when a parent package is indicated"

    @classmethod
    def _gen_nspkg_line(cls, pkg):
        """Render one semicolon-joined .pth line for namespace package *pkg*."""
        # ensure pkg is not a unicode string under Python 2.7
        pkg = str(pkg)
        pth = tuple(pkg.split('.'))
        tmpl_lines = cls._nspkg_tmpl
        parent, sep, child = pkg.rpartition('.')
        if parent:
            # Dotted package: also attach the child module onto its parent.
            tmpl_lines += cls._nspkg_tmpl_multi
        return ';'.join(tmpl_lines) % locals() + '\n'

    def _get_all_ns_packages(self):
        """Return sorted list of all package namespaces"""
        nsp = set()
        # Include every ancestor of each declared namespace package,
        # e.g. 'a.b.c' contributes 'a', 'a.b' and 'a.b.c'.
        for pkg in self.distribution.namespace_packages or []:
            pkg = pkg.split('.')
            while pkg:
                nsp.add('.'.join(pkg))
                pkg.pop()
        return sorted(nsp)
mit
zbqf109/goodo
openerp/report/render/html2html/html2html.py
49
3282
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.

# Renders an HTML report template (an lxml/ElementTree tree) by expanding
# [[ ... ]] placeholder expressions against a local context.  Python 2 code
# (cStringIO, base64.decodestring).

from openerp.report.render.rml2pdf import utils
import copy
import base64
import cStringIO
import re

from reportlab.lib.utils import ImageReader

# Matches [[expr]] placeholders; the module-global utils._regex is widened
# to tolerate surrounding whitespace and newlines inside the brackets.
_regex = re.compile('\[\[(.+?)\]\]')
utils._regex = re.compile('\[\[\s*(.+?)\s*\]\]',re.DOTALL)


class html2html(object):
    """Expand placeholders in an HTML etree against *localcontext*."""

    def __init__(self, html, localcontext):
        self.localcontext = localcontext
        self.etree = html
        self._node = None  # populated by render()

    def render(self):
        """Return a deep-copied tree with all [[..]] expressions evaluated."""
        def process_text(node, new_node):
            # Container tags carry template-only attributes; drop them.
            if new_node.tag in ['story', 'tr', 'section']:
                new_node.attrib.clear()
            for child in utils._child_get(node, self):
                new_child = copy.deepcopy(child)
                new_node.append(new_child)
                if len(child):
                    # Branch node: evaluate its own text/tail, clear the
                    # copied grandchildren, then recurse to rebuild them.
                    for n in new_child:
                        new_child.text = utils._process_text(self, child.text)
                        new_child.tail = utils._process_text(self, child.tail)
                        new_child.remove(n)
                    process_text(child, new_child)
                else:
                    # Leaf node: an <img name="[[expr]]"> is resolved to an
                    # inline base64 data URI; if the expression evaluates to
                    # nothing, the image element is dropped entirely.
                    if new_child.tag == 'img' and new_child.get('name'):
                        if _regex.findall(new_child.get('name')):
                            src = utils._process_text(self, new_child.get('name'))
                            if src:
                                new_child.set('src', 'data:image/gif;base64,%s' % src)
                                output = cStringIO.StringIO(base64.decodestring(src))
                                img = ImageReader(output)
                                (width, height) = img.getSize()
                                # Fill in intrinsic dimensions only when the
                                # template did not specify them.
                                if not new_child.get('width'):
                                    new_child.set('width', str(width))
                                if not new_child.get('height'):
                                    new_child.set('height', str(height))
                            else:
                                new_child.getparent().remove(new_child)
                    new_child.text = utils._process_text(self, child.text)
                    new_child.tail = utils._process_text(self, child.tail)

        # Start from a childless copy of the root and rebuild it.
        self._node = copy.deepcopy(self.etree)
        for n in self._node:
            self._node.remove(n)
        process_text(self.etree, self._node)
        return self._node

    def url_modify(self, root):
        """Recursively inject inline color styling into <a> tags.

        Only anchors that have an href and no style yet are touched; the
        color is inherited from the nearest styled ancestor (span/u).
        """
        for n in root:
            if (n.text.find('<a ') >= 0 or n.text.find('&lt;a') >= 0) and n.text.find('href') >= 0 and n.text.find('style') <= 0:
                # Walk up through span/u wrappers to find the element that
                # carries the color attribute, falling back to n itself.
                node = (n.tag == 'span' and n.getparent().tag == 'u') and n.getparent().getparent() or ((n.tag == 'span') and n.getparent()) or n
                style = node.get('color') and "style='color:%s; text-decoration: none;'" % node.get('color') or ''
                if n.text.find('&lt;a') >= 0:
                    t = '&lt;a '
                else:
                    t = '<a '
                href = n.text.split(t)[-1]
                n.text = ' '.join([t, style, href])
            self.url_modify(n)
        return root


def parseString(node, localcontext={}):
    # NOTE(review): mutable default kept for API compatibility; it is only
    # read here, never mutated.
    r = html2html(node, localcontext)
    root = r.render()
    root = r.url_modify(root)
    return root
gpl-3.0
mats116/gae-boilerplate
bp_includes/external/requests/packages/urllib3/packages/ordered_dict.py
1093
8936
# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
# Copyright 2009 Raymond Hettinger, released under the MIT License.
# http://code.activestate.com/recipes/576693/
try:
    from thread import get_ident as _get_ident
except ImportError:
    # dummy_thread provides the same API on builds without threads.
    from dummy_thread import get_ident as _get_ident

try:
    from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
    # Pre-2.6: the view methods at the bottom simply won't be usable.
    pass


class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as for regular
    # dictionaries.

    # The internal self.__map dictionary maps keys to links in a doubly
    # linked list.  The circular doubly linked list starts and ends with a
    # sentinel element.  The sentinel element never gets deleted (this
    # simplifies the algorithm).  Each link is stored as a list of length
    # three:  [PREV, NEXT, KEY].

    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary.  Signature is the same as for
        regular dictionaries, but keyword arguments are not recommended
        because their insertion order is arbitrary.

        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            self.__root
        except AttributeError:
            self.__root = root = []                     # sentinel node
            root[:] = [root, root, None]
            self.__map = {}
        self.__update(*args, **kwds)

    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link which goes at the end of the
        # linked list, and the inherited dictionary is updated with the new
        # key/value pair.
        if key not in self:
            root = self.__root
            last = root[0]
            last[1] = root[0] = self.__map[key] = [last, root, key]
        dict_setitem(self, key, value)

    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which is
        # then removed by updating the links in the predecessor and successor
        # nodes.
        dict_delitem(self, key)
        link_prev, link_next, key = self.__map.pop(key)
        link_prev[1] = link_next
        link_next[0] = link_prev

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        # Traverse the linked list in insertion order, yielding keys.
        root = self.__root
        curr = root[1]
        while curr is not root:
            yield curr[2]
            curr = curr[1]

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        # Same traversal as __iter__, following PREV pointers instead.
        root = self.__root
        curr = root[0]
        while curr is not root:
            yield curr[2]
            curr = curr[0]

    def clear(self):
        'od.clear() -> None.  Remove all items from od.'
        try:
            # Break the link cycles so the garbage collector has less work.
            for node in self.__map.itervalues():
                del node[:]
            root = self.__root
            root[:] = [root, root, None]
            self.__map.clear()
        except AttributeError:
            pass
        dict.clear(self)

    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if
        false.

        '''
        if not self:
            raise KeyError('dictionary is empty')
        root = self.__root
        if last:
            # Unlink the tail node (most recently inserted key).
            link = root[0]
            link_prev = link[0]
            link_prev[1] = root
            root[0] = link_prev
        else:
            # Unlink the head node (oldest key).
            link = root[1]
            link_next = link[1]
            root[1] = link_next
            link_next[0] = root
        key = link[2]
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value

    # -- the following methods do not depend on the internal structure --

    def keys(self):
        'od.keys() -> list of keys in od'
        return list(self)

    def values(self):
        'od.values() -> list of values in od'
        return [self[key] for key in self]

    def items(self):
        'od.items() -> list of (key, value) pairs in od'
        return [(key, self[key]) for key in self]

    def iterkeys(self):
        'od.iterkeys() -> an iterator over the keys in od'
        return iter(self)

    def itervalues(self):
        'od.itervalues -> an iterator over the values in od'
        for k in self:
            yield self[k]

    def iteritems(self):
        'od.iteritems -> an iterator over the (key, value) items in od'
        for k in self:
            yield (k, self[k])

    # Deliberately defined without an explicit 'self' so that a key named
    # "self" can still be passed via **kwds (matches dict.update semantics).
    def update(*args, **kwds):
        '''od.update(E, **F) -> None.  Update od from dict/iterable E and F.

        If E is a dict instance, does:           for k in E: od[k] = E[k]
        If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
        Or if E is an iterable of items, does:   for k, v in E: od[k] = v
        In either case, this is followed by:     for k, v in F.items(): od[k] = v

        '''
        if len(args) > 2:
            raise TypeError('update() takes at most 2 positional '
                            'arguments (%d given)' % (len(args),))
        elif not args:
            raise TypeError('update() takes at least 1 argument (0 given)')
        self = args[0]
        # Make progressively weaker assumptions about "other"
        other = ()
        if len(args) == 2:
            other = args[1]
        if isinstance(other, dict):
            for key in other:
                self[key] = other[key]
        elif hasattr(other, 'keys'):
            for key in other.keys():
                self[key] = other[key]
        else:
            for key, value in other:
                self[key] = value
        for key, value in kwds.items():
            self[key] = value

    __update = update  # let subclasses override update without breaking __init__

    __marker = object()

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the
        corresponding value.  If key is not found, d is returned if given,
        otherwise KeyError is raised.

        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default

    def setdefault(self, key, default=None):
        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
        if key in self:
            return self[key]
        self[key] = default
        return default

    def __repr__(self, _repr_running={}):
        'od.__repr__() <==> repr(od)'
        # _repr_running is a deliberate shared mutable default: it memoizes
        # (id, thread) pairs to break recursion on self-referential dicts.
        call_key = id(self), _get_ident()
        if call_key in _repr_running:
            return '...'
        _repr_running[call_key] = 1
        try:
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())
        finally:
            del _repr_running[call_key]

    def __reduce__(self):
        'Return state information for pickling'
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        # Strip the private bookkeeping attributes; they are rebuilt on load.
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
        and values equal to v (which defaults to None).

        '''
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y.  Comparison to another OD is
        order-sensitive while comparison to a regular mapping is
        order-insensitive.

        '''
        if isinstance(other, OrderedDict):
            return len(self)==len(other) and self.items() == other.items()
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other

    # -- the following methods are only used in Python 2.7 --

    def viewkeys(self):
        "od.viewkeys() -> a set-like object providing a view on od's keys"
        return KeysView(self)

    def viewvalues(self):
        "od.viewvalues() -> an object providing a view on od's values"
        return ValuesView(self)

    def viewitems(self):
        "od.viewitems() -> a set-like object providing a view on od's items"
        return ItemsView(self)
lgpl-3.0
kevin-coder/tensorflow-fork
tensorflow/contrib/learn/python/learn/graph_actions_test.py
23
26409
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Graph actions tests.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import shutil import tempfile from tensorflow.contrib import testing from tensorflow.contrib.framework.python.framework import checkpoint_utils from tensorflow.contrib.framework.python.ops import variables as variables_lib from tensorflow.contrib.learn.python import learn from tensorflow.contrib.learn.python.learn.monitors import BaseMonitor from tensorflow.python.framework import constant_op from tensorflow.python.framework import ops from tensorflow.python.framework import test_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import resources from tensorflow.python.ops import state_ops from tensorflow.python.ops import variables from tensorflow.python.platform import test from tensorflow.python.summary import summary from tensorflow.python.training import checkpoint_management from tensorflow.python.training import saver as saver_lib class _Feeder(object): """Simple generator for `feed_fn`, returning 10 * step.""" def __init__(self, tensor, max_step): self._step = 0 self._tensor = tensor self._max_step = max_step @property def step(self): return self._step def feed_fn(self): if self._step >= self._max_step: raise 
StopIteration value = self._step * 10.0 self._step += 1 return {self._tensor: value} class _BaseMonitorWrapper(BaseMonitor): """Base monitor wrapper to facilitate testing. This monitor can act as either chief-exclusive or non-exclusive. """ def __init__(self, run_on_all_workers): super(_BaseMonitorWrapper, self).__init__() self._run_on_all_workers = run_on_all_workers self._is_active = False self._has_step = False @property def run_on_all_workers(self): return self._run_on_all_workers @property def is_active(self): return self._is_active @property def has_step(self): return self._has_step def begin(self, max_steps=None): self._is_active = True return super(_BaseMonitorWrapper, self).begin(max_steps) def step_begin(self, step): self._has_step = True return super(_BaseMonitorWrapper, self).step_begin(step) class GraphActionsTest(test.TestCase): """Graph actions tests.""" def setUp(self): learn.graph_actions.clear_summary_writers() self._output_dir = tempfile.mkdtemp() testing.FakeSummaryWriter.install() def tearDown(self): testing.FakeSummaryWriter.uninstall() if self._output_dir: shutil.rmtree(self._output_dir) learn.graph_actions.clear_summary_writers() def _assert_summaries(self, output_dir, writer, expected_summaries=None, expected_graphs=None, expected_meta_graphs=None, expected_session_logs=None): self.assertTrue(isinstance(writer, testing.FakeSummaryWriter)) writer.assert_summaries( self, expected_logdir=output_dir, expected_graph=ops.get_default_graph(), expected_summaries=expected_summaries, expected_added_graphs=expected_graphs, expected_added_meta_graphs=expected_meta_graphs, expected_session_logs=expected_session_logs) # TODO(ptucker): Test number and contents of checkpoint files. 
def _assert_ckpt(self, output_dir, expected=True): ckpt_state = checkpoint_management.get_checkpoint_state(output_dir) if expected: pattern = '%s/model.ckpt-.*' % output_dir primary_ckpt_path = ckpt_state.model_checkpoint_path self.assertRegexpMatches(primary_ckpt_path, pattern) all_ckpt_paths = ckpt_state.all_model_checkpoint_paths self.assertTrue(primary_ckpt_path in all_ckpt_paths) for ckpt_path in all_ckpt_paths: self.assertRegexpMatches(ckpt_path, pattern) else: self.assertTrue(ckpt_state is None) # TODO(ptucker): Test lock, multi-threaded access? def test_summary_writer(self): writer = learn.graph_actions.get_summary_writer('log/dir/0') self._assert_summaries('log/dir/0', writer) self.assertTrue( learn.graph_actions.get_summary_writer('log/dir/0') is learn.graph_actions.get_summary_writer('log/dir/0')) self.assertTrue( learn.graph_actions.get_summary_writer('log/dir/0') is not learn.graph_actions.get_summary_writer('log/dir/1')) # TODO(ptucker): Test restore_checkpoint_path for eval; this should obsolete # test_evaluate_with_saver(). # TODO(ptucker): Test start_queue_runners for both eval & train. # TODO(ptucker): Test coord.request_stop & coord.join for eval. def _build_inference_graph(self): """Build simple inference graph. This includes a regular variable, local variable, and fake table. Returns: Tuple of 3 `Tensor` objects, 2 input and 1 output. 
""" variables_lib.create_global_step() in0 = variables.VariableV1(1.0) in1 = variables_lib.local_variable(2.0) fake_table = variables.VariableV1( 3.0, trainable=False, collections=['fake_tables'], name='fake_table_var') in0.graph.add_to_collections([ops.GraphKeys.TABLE_INITIALIZERS], fake_table.initializer) out = in0 + in1 + fake_table return in0, in1, out def test_infer(self): with ops.Graph().as_default() as g, self.session(g): self._assert_ckpt(self._output_dir, False) in0, in1, out = self._build_inference_graph() self.assertEqual({ 'a': 1.0, 'b': 2.0, 'c': 6.0 }, learn.graph_actions.infer(None, {'a': in0, 'b': in1, 'c': out})) self._assert_ckpt(self._output_dir, False) @test.mock.patch.object( learn.graph_actions.coordinator.Coordinator, 'request_stop', side_effect=learn.graph_actions.coordinator.Coordinator.request_stop, autospec=True) def test_coordinator_request_stop_called(self, request_stop): with ops.Graph().as_default() as g, self.session(g): in0, in1, out = self._build_inference_graph() learn.graph_actions.infer(None, {'a': in0, 'b': in1, 'c': out}) self.assertTrue(request_stop.called) @test.mock.patch.object( learn.graph_actions.coordinator.Coordinator, 'request_stop', side_effect=learn.graph_actions.coordinator.Coordinator.request_stop, autospec=True) def test_run_feeds_iter_cleanup_with_exceptions(self, request_stop): with ops.Graph().as_default() as g, self.session(g): in0, in1, out = self._build_inference_graph() try: for _ in learn.graph_actions.run_feeds_iter({ 'a': in0, 'b': in1, 'c': out }, [None] * 3): self.assertFalse(request_stop.called) raise ValueError('Fake exception') except ValueError: pass self.assertTrue(request_stop.called) def test_run_feeds_iter_calls_resources_init(self): with ops.Graph().as_default(): in0, _, _ = self._build_inference_graph() handle = test_ops.stub_resource_handle_op(container='a', shared_name='b') resources.register_resource( handle=handle, create_op=test_ops.resource_create_op(handle), 
is_initialized_op=test_ops.resource_initialized_op(handle)) for _ in learn.graph_actions.run_feeds_iter( { 'in0': in0 }, feed_dicts=[{}]): self.assertTrue(test_ops.resource_initialized_op(handle).eval()) def test_infer_different_default_graph(self): with self.cached_session(): self._assert_ckpt(self._output_dir, False) with ops.Graph().as_default(): in0, in1, out = self._build_inference_graph() with ops.Graph().as_default(): self.assertEqual({ 'a': 1.0, 'b': 2.0, 'c': 6.0 }, learn.graph_actions.infer(None, {'a': in0, 'b': in1, 'c': out})) self._assert_ckpt(self._output_dir, False) def test_infer_invalid_feed(self): with ops.Graph().as_default() as g, self.session(g): self._assert_ckpt(self._output_dir, False) in0, _, _ = self._build_inference_graph() with self.assertRaisesRegexp(TypeError, 'Can not convert a NoneType'): learn.graph_actions.infer(None, {'a': in0}, feed_dict={None: 4.0}) self._assert_ckpt(self._output_dir, False) def test_infer_feed(self): with ops.Graph().as_default() as g, self.session(g): self._assert_ckpt(self._output_dir, False) in0, _, out = self._build_inference_graph() self.assertEqual( { 'c': 9.0 }, learn.graph_actions.infer( None, {'c': out}, feed_dict={in0: 4.0})) self._assert_ckpt(self._output_dir, False) # TODO(ptucker): Test eval for 1 epoch. 
def test_evaluate_invalid_args(self): with ops.Graph().as_default() as g, self.session(g): self._assert_ckpt(self._output_dir, False) with self.assertRaisesRegexp(ValueError, 'utput directory'): learn.graph_actions.evaluate( g, output_dir=None, checkpoint_path=None, eval_dict={'a': constant_op.constant(1.0)}) with self.assertRaisesRegexp(ValueError, 'utput directory'): learn.graph_actions.evaluate( g, output_dir='', checkpoint_path=None, eval_dict={'a': constant_op.constant(1.0)}) self._assert_ckpt(self._output_dir, False) def test_evaluate(self): with ops.Graph().as_default() as g, self.session(g): _, _, out = self._build_inference_graph() writer = learn.graph_actions.get_summary_writer(self._output_dir) self._assert_summaries(self._output_dir, writer, expected_session_logs=[]) self._assert_ckpt(self._output_dir, False) results = learn.graph_actions.evaluate( g, output_dir=self._output_dir, checkpoint_path=None, eval_dict={'a': out}, max_steps=1) self.assertEqual(({'a': 6.0}, 0), results) self._assert_summaries( self._output_dir, writer, expected_summaries={0: { 'a': 6.0 }}, expected_session_logs=[]) self._assert_ckpt(self._output_dir, False) def test_evaluate_ready_for_local_init(self): with ops.Graph().as_default() as g, self.session(g): variables_lib.create_global_step() v = variables.VariableV1(1.0) variables.VariableV1( v + 1, collections=[ops.GraphKeys.LOCAL_VARIABLES], trainable=False) ready_for_local_init_op = variables.report_uninitialized_variables( variables.global_variables()) ops.add_to_collection(ops.GraphKeys.READY_FOR_LOCAL_INIT_OP, ready_for_local_init_op) _ = learn.graph_actions.evaluate( g, output_dir=self._output_dir, checkpoint_path=None, eval_dict={'a': v}, max_steps=1) def test_evaluate_feed_fn(self): with ops.Graph().as_default() as g, self.session(g): in0, _, out = self._build_inference_graph() writer = learn.graph_actions.get_summary_writer(self._output_dir) self._assert_summaries(self._output_dir, writer, expected_session_logs=[]) 
self._assert_ckpt(self._output_dir, False) feeder = _Feeder(in0, 3) results = learn.graph_actions.evaluate( g, output_dir=self._output_dir, checkpoint_path=None, eval_dict={'a': out}, feed_fn=feeder.feed_fn, max_steps=3) self.assertEqual(3, feeder.step) self.assertEqual(({'a': 25.0}, 0), results) self._assert_summaries( self._output_dir, writer, expected_summaries={0: { 'a': 25.0 }}, expected_session_logs=[]) self._assert_ckpt(self._output_dir, False) def test_evaluate_feed_fn_with_exhaustion(self): with ops.Graph().as_default() as g, self.session(g): in0, _, out = self._build_inference_graph() writer = learn.graph_actions.get_summary_writer(self._output_dir) self._assert_summaries(self._output_dir, writer, expected_session_logs=[]) feeder = _Feeder(in0, 2) results = learn.graph_actions.evaluate( g, output_dir=self._output_dir, checkpoint_path=None, eval_dict={'a': out}, feed_fn=feeder.feed_fn, max_steps=3) self.assertEqual(2, feeder.step) self.assertEqual(({'a': 15.0}, 0), results) self._assert_summaries( self._output_dir, writer, expected_summaries={0: { 'a': 15.0 }}, expected_session_logs=[]) def test_evaluate_with_saver(self): with ops.Graph().as_default() as g, self.session(g): _, _, out = self._build_inference_graph() ops.add_to_collection(ops.GraphKeys.SAVERS, saver_lib.Saver()) writer = learn.graph_actions.get_summary_writer(self._output_dir) self._assert_summaries(self._output_dir, writer, expected_session_logs=[]) results = learn.graph_actions.evaluate( g, output_dir=self._output_dir, checkpoint_path=None, eval_dict={'a': out}, max_steps=1) self.assertEqual(({'a': 6.0}, 0), results) self._assert_summaries( self._output_dir, writer, expected_summaries={0: { 'a': 6.0 }}, expected_session_logs=[]) # TODO(ptucker): Resume training from previous ckpt. # TODO(ptucker): !supervisor_is_chief # TODO(ptucker): Custom init op for training. # TODO(ptucker): Mock supervisor, and assert all interactions. # TODO(ispir): remove following tests after deprecated train. 
class GraphActionsTrainTest(test.TestCase): """Tests for train.""" def setUp(self): learn.graph_actions.clear_summary_writers() self._output_dir = tempfile.mkdtemp() testing.FakeSummaryWriter.install() def tearDown(self): testing.FakeSummaryWriter.uninstall() if self._output_dir: shutil.rmtree(self._output_dir) learn.graph_actions.clear_summary_writers() def _assert_summaries(self, output_dir, expected_summaries=None, expected_graphs=None, expected_meta_graphs=None, expected_session_logs=None): writer = learn.graph_actions.get_summary_writer(output_dir) self.assertTrue(isinstance(writer, testing.FakeSummaryWriter)) writer.assert_summaries( self, expected_logdir=output_dir, expected_graph=ops.get_default_graph(), expected_summaries=expected_summaries, expected_added_graphs=expected_graphs, expected_added_meta_graphs=expected_meta_graphs, expected_session_logs=expected_session_logs) # TODO(ptucker): Test number and contents of checkpoint files. def _assert_ckpt(self, output_dir, expected=True): ckpt_state = checkpoint_management.get_checkpoint_state(output_dir) if expected: pattern = '%s/model.ckpt-.*' % output_dir primary_ckpt_path = ckpt_state.model_checkpoint_path self.assertRegexpMatches(primary_ckpt_path, pattern) all_ckpt_paths = ckpt_state.all_model_checkpoint_paths self.assertTrue(primary_ckpt_path in all_ckpt_paths) for ckpt_path in all_ckpt_paths: self.assertRegexpMatches(ckpt_path, pattern) else: self.assertTrue(ckpt_state is None) def _build_inference_graph(self): """Build simple inference graph. This includes a regular variable, local variable, and fake table. Returns: Tuple of 3 `Tensor` objects, 2 input and 1 output. 
""" variables_lib.create_global_step() in0 = variables.VariableV1(1.0) in1 = variables_lib.local_variable(2.0) fake_table = variables.VariableV1( 3.0, trainable=False, collections=['fake_tables'], name='fake_table_var') in0.graph.add_to_collections([ops.GraphKeys.TABLE_INITIALIZERS], fake_table.initializer) out = in0 + in1 + fake_table return in0, in1, out def test_train_invalid_args(self): with ops.Graph().as_default() as g, self.session(g): train_op = constant_op.constant(1.0) loss_op = constant_op.constant(2.0) with self.assertRaisesRegexp(ValueError, 'utput directory'): learn.graph_actions.train( g, output_dir=None, train_op=train_op, loss_op=loss_op) with self.assertRaisesRegexp(ValueError, 'utput directory'): learn.graph_actions.train( g, output_dir='', train_op=constant_op.constant(1.0), loss_op=constant_op.constant(2.0)) with self.assertRaisesRegexp(ValueError, 'train_op'): learn.graph_actions.train( g, output_dir=self._output_dir, train_op=None, loss_op=loss_op) with self.assertRaisesRegexp(ValueError, 'loss_op'): learn.graph_actions.train( g, output_dir=self._output_dir, train_op=constant_op.constant(1.0), loss_op=None) with self.assertRaisesRegexp(ValueError, 'global_step'): learn.graph_actions.train( g, output_dir=self._output_dir, train_op=constant_op.constant(1.0), loss_op=loss_op) # TODO(ptucker): Resume training from previous ckpt. # TODO(ptucker): !supervisor_is_chief # TODO(ptucker): Custom init op for training. # TODO(ptucker): Mock supervisor, and assert all interactions. 
def test_train(self): with ops.Graph().as_default() as g, self.session(g): with ops.control_dependencies(self._build_inference_graph()): train_op = state_ops.assign_add(variables_lib.get_global_step(), 1) self._assert_summaries(self._output_dir) self._assert_ckpt(self._output_dir, False) loss = learn.graph_actions.train( g, output_dir=self._output_dir, train_op=train_op, loss_op=constant_op.constant(2.0), steps=1) # TODO(ebrevdo,ptucker,ispir): this meta_graph_def lacks the # SaverDef, so we can't add it to the summary assertion test below. # meta_graph_def = meta_graph.create_meta_graph_def() self.assertEqual(2.0, loss) self._assert_summaries(self._output_dir, expected_graphs=[g]) self._assert_ckpt(self._output_dir, True) def test_train_steps_is_incremental(self): with ops.Graph().as_default() as g, self.session(g): with ops.control_dependencies(self._build_inference_graph()): train_op = state_ops.assign_add(variables_lib.get_global_step(), 1) learn.graph_actions.train( g, output_dir=self._output_dir, train_op=train_op, loss_op=constant_op.constant(2.0), steps=10) step = checkpoint_utils.load_variable( self._output_dir, variables_lib.get_global_step().name) self.assertEqual(10, step) with ops.Graph().as_default() as g, self.session(g): with ops.control_dependencies(self._build_inference_graph()): train_op = state_ops.assign_add(variables_lib.get_global_step(), 1) learn.graph_actions.train( g, output_dir=self._output_dir, train_op=train_op, loss_op=constant_op.constant(2.0), steps=15) step = checkpoint_utils.load_variable( self._output_dir, variables_lib.get_global_step().name) self.assertEqual(25, step) def test_train_max_steps_is_not_incremental(self): with ops.Graph().as_default() as g, self.session(g): with ops.control_dependencies(self._build_inference_graph()): train_op = state_ops.assign_add(variables_lib.get_global_step(), 1) learn.graph_actions.train( g, output_dir=self._output_dir, train_op=train_op, loss_op=constant_op.constant(2.0), max_steps=10) step = 
checkpoint_utils.load_variable( self._output_dir, variables_lib.get_global_step().name) self.assertEqual(10, step) with ops.Graph().as_default() as g, self.session(g): with ops.control_dependencies(self._build_inference_graph()): train_op = state_ops.assign_add(variables_lib.get_global_step(), 1) learn.graph_actions.train( g, output_dir=self._output_dir, train_op=train_op, loss_op=constant_op.constant(2.0), max_steps=15) step = checkpoint_utils.load_variable( self._output_dir, variables_lib.get_global_step().name) self.assertEqual(15, step) def test_train_loss(self): with ops.Graph().as_default() as g, self.session(g): variables_lib.create_global_step() loss_var = variables_lib.local_variable(10.0) train_op = control_flow_ops.group( state_ops.assign_add(variables_lib.get_global_step(), 1), state_ops.assign_add(loss_var, -1.0)) self._assert_summaries(self._output_dir) self._assert_ckpt(self._output_dir, False) loss = learn.graph_actions.train( g, output_dir=self._output_dir, train_op=train_op, loss_op=loss_var.value(), steps=6) # TODO(ebrevdo,ptucker,ispir): this meta_graph_def lacks the # SaverDef, so we can't add it to the summary assertion test below. # meta_graph_def = meta_graph.create_meta_graph_def() self.assertEqual(4.0, loss) self._assert_summaries(self._output_dir, expected_graphs=[g]) self._assert_ckpt(self._output_dir, True) def test_train_summaries(self): with ops.Graph().as_default() as g, self.session(g): with ops.control_dependencies(self._build_inference_graph()): train_op = state_ops.assign_add(variables_lib.get_global_step(), 1) loss_op = constant_op.constant(2.0) summary.scalar('loss', loss_op) self._assert_summaries(self._output_dir) self._assert_ckpt(self._output_dir, False) loss = learn.graph_actions.train( g, output_dir=self._output_dir, train_op=train_op, loss_op=loss_op, steps=1) # TODO(ebrevdo,ptucker,ispir): this meta_graph_def lacks the # SaverDef, so we can't add it to the summary assertion test below. 
# meta_graph_def = meta_graph.create_meta_graph_def() self.assertEqual(2.0, loss) self._assert_summaries( self._output_dir, expected_graphs=[g], expected_summaries={1: { 'loss': 2.0 }}) self._assert_ckpt(self._output_dir, True) def test_train_chief_monitor(self): with ops.Graph().as_default() as g, self.session(g): with ops.control_dependencies(self._build_inference_graph()): train_op = state_ops.assign_add(variables_lib.get_global_step(), 1) loss_op = constant_op.constant(2.0) summary.scalar('loss', loss_op) chief_exclusive_monitor = _BaseMonitorWrapper(False) all_workers_monitor = _BaseMonitorWrapper(True) loss = learn.graph_actions.train( g, output_dir=self._output_dir, train_op=train_op, loss_op=loss_op, supervisor_is_chief=True, steps=1, monitors=[chief_exclusive_monitor, all_workers_monitor]) self.assertEqual(2.0, loss) self.assertTrue(chief_exclusive_monitor.is_active and all_workers_monitor.is_active, 'All monitors must have been active.') self.assertTrue(chief_exclusive_monitor.has_step and all_workers_monitor.has_step, 'All monitors must have a step.') def test_train_worker_monitor(self): # We need to explicitly set device due to check on non-chief workers # requiring all variables to have a device assigned. with ops.Graph().as_default() as g, g.device('/cpu:0'): global_step = variables_lib.create_global_step(g) train_op = state_ops.assign_add(global_step, 1) loss_op = constant_op.constant(2.0) summary.scalar('loss', loss_op) # Add explicit "local" init op to initialize all variables # as there's no chief to init here. init_op = variables.global_variables_initializer() ops.add_to_collection(ops.GraphKeys.LOCAL_INIT_OP, init_op) # Create worker monitors where one should be active on the worker # and the other chief exclusive. 
chief_exclusive_monitor = _BaseMonitorWrapper(False) all_workers_monitor = _BaseMonitorWrapper(True) with self.session(g): loss = learn.graph_actions.train( g, output_dir=self._output_dir, global_step_tensor=global_step, train_op=train_op, loss_op=loss_op, supervisor_is_chief=False, steps=1, monitors=[chief_exclusive_monitor, all_workers_monitor]) self.assertEqual(2.0, loss) self.assertTrue(not chief_exclusive_monitor.is_active and all_workers_monitor.is_active, 'Only non-chief runnable monitor must have been active.') self.assertTrue(not chief_exclusive_monitor.has_step and all_workers_monitor.has_step, 'Only non-chief runnable monitor must have a step.') if __name__ == '__main__': test.main()
apache-2.0
mlperf/inference_results_v0.7
closed/QCT/code/rnnt/tensorrt/calibrator.py
12
4744
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import tensorrt as trt
import os
import numpy as np
import pycuda.driver as cuda
import pycuda.autoinit
from tqdm import tqdm


class RNNTCalibrator(trt.IInt8MinMaxCalibrator):
    """Min-max INT8 calibrator that feeds (audio, sequence-length) batches to TensorRT.

    Loads pre-processed .npy samples listed in `data_map` (one file stem per
    line, resolved against `data_dir`), groups them into `batch_size`-sized
    batches, and hands each batch to TensorRT via `get_batch()` through two
    device buffers: the padded feature tensor and the per-sample lengths.

    Args:
        batch_size: Samples per calibration batch.
        max_batches: Upper bound on the number of batches to calibrate with.
        force: If True, ignore any existing calibration cache and recalibrate.
        cache_path: Where the calibration cache is read from / written to.
        data_map: Text file listing sample file stems (no extension).
        data_dir: Directory containing the .npy samples.
        data_type: One of 'fp32' / 'fp16' / 'int8'; dtype the samples are fed as.

    Raises:
        NotImplementedError: If `data_type` is not one of the three recognized values.
    """

    def __init__(self, batch_size, max_batches, force, cache_path, data_map, data_dir, data_type):
        # Whenever you specify a custom constructor for a TensorRT class,
        # you MUST call the constructor of the parent explicitly.
        trt.IInt8MinMaxCalibrator.__init__(self)

        self.batch_size = batch_size
        self.max_batches = max_batches
        self.force = force
        self.cache_path = cache_path

        if data_type == 'fp32':
            self.ITEM_DTYPE = np.float32
        elif data_type == 'fp16':
            self.ITEM_DTYPE = np.float16
        elif data_type == 'int8':
            self.ITEM_DTYPE = np.int8
        else:
            raise NotImplementedError(f"Data type {data_type} not recognized for calibration")
        ITEMSIZE_BYTES = self.ITEM_DTYPE(0).itemsize

        with open(data_map) as f:
            # Assumes calibration data is just lines of filenames (no extension)
            data_paths = [os.path.join(data_dir, fn.strip() + ".npy") for fn in f.readlines()]

        requested = max_batches * batch_size
        if requested > len(data_paths):
            # BUG FIX vs. original: warn only when we truly have fewer samples
            # than requested (the original also printed this warning when the
            # two counts were exactly equal).
            print(f"Requested {requested} samples for calibration, but only "
                  f"{len(data_paths)} are in the dataset. "
                  f"Calibrating with {len(data_paths)} samples instead")
        n_samples = min(requested, len(data_paths))

        lens = []
        samples = []
        for path in tqdm(data_paths[:n_samples]):
            # BUG FIX vs. original: the file was loaded from disk twice per
            # sample (np.load(path) called once for `sample` and again inside
            # append); load once and reuse.
            sample = np.load(path)
            samples.append(sample)
            # Index of the last non-zero element approximates the valid sequence
            # length. NOTE(review): assumes trailing frames are exactly
            # zero-padded — confirm against the preprocessing pipeline.
            lens.append(np.nonzero(sample)[0][-1])

        # We should ensure that we've loaded in data that we expect as input, and warn otherwise
        if samples[0].dtype != self.ITEM_DTYPE:
            print(f"Warning: converting input data of type {samples[0].dtype} to {self.ITEM_DTYPE}. This may result in loss of calibration accuracy and increased calibration time")

        def partition(ar):
            # Partitions a sequence into sub-arrays of batch_size length, with
            # the last entry being remainder-sized.
            return np.array_split(ar, np.arange(len(ar))[batch_size::batch_size])

        self.batches = partition(samples)
        self.batch_lens = partition(lens)  # Type conversion happens later (in get_batch)

        # Per-sample feature tensor dimensions: (SEQ_LEN time steps) x (BINS
        # floats per time step).
        SEQ_LEN, BINS = self.batches[0].shape[1:3]
        self.device_input = cuda.mem_alloc(self.batch_size * SEQ_LEN * BINS * ITEMSIZE_BYTES)
        INT32_SIZE = np.int32(0).itemsize
        self.device_length = cuda.mem_alloc(self.batch_size * INT32_SIZE)

        self.current_idx = 0

        # If there's a cache, use it instead of calibrating.
        if not self.force and os.path.exists(self.cache_path):
            with open(self.cache_path, 'rb') as f:
                self.cache = f.read()
        else:
            self.cache = None

    def get_batch(self, names):
        """Copy the next batch to the device; return buffer pointers, or None when exhausted."""
        if self.current_idx < len(self.batches):
            npbatch = np.ascontiguousarray(self.batches[self.current_idx], dtype=self.ITEM_DTYPE)
            cuda.memcpy_htod(self.device_input, npbatch)
            npseqlen = np.ascontiguousarray(self.batch_lens[self.current_idx], dtype=np.int32)
            cuda.memcpy_htod(self.device_length, npseqlen)
            self.current_idx += 1
            return [int(self.device_input), int(self.device_length)]
        else:
            # Returning None tells TensorRT that calibration data is exhausted.
            return None

    def get_batch_size(self):
        """Return the calibration batch size TensorRT should use."""
        return self.batch_size

    def read_calibration_cache(self):
        """Return the cached calibration table bytes, or None to force calibration."""
        return self.cache

    def write_calibration_cache(self, cache):
        """Persist the calibration table produced by TensorRT and keep it in memory."""
        with open(self.cache_path, 'wb') as f:
            f.write(cache)
        self.cache = cache

    def clear_cache(self):
        """Drop the in-memory cache so the next build recalibrates."""
        self.cache = None

    def __del__(self):
        # Release the CUDA device allocations owned by this calibrator.
        self.device_input.free()
        self.device_length.free()
apache-2.0
bdoner/SickRage
lib/simplejson/encoder.py
343
16033
"""Implementation of JSONEncoder """ import re try: from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii except ImportError: c_encode_basestring_ascii = None try: from simplejson._speedups import make_encoder as c_make_encoder except ImportError: c_make_encoder = None ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]') ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])') HAS_UTF8 = re.compile(r'[\x80-\xff]') ESCAPE_DCT = { '\\': '\\\\', '"': '\\"', '\b': '\\b', '\f': '\\f', '\n': '\\n', '\r': '\\r', '\t': '\\t', } for i in range(0x20): #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i)) ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,)) # Assume this produces an infinity on all machines (probably not guaranteed) INFINITY = float('1e66666') FLOAT_REPR = repr def encode_basestring(s): """Return a JSON representation of a Python string """ def replace(match): return ESCAPE_DCT[match.group(0)] return '"' + ESCAPE.sub(replace, s) + '"' def py_encode_basestring_ascii(s): """Return an ASCII-only JSON representation of a Python string """ if isinstance(s, str) and HAS_UTF8.search(s) is not None: s = s.decode('utf-8') def replace(match): s = match.group(0) try: return ESCAPE_DCT[s] except KeyError: n = ord(s) if n < 0x10000: #return '\\u{0:04x}'.format(n) return '\\u%04x' % (n,) else: # surrogate pair n -= 0x10000 s1 = 0xd800 | ((n >> 10) & 0x3ff) s2 = 0xdc00 | (n & 0x3ff) #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2) return '\\u%04x\\u%04x' % (s1, s2) return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"' encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii class JSONEncoder(object): """Extensible JSON <http://json.org> encoder for Python data structures. 
Supports the following objects and types by default: +-------------------+---------------+ | Python | JSON | +===================+===============+ | dict | object | +-------------------+---------------+ | list, tuple | array | +-------------------+---------------+ | str, unicode | string | +-------------------+---------------+ | int, long, float | number | +-------------------+---------------+ | True | true | +-------------------+---------------+ | False | false | +-------------------+---------------+ | None | null | +-------------------+---------------+ To extend this to recognize other objects, subclass and implement a ``.default()`` method with another method that returns a serializable object for ``o`` if possible, otherwise it should call the superclass implementation (to raise ``TypeError``). """ item_separator = ', ' key_separator = ': ' def __init__(self, skipkeys=False, ensure_ascii=True, check_circular=True, allow_nan=True, sort_keys=False, indent=None, separators=None, encoding='utf-8', default=None): """Constructor for JSONEncoder, with sensible defaults. If skipkeys is false, then it is a TypeError to attempt encoding of keys that are not str, int, long, float or None. If skipkeys is True, such items are simply skipped. If ensure_ascii is true, the output is guaranteed to be str objects with all incoming unicode characters escaped. If ensure_ascii is false, the output will be unicode object. If check_circular is true, then lists, dicts, and custom encoded objects will be checked for circular references during encoding to prevent an infinite recursion (which would cause an OverflowError). Otherwise, no such check takes place. If allow_nan is true, then NaN, Infinity, and -Infinity will be encoded as such. This behavior is not JSON specification compliant, but is consistent with most JavaScript based encoders and decoders. Otherwise, it will be a ValueError to encode such floats. 
If sort_keys is true, then the output of dictionaries will be sorted by key; this is useful for regression tests to ensure that JSON serializations can be compared on a day-to-day basis. If indent is a non-negative integer, then JSON array elements and object members will be pretty-printed with that indent level. An indent level of 0 will only insert newlines. None is the most compact representation. If specified, separators should be a (item_separator, key_separator) tuple. The default is (', ', ': '). To get the most compact JSON representation you should specify (',', ':') to eliminate whitespace. If specified, default is a function that gets called for objects that can't otherwise be serialized. It should return a JSON encodable version of the object or raise a ``TypeError``. If encoding is not None, then all input strings will be transformed into unicode using that encoding prior to JSON-encoding. The default is UTF-8. """ self.skipkeys = skipkeys self.ensure_ascii = ensure_ascii self.check_circular = check_circular self.allow_nan = allow_nan self.sort_keys = sort_keys self.indent = indent if separators is not None: self.item_separator, self.key_separator = separators if default is not None: self.default = default self.encoding = encoding def default(self, o): """Implement this method in a subclass such that it returns a serializable object for ``o``, or calls the base implementation (to raise a ``TypeError``). For example, to support arbitrary iterators, you could implement default like this:: def default(self, o): try: iterable = iter(o) except TypeError: pass else: return list(iterable) return JSONEncoder.default(self, o) """ raise TypeError(repr(o) + " is not JSON serializable") def encode(self, o): """Return a JSON string representation of a Python data structure. >>> JSONEncoder().encode({"foo": ["bar", "baz"]}) '{"foo": ["bar", "baz"]}' """ # This is for extremely simple cases and benchmarks. 
if isinstance(o, basestring): if isinstance(o, str): _encoding = self.encoding if (_encoding is not None and not (_encoding == 'utf-8')): o = o.decode(_encoding) if self.ensure_ascii: return encode_basestring_ascii(o) else: return encode_basestring(o) # This doesn't pass the iterator directly to ''.join() because the # exceptions aren't as detailed. The list call should be roughly # equivalent to the PySequence_Fast that ''.join() would do. chunks = self.iterencode(o, _one_shot=True) if not isinstance(chunks, (list, tuple)): chunks = list(chunks) return ''.join(chunks) def iterencode(self, o, _one_shot=False): """Encode the given object and yield each string representation as available. For example:: for chunk in JSONEncoder().iterencode(bigobject): mysocket.write(chunk) """ if self.check_circular: markers = {} else: markers = None if self.ensure_ascii: _encoder = encode_basestring_ascii else: _encoder = encode_basestring if self.encoding != 'utf-8': def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding): if isinstance(o, str): o = o.decode(_encoding) return _orig_encoder(o) def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY): # Check for specials. Note that this type of test is processor- and/or # platform-specific, so do tests which don't depend on the internals. 
if o != o: text = 'NaN' elif o == _inf: text = 'Infinity' elif o == _neginf: text = '-Infinity' else: return _repr(o) if not allow_nan: raise ValueError( "Out of range float values are not JSON compliant: " + repr(o)) return text if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys: _iterencode = c_make_encoder( markers, self.default, _encoder, self.indent, self.key_separator, self.item_separator, self.sort_keys, self.skipkeys, self.allow_nan) else: _iterencode = _make_iterencode( markers, self.default, _encoder, self.indent, floatstr, self.key_separator, self.item_separator, self.sort_keys, self.skipkeys, _one_shot) return _iterencode(o, 0) def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot, ## HACK: hand-optimized bytecode; turn globals into locals False=False, True=True, ValueError=ValueError, basestring=basestring, dict=dict, float=float, id=id, int=int, isinstance=isinstance, list=list, long=long, str=str, tuple=tuple, ): def _iterencode_list(lst, _current_indent_level): if not lst: yield '[]' return if markers is not None: markerid = id(lst) if markerid in markers: raise ValueError("Circular reference detected") markers[markerid] = lst buf = '[' if _indent is not None: _current_indent_level += 1 newline_indent = '\n' + (' ' * (_indent * _current_indent_level)) separator = _item_separator + newline_indent buf += newline_indent else: newline_indent = None separator = _item_separator first = True for value in lst: if first: first = False else: buf = separator if isinstance(value, basestring): yield buf + _encoder(value) elif value is None: yield buf + 'null' elif value is True: yield buf + 'true' elif value is False: yield buf + 'false' elif isinstance(value, (int, long)): yield buf + str(value) elif isinstance(value, float): yield buf + _floatstr(value) else: yield buf if isinstance(value, (list, tuple)): chunks = _iterencode_list(value, 
_current_indent_level) elif isinstance(value, dict): chunks = _iterencode_dict(value, _current_indent_level) else: chunks = _iterencode(value, _current_indent_level) for chunk in chunks: yield chunk if newline_indent is not None: _current_indent_level -= 1 yield '\n' + (' ' * (_indent * _current_indent_level)) yield ']' if markers is not None: del markers[markerid] def _iterencode_dict(dct, _current_indent_level): if not dct: yield '{}' return if markers is not None: markerid = id(dct) if markerid in markers: raise ValueError("Circular reference detected") markers[markerid] = dct yield '{' if _indent is not None: _current_indent_level += 1 newline_indent = '\n' + (' ' * (_indent * _current_indent_level)) item_separator = _item_separator + newline_indent yield newline_indent else: newline_indent = None item_separator = _item_separator first = True if _sort_keys: items = dct.items() items.sort(key=lambda kv: kv[0]) else: items = dct.iteritems() for key, value in items: if isinstance(key, basestring): pass # JavaScript is weakly typed for these, so it makes sense to # also allow them. Many encoders seem to do something like this. 
elif isinstance(key, float): key = _floatstr(key) elif key is True: key = 'true' elif key is False: key = 'false' elif key is None: key = 'null' elif isinstance(key, (int, long)): key = str(key) elif _skipkeys: continue else: raise TypeError("key " + repr(key) + " is not a string") if first: first = False else: yield item_separator yield _encoder(key) yield _key_separator if isinstance(value, basestring): yield _encoder(value) elif value is None: yield 'null' elif value is True: yield 'true' elif value is False: yield 'false' elif isinstance(value, (int, long)): yield str(value) elif isinstance(value, float): yield _floatstr(value) else: if isinstance(value, (list, tuple)): chunks = _iterencode_list(value, _current_indent_level) elif isinstance(value, dict): chunks = _iterencode_dict(value, _current_indent_level) else: chunks = _iterencode(value, _current_indent_level) for chunk in chunks: yield chunk if newline_indent is not None: _current_indent_level -= 1 yield '\n' + (' ' * (_indent * _current_indent_level)) yield '}' if markers is not None: del markers[markerid] def _iterencode(o, _current_indent_level): if isinstance(o, basestring): yield _encoder(o) elif o is None: yield 'null' elif o is True: yield 'true' elif o is False: yield 'false' elif isinstance(o, (int, long)): yield str(o) elif isinstance(o, float): yield _floatstr(o) elif isinstance(o, (list, tuple)): for chunk in _iterencode_list(o, _current_indent_level): yield chunk elif isinstance(o, dict): for chunk in _iterencode_dict(o, _current_indent_level): yield chunk else: if markers is not None: markerid = id(o) if markerid in markers: raise ValueError("Circular reference detected") markers[markerid] = o o = _default(o) for chunk in _iterencode(o, _current_indent_level): yield chunk if markers is not None: del markers[markerid] return _iterencode
gpl-3.0
codeunsolved/NGS-Dashboard
py/lib/database_connector.py
1
3464
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# PROGRAM : sql_connector
# AUTHOR : codeunsolved@gmail.com
# CREATED : August 22 2016
# VERSION : v0.0.1a
# NOTE: Python 2 module (print statements, `except X, e` syntax).

import mysql.connector
from mysql.connector import errorcode

# CONFIG AREA
# Example connection config / database name showing the expected shapes.
mysql_config_example = {
    'user': 'username',
    'password': 'password',
    'host': '127.0.0.1',
    'raise_on_warnings': True
}
db_name_example = 'database'


class MysqlConnector(object):
    """Thin wrapper around mysql.connector: connect, select/create DB, insert, query."""

    def __init__(self, config, db_name):
        # config: kwargs dict for mysql.connector.connect() (see mysql_config_example).
        # db_name: database to select; created with utf8 charset if missing.
        self.cnx = None
        self.connect(config)
        self.cursor = self.cnx.cursor(buffered=True)  # needs to be buffered to use .fetch* methods, .rowcount attribute ...
        self.select_db(db_name)

    def connect(self, config):
        # Open the connection; on failure only prints the error (self.cnx stays
        # None, so __init__ will then fail on .cursor()).
        try:
            self.cnx = mysql.connector.connect(**config)
        except mysql.connector.Error as err:
            if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
                print "username or password incorrect!"
            else:
                print err

    def select_db(self, db_name):
        # Select db_name; if it does not exist, create it (utf8) and select it.
        # NOTE(review): calls exit(1) on unrecoverable errors — unusual for
        # library code; callers cannot catch this.
        try:
            print "• Select DB: %s" % db_name,
            self.cnx.database = db_name
        except mysql.connector.Error as err:
            if err.errno == errorcode.ER_BAD_DB_ERROR:
                try:
                    print "Failed: %s" % err.msg
                    print "• Creating database: %s" % db_name,
                    self.cursor.execute("CREATE DATABASE {} DEFAULT CHARACTER SET 'utf8'".format(db_name))
                except mysql.connector.Error as err:
                    print "Failed! %s" % err
                    exit(1)
                else:
                    print "OK!"
                    self.cnx.database = db_name
            else:
                print(err.msg)
                exit(1)
        else:
            print "OK!"

    def insert(self, i_grammar, data):
        """Execute a parameterized INSERT and commit; duplicate keys are skipped.

        insert example from
        http://dev.mysql.com/doc/connector-python/en/connector-python-example-cursor-transaction.html

        add_employee = ("INSERT INTO employees "
                        "(first_name, last_name, hire_date, gender, birth_date) "
                        "VALUES (%s, %s, %s, %s, %s)")
        data_employee = ('Geert', 'Vanderkelen', tomorrow, 'M', date(1977, 6, 14))

        # Insert new employee
        cursor.execute(add_employee, data_employee)
        emp_no = cursor.lastrowid

        add_salary = ("INSERT INTO salaries "
                      "(emp_no, salary, from_date, to_date) "
                      "VALUES (%(emp_no)s, %(salary)s, %(from_date)s, %(to_date)s)")
        data_salary = {
            'emp_no': emp_no,
            'salary': 50000,
            'from_date': tomorrow,
            'to_date': date(9999, 1, 1),
        }

        # Insert salary information
        cursor.execute(add_salary, data_salary)

        # Make sure data is committed to the database
        cnx.commit()
        """
        try:
            self.cursor.execute(i_grammar, data)
        except mysql.connector.errors.IntegrityError, e:
            # errno 1062 = ER_DUP_ENTRY: tolerate duplicate-key inserts,
            # re-raise anything else.
            if e.errno == 1062:
                print "PASS! %s" % e
            else:
                raise
        else:
            pass
        # Commit runs even after a tolerated duplicate-key error.
        self.cnx.commit()

    def query(self, q_grammar, data=[]):
        # Execute a parameterized SELECT and return the cursor for fetching.
        # NOTE(review): mutable default argument; harmless here since `data`
        # is never mutated, but `data=None` would be the safer idiom.
        self.cursor.execute(q_grammar, data)
        return self.cursor

    def done(self):
        # Release the cursor and close the connection.
        self.cursor.close()
        self.cnx.close()
mit
isrohutamahopetechnik/MissionPlanner
Lib/tokenize.py
53
16889
"""Tokenization help for Python programs. generate_tokens(readline) is a generator that breaks a stream of text into Python tokens. It accepts a readline-like method which is called repeatedly to get the next line of input (or "" for EOF). It generates 5-tuples with these members: the token type (see token.py) the token (a string) the starting (row, column) indices of the token (a 2-tuple of ints) the ending (row, column) indices of the token (a 2-tuple of ints) the original line (string) It is designed to match the working of the Python tokenizer exactly, except that it produces COMMENT tokens for comments and gives type OP for all operators Older entry points tokenize_loop(readline, tokeneater) tokenize(readline, tokeneater=printtoken) are the same, except instead of generating tokens, tokeneater is a callback function to which the 5 fields described above are passed as 5 arguments, each time a new token is found.""" __author__ = 'Ka-Ping Yee <ping@lfw.org>' __credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, ' 'Skip Montanaro, Raymond Hettinger') import string, re from token import * import token __all__ = [x for x in dir(token) if not x.startswith("_")] __all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"] del x del token COMMENT = N_TOKENS tok_name[COMMENT] = 'COMMENT' NL = N_TOKENS + 1 tok_name[NL] = 'NL' N_TOKENS += 2 def group(*choices): return '(' + '|'.join(choices) + ')' def any(*choices): return group(*choices) + '*' def maybe(*choices): return group(*choices) + '?' Whitespace = r'[ \f\t]*' Comment = r'#[^\r\n]*' Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment) Name = r'[a-zA-Z_]\w*' Hexnumber = r'0[xX][\da-fA-F]+[lL]?' Octnumber = r'(0[oO][0-7]+)|(0[0-7]*)[lL]?' Binnumber = r'0[bB][01]+[lL]?' Decnumber = r'[1-9]\d*[lL]?' 
Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber) Exponent = r'[eE][-+]?\d+' Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent) Expfloat = r'\d+' + Exponent Floatnumber = group(Pointfloat, Expfloat) Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]') Number = group(Imagnumber, Floatnumber, Intnumber) # Tail end of ' string. Single = r"[^'\\]*(?:\\.[^'\\]*)*'" # Tail end of " string. Double = r'[^"\\]*(?:\\.[^"\\]*)*"' # Tail end of ''' string. Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''" # Tail end of """ string. Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""' Triple = group("[uU]?[rR]?'''", '[uU]?[rR]?"""') # Single-line ' or " string. String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'", r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"') # Because of leftmost-then-longest match semantics, be sure to put the # longest operators first (e.g., if = came before ==, == would get # recognized as two instances of =). Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=", r"//=?", r"[+\-*/%&|^=<>]=?", r"~") Bracket = '[][(){}]' Special = group(r'\r?\n', r'[:;.,`@]') Funny = group(Operator, Bracket, Special) PlainToken = group(Number, Funny, String, Name) Token = Ignore + PlainToken # First (or only) line of ' or " string. 
ContStr = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" + group("'", r'\\\r?\n'), r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' + group('"', r'\\\r?\n')) PseudoExtras = group(r'\\\r?\n', Comment, Triple) PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name) tokenprog, pseudoprog, single3prog, double3prog = map( re.compile, (Token, PseudoToken, Single3, Double3)) endprogs = {"'": re.compile(Single), '"': re.compile(Double), "'''": single3prog, '"""': double3prog, "r'''": single3prog, 'r"""': double3prog, "u'''": single3prog, 'u"""': double3prog, "ur'''": single3prog, 'ur"""': double3prog, "R'''": single3prog, 'R"""': double3prog, "U'''": single3prog, 'U"""': double3prog, "uR'''": single3prog, 'uR"""': double3prog, "Ur'''": single3prog, 'Ur"""': double3prog, "UR'''": single3prog, 'UR"""': double3prog, "b'''": single3prog, 'b"""': double3prog, "br'''": single3prog, 'br"""': double3prog, "B'''": single3prog, 'B"""': double3prog, "bR'''": single3prog, 'bR"""': double3prog, "Br'''": single3prog, 'Br"""': double3prog, "BR'''": single3prog, 'BR"""': double3prog, 'r': None, 'R': None, 'u': None, 'U': None, 'b': None, 'B': None} triple_quoted = {} for t in ("'''", '"""', "r'''", 'r"""', "R'''", 'R"""', "u'''", 'u"""', "U'''", 'U"""', "ur'''", 'ur"""', "Ur'''", 'Ur"""', "uR'''", 'uR"""', "UR'''", 'UR"""', "b'''", 'b"""', "B'''", 'B"""', "br'''", 'br"""', "Br'''", 'Br"""', "bR'''", 'bR"""', "BR'''", 'BR"""'): triple_quoted[t] = t single_quoted = {} for t in ("'", '"', "r'", 'r"', "R'", 'R"', "u'", 'u"', "U'", 'U"', "ur'", 'ur"', "Ur'", 'Ur"', "uR'", 'uR"', "UR'", 'UR"', "b'", 'b"', "B'", 'B"', "br'", 'br"', "Br'", 'Br"', "bR'", 'bR"', "BR'", 'BR"' ): single_quoted[t] = t tabsize = 8 class TokenError(Exception): pass class StopTokenizing(Exception): pass def printtoken(type, token, srow_scol, erow_ecol, line): # for testing srow, scol = srow_scol erow, ecol = erow_ecol print "%d,%d-%d,%d:\t%s\t%s" % \ (srow, scol, erow, ecol, tok_name[type], repr(token)) def 
tokenize(readline, tokeneater=printtoken): """ The tokenize() function accepts two parameters: one representing the input stream, and one providing an output mechanism for tokenize(). The first parameter, readline, must be a callable object which provides the same interface as the readline() method of built-in file objects. Each call to the function should return one line of input as a string. The second parameter, tokeneater, must also be a callable object. It is called once for each token, with five arguments, corresponding to the tuples generated by generate_tokens(). """ try: tokenize_loop(readline, tokeneater) except StopTokenizing: pass # backwards compatible interface def tokenize_loop(readline, tokeneater): for token_info in generate_tokens(readline): tokeneater(*token_info) class Untokenizer: def __init__(self): self.tokens = [] self.prev_row = 1 self.prev_col = 0 def add_whitespace(self, start): row, col = start assert row <= self.prev_row col_offset = col - self.prev_col if col_offset: self.tokens.append(" " * col_offset) def untokenize(self, iterable): for t in iterable: if len(t) == 2: self.compat(t, iterable) break tok_type, token, start, end, line = t self.add_whitespace(start) self.tokens.append(token) self.prev_row, self.prev_col = end if tok_type in (NEWLINE, NL): self.prev_row += 1 self.prev_col = 0 return "".join(self.tokens) def compat(self, token, iterable): startline = False indents = [] toks_append = self.tokens.append toknum, tokval = token if toknum in (NAME, NUMBER): tokval += ' ' if toknum in (NEWLINE, NL): startline = True prevstring = False for tok in iterable: toknum, tokval = tok[:2] if toknum in (NAME, NUMBER): tokval += ' ' # Insert a space between two consecutive strings if toknum == STRING: if prevstring: tokval = ' ' + tokval prevstring = True else: prevstring = False if toknum == INDENT: indents.append(tokval) continue elif toknum == DEDENT: indents.pop() continue elif toknum in (NEWLINE, NL): startline = True elif startline 
and indents: toks_append(indents[-1]) startline = False toks_append(tokval) def untokenize(iterable): """Transform tokens back into Python source code. Each element returned by the iterable must be a token sequence with at least two elements, a token number and token value. If only two tokens are passed, the resulting output is poor. Round-trip invariant for full input: Untokenized source will match input source exactly Round-trip invariant for limited intput: # Output text will tokenize the back to the input t1 = [tok[:2] for tok in generate_tokens(f.readline)] newcode = untokenize(t1) readline = iter(newcode.splitlines(1)).next t2 = [tok[:2] for tok in generate_tokens(readline)] assert t1 == t2 """ ut = Untokenizer() return ut.untokenize(iterable) def generate_tokens(readline): """ The generate_tokens() generator requires one argment, readline, which must be a callable object which provides the same interface as the readline() method of built-in file objects. Each call to the function should return one line of input as a string. Alternately, readline can be a callable function terminating with StopIteration: readline = open(myfile).next # Example of alternate readline The generator produces 5-tuples with these members: the token type; the token string; a 2-tuple (srow, scol) of ints specifying the row and column where the token begins in the source; a 2-tuple (erow, ecol) of ints specifying the row and column where the token ends in the source; and the line on which the token was found. The line passed is the logical line; continuation lines are included. 
""" lnum = parenlev = continued = 0 namechars, numchars = string.ascii_letters + '_', '0123456789' contstr, needcont = '', 0 contline = None indents = [0] while 1: # loop over lines in stream try: line = readline() except StopIteration: line = '' lnum += 1 pos, max = 0, len(line) if contstr: # continued string if not line: raise TokenError, ("EOF in multi-line string", strstart) endmatch = endprog.match(line) if endmatch: pos = end = endmatch.end(0) yield (STRING, contstr + line[:end], strstart, (lnum, end), contline + line) contstr, needcont = '', 0 contline = None elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n': yield (ERRORTOKEN, contstr + line, strstart, (lnum, len(line)), contline) contstr = '' contline = None continue else: contstr = contstr + line contline = contline + line continue elif parenlev == 0 and not continued: # new statement if not line: break column = 0 while pos < max: # measure leading whitespace if line[pos] == ' ': column += 1 elif line[pos] == '\t': column = (column//tabsize + 1)*tabsize elif line[pos] == '\f': column = 0 else: break pos += 1 if pos == max: break if line[pos] in '#\r\n': # skip comments or blank lines if line[pos] == '#': comment_token = line[pos:].rstrip('\r\n') nl_pos = pos + len(comment_token) yield (COMMENT, comment_token, (lnum, pos), (lnum, pos + len(comment_token)), line) yield (NL, line[nl_pos:], (lnum, nl_pos), (lnum, len(line)), line) else: yield ((NL, COMMENT)[line[pos] == '#'], line[pos:], (lnum, pos), (lnum, len(line)), line) continue if column > indents[-1]: # count indents or dedents indents.append(column) yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line) while column < indents[-1]: if column not in indents: raise IndentationError( "unindent does not match any outer indentation level", ("<tokenize>", lnum, pos, line)) indents = indents[:-1] yield (DEDENT, '', (lnum, pos), (lnum, pos), line) else: # continued statement if not line: raise TokenError, ("EOF in multi-line statement", (lnum, 
0)) continued = 0 while pos < max: pseudomatch = pseudoprog.match(line, pos) if pseudomatch: # scan for tokens start, end = pseudomatch.span(1) spos, epos, pos = (lnum, start), (lnum, end), end token, initial = line[start:end], line[start] if initial in numchars or \ (initial == '.' and token != '.'): # ordinary number yield (NUMBER, token, spos, epos, line) elif initial in '\r\n': yield (NL if parenlev > 0 else NEWLINE, token, spos, epos, line) elif initial == '#': assert not token.endswith("\n") yield (COMMENT, token, spos, epos, line) elif token in triple_quoted: endprog = endprogs[token] endmatch = endprog.match(line, pos) if endmatch: # all on one line pos = endmatch.end(0) token = line[start:pos] yield (STRING, token, spos, (lnum, pos), line) else: strstart = (lnum, start) # multiple lines contstr = line[start:] contline = line break elif initial in single_quoted or \ token[:2] in single_quoted or \ token[:3] in single_quoted: if token[-1] == '\n': # continued string strstart = (lnum, start) endprog = (endprogs[initial] or endprogs[token[1]] or endprogs[token[2]]) contstr, needcont = line[start:], 1 contline = line break else: # ordinary string yield (STRING, token, spos, epos, line) elif initial in namechars: # ordinary name yield (NAME, token, spos, epos, line) elif initial == '\\': # continued stmt continued = 1 else: if initial in '([{': parenlev += 1 elif initial in ')]}': parenlev -= 1 yield (OP, token, spos, epos, line) else: yield (ERRORTOKEN, line[pos], (lnum, pos), (lnum, pos+1), line) pos += 1 for indent in indents[1:]: # pop remaining indent levels yield (DEDENT, '', (lnum, 0), (lnum, 0), '') yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '') if __name__ == '__main__': # testing import sys if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline) else: tokenize(sys.stdin.readline)
gpl-3.0
paleocore/paleocore
hrp/import_hrp_dentition.py
1
7294
__author__ = 'reedd' """ This loader/importer script is designed to read data from a sqlite database storing the Hadar Research Project (HRP) data, and load those data into the PaleoCore postgres database. It assumes that the occurrence and biology table data has already been imported. This script reads in the data form the dentition table and adds it to the appropriate Biology occurrences. The script: 1) """ # Import libraries import sqlite3 from taxonomy.models import Taxon, TaxonRank, IdentificationQualifier from hrp.models import Occurrence, Locality, Archaeology, Biology, Geology from django.contrib.gis.geos import Point from django.core.exceptions import ObjectDoesNotExist import datetime import re # Required imports for stand-alone django scripts # http://stackoverflow.com/questions/25244631/models-arent-loaded-yet-error-while-populating-in-django-1-8-and-python-2-7-8 import django django.setup() # Global variables # absolute file path to the HRP sqlite database from whihc we are reading data hrpdb_path = '/Users/reedd/Documents/projects/PaleoCore/projects/HRP/HRP_Paleobase4_2016.sqlite' record_limit = ('20000',) # a limiter setting the maximum number of records to be read from the database, for debugging # list of fields as they occur in the HRP sqlite database dentition table. The list is used to correctly find # specific data read in from each row of the occurrence table. 
dentition_field_list = ['FaunaAttrbutesID', 'CatalogNumberNumeric', 'CatalogNumberNumberic_OLD', 'Maxilla', 'Mandible', 'uli1', 'uli2', 'uli3', 'uri1', 'uri2', 'uri3', 'ulc', 'urc', 'ulp1', 'ulp2', 'ulp3', 'ulp4', 'urp1', 'urp2', 'urp3', 'urp4', 'ulm1', 'ulm2', 'ulm3', 'ulm4', 'urm1', 'urm2', 'urm3', 'urm4', 'lli1', 'lli2', 'lli3', 'lri1', 'lri2', 'lri3', 'llc', 'lrc', 'llp1', 'llp2', 'llp3', 'llp4', 'lrp1', 'lrp2', 'lrp3', 'lrp4', 'llm1', 'llm2', 'llm3', 'llm4', 'lrm1', 'lrm2', 'lrm3', 'lrm4', 'indet_incisor', 'indet_canine', 'indet_premolar', 'indet_molar', 'indet_Tooth', 'deciduous'] # list of HRP collectors used as a structured vocabulary for the collector field and to validate the data in this field. HRP_collector_list = ['C.J. Campisano', 'W.H. Kimbel', 'T.K. Nalley', 'D.N. Reed', 'K.E. Reed', 'B.J. Schoville', 'A.E. Shapiro', 'HFS Student', 'HRP Team'] # list of HRP stratigraphic members used as structured vocabulary for the member field HRP_strat_member_list = ['Basal', 'Basal-Sidi Hakoma', 'Denen Dora', 'Denen Dora-Kada Hadar', 'Kada Hadar', 'Sidi Hakoma', 'Sidi Hakoma-Denen Dora'] # list of fields as they occur in the HRP sqlite database biology table. biology_field_list = ["CatalogNumberNumeric", "CatalogNumberNumericOLD", "Kingdom", "Phylum", "Class", "Order", "Family", "Subfamily", "Tribe", "Genus", "SpecificEpithet", "IdentificationQualifier", "IdentifiedBy", "DateIdentified", "TypeStatus", "TaxonomyRemarks", "Element", "ElementPortion", "Side", "ElementNumber", "ElementQualifier", "SizeClass", "LifeStage", "ElementRemarks", "DateLastModified", "Barcode", "BiologyRemarks"] # structured vocabularies for fields in the biology table. 
rank_list = ['Kingdom', 'Phylum', 'Class', 'Order', 'Family'] taxon_dict = {'Kingdom': '', 'Phylum': '', 'Class': '', 'Order': '', 'Family': '', 'Subfamily': '', 'Tribe': '', 'Genus': '', 'Species': ''} element_list = ['astragalus', 'bacculum', 'bone (indet.)', 'calcaneus', 'canine', 'capitate', 'carapace', 'carpal (indet.)', 'carpal/tarsal', 'carpometacarpus', 'carpus', 'chela', 'clavicle', 'coccyx', 'coprolite', 'cranium', 'cranium w/horn core', 'cuboid', 'cubonavicular', 'cuneiform', 'dermal plate', 'egg shell', 'endocast', 'ethmoid', 'femur', 'fibula', 'frontal', 'hamate', 'horn core', 'humerus', 'hyoid', 'ilium', 'incisor', 'innominate', 'ischium', 'lacrimal', 'long bone', 'lunate', 'mandible', 'manus', 'maxilla', 'metacarpal', 'metapodial', 'metatarsal', 'molar', 'nasal', 'navicular', 'naviculocuboid', 'occipital', 'ossicone', 'parietal', 'patella', 'pes', 'phalanx', 'pisiform', 'plastron', 'premaxilla', 'premolar', 'pubis', 'radioulna', 'radius', 'rib', 'sacrum', 'scaphoid', 'scapholunar', 'scapula', 'scute', 'sesamoid', 'shell', 'skeleton', 'skull', 'sphenoid', 'sternum', 'talon', 'talus', 'tarsal (indet.)', 'tarsometatarsus', 'tarsus', 'temporal', 'tibia', 'tibiotarsus', 'tooth (indet.)', 'trapezium', 'trapezoid', 'triquetrum', 'ulna', 'vertebra', 'vomer', 'zygomatic'] def get_dentition_row(obj): pk_string = str(obj.id) return dentition_cursor.execute('SELECT * FROM dentition WHERE CatalogNumberNumeric = ?', (pk_string,)).fetchone() def valid_dentition(row): if row: # if there is row data, many will be None # perhaps validate dentition data against element data in biology table. 
# if upper teeth only, element = maxilla # if lower teeth only, element = mandible # if both element is max+man return True else: return None def add_dentition_data(row, obj): obj_field_list = obj.get_all_field_names() for f in dentition_field_list: if f in obj_field_list: if row[dentition_field_list.index(f)]: fval = True else: fval = False setattr(obj, f, fval) return obj def main(): import_count, collection_count, observation_count, row_count, ac, bc, gc, ao, bo, go = [0] * 10 print "Record limit is set to: %s\n" % record_limit print "Processing records\n\n", # Fetch all Biology occurrences biology_objects_rs = Biology.objects.all() for obj in biology_objects_rs: # fetch related row from dentition dentition_row = get_dentition_row(obj) #print dentition_row # validate data in the row if valid_dentition(dentition_row): # add data to object updated_obj = add_dentition_data(dentition_row, obj) # save object # print obj.id, obj.element, obj.uli1, obj.uli2, obj.uli3, obj.uli4, obj.uli5, obj.uri1, obj.uri2, obj.uri3, obj.uri4, \ # obj.ulc, obj.urc, obj.ulp1, obj.ulp2, obj.ulp3, obj.ulp4, obj.urp1, obj.urp2, obj.urp3, obj.urp4, \ # obj.ulm1, obj.ulm2, obj.ulm3, obj.urm1, obj.urm2, obj.urm3 updated_obj.save() # print report import_count += 1 print "Completed update of {} biological occurrences".format(import_count) # Open a connection to the local sqlite database print "Opening connection to %s" % hrpdb_path connection = sqlite3.connect(hrpdb_path) # open a connection to the HRP sqlite database dentition_cursor = connection.cursor() # cursor for reading data in the dentition table main() # process data in the sqlite database connection.close() # close the connection
gpl-2.0
falleco/sample-websockets
socketio_django/socketio_django/settings.py
1
2615
""" Django settings for socketio_django project. For more information on this file, see https://docs.djangoproject.com/en/1.6/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.6/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os, sys from os import path from os.path import dirname, abspath, basename BASE_DIR = os.path.dirname(os.path.dirname(__file__)) DJANGO_ROOT = dirname(dirname(abspath(__file__))) # Absolute filesystem path to the top-level project folder: SITE_ROOT = dirname(DJANGO_ROOT) # Site name: SITE_NAME = basename(DJANGO_ROOT) sys.path.append(DJANGO_ROOT) STATIC_ROOT = path.normpath(path.join(SITE_ROOT, 'static')) STATICFILES_DIRS = ( path.normpath(path.join(SITE_ROOT, 'socketio_django', 'assets')), ) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'ee^!th)f)yc6*2ip(&==he8%$s6v_*f53ocabn5f+iffwbd#=9' # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'heart_beat' ) MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', ) ROOT_URLCONF = 'socketio_django.urls' WSGI_APPLICATION = 'socketio_django.wsgi.application' # Database # https://docs.djangoproject.com/en/1.6/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization # https://docs.djangoproject.com/en/1.6/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.6/howto/static-files/ STATIC_URL = '/static/'
mit
v1bri/gnuradio
gr-uhd/python/uhd/qa_uhd.py
47
1959
#!/usr/bin/env python # # Copyright 2005,2008,2010 Free Software Foundation, Inc. # # This file is part of GNU Radio # # GNU Radio is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # GNU Radio is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Radio; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # from gnuradio import gr, gr_unittest, uhd class test_uhd(gr_unittest.TestCase): def setUp(self): self.tb = gr.top_block() def tearDown(self): self.tb = None def test_000_nop (self): """Just see if we can import the module... They may not have a UHD device connected, etc. Don't try to run anything""" pass def test_time_spec_t (self): seconds = 42.0 time = uhd.time_spec_t(seconds) twice_time = time + time; zero_time = time - time; self.assertEqual(time.get_real_secs() * 2, seconds * 2 ) self.assertEqual(time.get_real_secs() - time.get_real_secs() , 0.0) def test_stream_args_channel_foo(self): """ Try to manipulate the stream args channels for proper swig'ing checks. """ sa = uhd.stream_args_t() sa.channels.append(1) sa.channels.append(0) print sa.channels self.assertEqual(len(sa.channels), 2) self.assertEqual(sa.channels[0], 1) self.assertEqual(sa.channels[1], 0) if __name__ == '__main__': gr_unittest.run(test_uhd, "test_uhd.xml")
gpl-3.0
anthgur/servo
tests/wpt/web-platform-tests/css/tools/apiclient/apiclient/uritemplate.py
79
14296
# coding=utf-8 # # Copyright © 2013 Hewlett-Packard Development Company, L.P. # # This work is distributed under the W3C® Software License [1] # in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # # [1] http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231 # # Process URI templates per http://tools.ietf.org/html/rfc6570 import re class UnsupportedExpression(Exception): def __init__(self, expression): self.expression = expression def __unicode__(self): return u'Unsopported expression: ' + self.expression class BadExpression(Exception): def __init__(self, expression): self.expression = expression def __unicode__(self): return u'Bad expression: ' + self.expression class BadVariable(Exception): def __init__(self, variable): self.variable = variable def __unicode__(self): return u'Bad variable: ' + self.variable class BadExpansion(Exception): def __init__(self, variable): self.variable = variable def __unicode__(self): return u'Bad expansion: ' + self.variable class URITemplate(object): alpha = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' digit = '0123456789' hexdigit = '0123456789ABCDEFabcdef' genDelims = ':/?#[]@' subDelims = "!$&'()*+,;=" varstart = alpha + digit + '_' varchar = varstart + '.' unreserved = alpha + digit + '-._~' reserved = genDelims + subDelims def __init__(self, template): self.template = template self.parts = [] parts = re.split(r'(\{[^\}]*\})', self.template) for part in parts: if (part): if (('{' == part[0]) and ('}' == part[-1])): expression = part[1:-1] if (re.match('^([a-zA-Z0-9_]|%[0-9a-fA-F][0-9a-fA-F]).*$', expression)): self.parts.append(SimpleExpansion(expression)) elif ('+' == part[1]): self.parts.append(ReservedExpansion(expression)) elif ('#' == part[1]): self.parts.append(FragmentExpansion(expression)) elif ('.' 
== part[1]): self.parts.append(LabelExpansion(expression)) elif ('/' == part[1]): self.parts.append(PathExpansion(expression)) elif (';' == part[1]): self.parts.append(PathStyleExpansion(expression)) elif ('?' == part[1]): self.parts.append(FormStyleQueryExpansion(expression)) elif ('&' == part[1]): self.parts.append(FormStyleQueryContinuation(expression)) elif (part[1] in '=,!@|'): raise UnsupportedExpression(part) else: raise BadExpression(part) else: if (('{' not in part) and ('}' not in part)): self.parts.append(Literal(part)) else: raise BadExpression(part) @property def variables(self): vars = set() for part in self.parts: vars.update(part.variables) return vars def expand(self, **kwargs): try: expanded = [part.expand(kwargs) for part in self.parts] except (BadExpansion): return None return ''.join([expandedPart for expandedPart in expanded if (expandedPart is not None)]) def __str__(self): return self.template.encode('ascii', 'replace') def __unicode__(self): return unicode(self.template) class Variable(object): def __init__(self, name): self.name = '' self.maxLength = None self.explode = False self.array = False if (name[0:1] not in URITemplate.varstart): raise BadVariable(name) if (':' in name): name, maxLength = name.split(':', 1) if ((0 < len(maxLength)) and (len(maxLength) < 4)): for digit in maxLength: if (digit not in URITemplate.digit): raise BadVariable(name + ':' + maxLength) self.maxLength = int(maxLength) if (not self.maxLength): raise BadVariable(name + ':' + maxLength) else: raise BadVariable(name + ':' + maxLength) elif ('*' == name[-1]): name = name[:-1] self.explode = True elif ('[]' == name[-2:]): name = name[:-2] self.array = True self.explode = True index = 0 while (index < len(name)): codepoint = name[index] if (('%' == codepoint) and ((index + 2) < len(name)) and (name[index + 1] in URITemplate.hexdigit) and (name[index + 2] in URITemplate.hexdigit)): self.name += name[index:index + 3] index += 2 elif (codepoint in URITemplate.varchar): 
                # Tail of Variable.__init__'s character-scanning loop; the loop
                # header and the preceding branches begin before this chunk, so
                # the indentation of this fragment is reconstructed -- confirm
                # against the full file.
                self.name += codepoint
            else:
                # Character is not legal in a variable name: report the whole
                # variable spec, re-appending the modifiers parsed so far.
                # NOTE(review): ':' + self.maxLength treats maxLength as a
                # string here, while _encodeStr slices with it -- confirm type.
                raise BadVariable(name + ((':' + self.maxLength) if (self.maxLength) else '') + ('[]' if (self.array) else ('*' if (self.explode) else '')))
            index += 1


class Expression(object):
    """Base class for one parsed component of a URI template.

    Subclasses override expand(); this base class supplies the shared
    percent-encoding helpers used by every expansion style.
    """

    def __init__(self):
        pass

    @property
    def variables(self):
        # A plain expression exposes no variables; Expansion overrides this.
        return []

    def _encode(self, value, legal, pctEncoded):
        """Percent-encode `value`, passing through characters in `legal`.

        When `pctEncoded` is true, an existing '%XX' hex triplet in the
        input is copied through untouched instead of being double-encoded.
        Any other disallowed character is encoded byte-by-byte as UTF-8.
        """
        output = ''
        index = 0
        while (index < len(value)):
            codepoint = value[index]
            if (codepoint in legal):
                output += codepoint
            elif (pctEncoded and ('%' == codepoint) and ((index + 2) < len(value)) and (value[index + 1] in URITemplate.hexdigit) and (value[index + 2] in URITemplate.hexdigit)):
                # Already percent-encoded: keep the '%XX' triplet as-is.
                output += value[index:index + 3]
                index += 2
            else:
                # Python 2 idiom: iterating a byte string yields 1-char
                # strings, and '/' on ints is integer division.
                utf8 = codepoint.encode('utf8')
                for byte in utf8:
                    output += '%' + URITemplate.hexdigit[ord(byte) / 16] + URITemplate.hexdigit[ord(byte) % 16]
            index += 1
        return output

    def _uriEncodeValue(self, value):
        # Values allow only unreserved characters; '%' gets re-encoded.
        return self._encode(value, URITemplate.unreserved, False)

    def _uriEncodeName(self, name):
        # Names additionally pass reserved characters and keep existing
        # '%XX' triplets; a falsy name encodes to the empty string.
        return self._encode(unicode(name), URITemplate.unreserved + URITemplate.reserved, True) if (name) else ''

    def _join(self, prefix, joiner, value):
        # Glue `prefix` and `value` with `joiner`, or return `value` alone
        # when there is no prefix.
        if (prefix):
            return prefix + joiner + value
        return value

    def _encodeStr(self, variable, name, value, prefix, joiner, first):
        """Encode a string value, honouring the :maxLength prefix modifier.

        Raises BadExpansion when a prefix modifier is applied to a nested
        (non-first) value, which RFC 6570 does not allow.
        """
        if (variable.maxLength):
            if (not first):
                raise BadExpansion(variable)
            return self._join(prefix, joiner, self._uriEncodeValue(value[:variable.maxLength]))
        return self._join(prefix, joiner, self._uriEncodeValue(value))

    def _encodeDictItem(self, variable, name, key, item, delim, prefix, joiner, first):
        # One (key, item) pair of a dict-like value. Exploded pairs render
        # as key=value; non-exploded pairs as key,value.
        joiner = '=' if (variable.explode) else ','
        if (variable.array):
            # Array-style naming: name[key] (the bracket form), restarting
            # the prefix at the first level.
            prefix = (prefix + '[' + self._uriEncodeName(key) + ']') if (prefix and not first) else self._uriEncodeName(key)
        else:
            # Dotted naming: prefix.key
            prefix = self._join(prefix, '.', self._uriEncodeName(key))
        return self._encodeVar(variable, key, item, delim, prefix, joiner, False)

    def _encodeListItem(self, variable, name, index, item, delim, prefix, joiner, first):
        # One element of a list-like value.
        if (variable.array):
            # Array-style naming: prefix[index]
            prefix = prefix + '[' + unicode(index) + ']' if (prefix) else ''
            return self._encodeVar(variable, None, item, delim, prefix, joiner, False)
        return self._encodeVar(variable, name, item, delim, prefix, '.', False)

    def _encodeVar(self, variable, name, value, delim = ',', prefix = '', joiner = '=', first = True):
        """Recursively encode `value` for `variable`.

        Dispatches on the value's shape: plain string, dict-like (has keys()
        and __getitem__), list-like (has __getitem__ only), or anything
        else, which is stringified and lowercased (so True -> 'true').
        Returns None for empty containers so callers can drop them.
        """
        if (isinstance(value, basestring)):
            return self._encodeStr(variable, name, value, prefix, joiner, first)
        elif (hasattr(value, 'keys') and hasattr(value, '__getitem__')):    # dict-like
            if (len(value)):
                encodedItems = [self._encodeDictItem(variable, name, key, value[key], delim, prefix, joiner, first) for key in value.keys()]
                return delim.join([item for item in encodedItems if (item is not None)])
            return None
        elif (hasattr(value, '__getitem__')):   # list-like
            if (len(value)):
                encodedItems = [self._encodeListItem(variable, name, index, item, delim, prefix, joiner, first) for index, item in enumerate(value)]
                return delim.join([item for item in encodedItems if (item is not None)])
            return None
        else:
            return self._encodeStr(variable, name, unicode(value).lower(), prefix, joiner, first)

    def expand(self, values):
        # Base expressions expand to nothing; subclasses override.
        return None


class Literal(Expression):
    """Verbatim template text between expansions."""

    def __init__(self, value):
        Expression.__init__(self)
        self.value = value

    def expand(self, values):
        # Literal text passes reserved characters and existing '%XX'
        # triplets through unchanged.
        return self._encode(self.value, (URITemplate.unreserved + URITemplate.reserved), True)


class Expansion(Expression):
    """A {...} expansion holding one or more comma-separated variables.

    `operator` is prepended once to the joined result; `varJoiner`
    separates the expanded variables. Subclasses override both per
    RFC 6570 operator.
    """

    operator = ''
    varJoiner = ','

    def __init__(self, variables):
        Expression.__init__(self)
        # `variables` is the raw comma-separated variable list from the
        # template (operator character already stripped by subclasses).
        self.vars = [Variable(var) for var in variables.split(',')]

    @property
    def variables(self):
        return [var.name for var in self.vars]

    def _expandVar(self, variable, value):
        return self._encodeVar(variable, self._uriEncodeName(variable.name), value)

    def expand(self, values):
        """Expand against the `values` mapping.

        Variables missing from `values` (or mapped to None) are skipped;
        if nothing expands, the whole expression renders as None.
        """
        expandedVars = []
        for var in self.vars:
            if ((var.name in values) and (values[var.name] is not None)):
                expandedVar = self._expandVar(var, values[var.name])
                if (expandedVar is not None):
                    expandedVars.append(expandedVar)
        if (len(expandedVars)):
            expanded = self.varJoiner.join(expandedVars)
            if (expanded is not None):
                return self.operator + expanded
        return None


class SimpleExpansion(Expansion):
    """{var} -- simple string expansion (no operator character)."""

    def __init__(self, variables):
        Expansion.__init__(self, variables)


class ReservedExpansion(Expansion):
    """{+var} -- reserved expansion; reserved characters pass through."""

    def __init__(self, variables):
        # variables[1:] drops the leading '+' operator character.
        Expansion.__init__(self, variables[1:])

    def _uriEncodeValue(self, value):
        # Unlike the base class, values may contain reserved characters
        # and pre-encoded '%XX' triplets.
        return self._encode(value, (URITemplate.unreserved + URITemplate.reserved), True)


class FragmentExpansion(ReservedExpansion):
    """{#var} -- fragment expansion; reserved semantics prefixed by '#'."""

    operator = '#'

    def __init__(self, variables):
        # ReservedExpansion.__init__ strips the leading operator character.
        ReservedExpansion.__init__(self, variables)


class LabelExpansion(Expansion):
    """{.var} -- dot-label expansion."""

    operator = '.'
    varJoiner = '.'

    def __init__(self, variables):
        # variables[1:] drops the leading '.' operator character.
        Expansion.__init__(self, variables[1:])

    def _expandVar(self, variable, value):
        # Exploded composite values are themselves dot-joined.
        return self._encodeVar(variable, self._uriEncodeName(variable.name), value, delim = ('.' if variable.explode else ','))


class PathExpansion(Expansion):
    """{/var} -- path-segment expansion."""

    operator = '/'
    varJoiner = '/'

    def __init__(self, variables):
        # variables[1:] drops the leading '/' operator character.
        Expansion.__init__(self, variables[1:])

    def _expandVar(self, variable, value):
        # Exploded composite values become separate path segments.
        return self._encodeVar(variable, self._uriEncodeName(variable.name), value, delim = ('/' if variable.explode else ','))


class PathStyleExpansion(Expansion):
    """{;var} -- path-style (matrix) parameter expansion.

    Also the base for the form-style query expansions, which share the
    name=value pairing logic and override only operator/joiner/delim.
    """

    operator = ';'
    varJoiner = ';'

    def __init__(self, variables):
        # variables[1:] drops the leading ';' operator character.
        Expansion.__init__(self, variables[1:])

    def _encodeStr(self, variable, name, value, prefix, joiner, first):
        if (variable.array):
            # Array naming nests the current name in brackets: prefix[name].
            if (name):
                prefix = prefix + '[' + name + ']' if (prefix) else name
        elif (variable.explode):
            prefix = self._join(prefix, '.', name)
        return Expansion._encodeStr(self, variable, name, value, prefix, joiner, first)

    def _encodeDictItem(self, variable, name, key, item, delim, prefix, joiner, first):
        if (variable.array):
            if (name):
                prefix = prefix + '[' + name + ']' if (prefix) else name
            prefix = (prefix + '[' + self._uriEncodeName(key) + ']') if (prefix and not first) else self._uriEncodeName(key)
        elif (variable.explode):
            # Exploded top-level pairs carry no prefix; nested ones dot-join.
            prefix = self._join(prefix, '.', name) if (not first) else ''
        else:
            # Non-exploded pairs render as key,value (comma joiner).
            # NOTE(review): indentation of `joiner = ','` reconstructed as
            # part of this else-branch -- the traced non-explode output
            # (key,value pairs) requires it here; confirm against upstream.
            prefix = self._join(prefix, '.', self._uriEncodeName(key))
            joiner = ','
        return self._encodeVar(variable, self._uriEncodeName(key) if (not variable.array) else '', item, delim, prefix, joiner, False)

    def _encodeListItem(self, variable, name, index, item, delim, prefix, joiner, first):
        if (variable.array):
            if (name):
                prefix = prefix + '[' + name + ']' if (prefix) else name
            return self._encodeVar(variable, unicode(index), item, delim, prefix, joiner, False)
        return self._encodeVar(variable, name, item, delim, prefix, '=' if (variable.explode) else '.', False)

    def _expandVar(self, variable, value):
        if (variable.explode):
            # Exploded values produce their own name=value pairs, ';'-joined.
            return self._encodeVar(variable, self._uriEncodeName(variable.name), value, delim = ';')
        value = self._encodeVar(variable, self._uriEncodeName(variable.name), value, delim = ',')
        # Empty (falsy) values render as the bare parameter name, per the
        # ';' operator's form in RFC 6570.
        return (self._uriEncodeName(variable.name) + '=' + value) if (value) else variable.name


class FormStyleQueryExpansion(PathStyleExpansion):
    """{?var} -- form-style query expansion ('?name=value&...')."""

    operator = '?'
    varJoiner = '&'

    def __init__(self, variables):
        # PathStyleExpansion.__init__ strips the leading operator character.
        PathStyleExpansion.__init__(self, variables)

    def _expandVar(self, variable, value):
        if (variable.explode):
            return self._encodeVar(variable, self._uriEncodeName(variable.name), value, delim = '&')
        value = self._encodeVar(variable, self._uriEncodeName(variable.name), value, delim = ',')
        # Unlike path-style, a None value drops the pair entirely; an empty
        # string still renders as 'name='.
        return (self._uriEncodeName(variable.name) + '=' + value) if (value is not None) else None


class FormStyleQueryContinuation(FormStyleQueryExpansion):
    """{&var} -- form-style query continuation ('&name=value...')."""

    operator = '&'

    def __init__(self, variables):
        FormStyleQueryExpansion.__init__(self, variables)
mpl-2.0