repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringclasses 981 values | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15 values |
|---|---|---|---|---|---|
admiralspark/NetSpark-Scripts | Example_Scripts/Cisco/auditQOS.py | 1 | 2040 | #Global imports
from netmiko import ConnectHandler
from datetime import datetime
import csv, os.path
#Local imports
import credentials
# Begin timing the script so the total runtime can be reported at the end.
start_time = datetime.now()
# Define the primary function (to be moved to a separate module some day...)
def nc(username, password, secret, customer):
    """Run 'sh mls qos' on every device listed in a customer CSV file.

    Args:
        username: login name passed to netmiko's ConnectHandler.
        password: login password.
        secret: enable secret.
        customer: path to a CSV file with SysName, device_type and
            IP_Address columns (one row per device).

    Returns:
        None. Output is printed to stdout, one banner per device.
    """
    with open(customer, mode='r') as csvfile:
        reader = csv.DictReader(csvfile)
        # Iterate through every row in the CSV file and build a device dict.
        for row in reader:
            hostname = row['SysName']
            device_type = row['device_type']
            ip = row['IP_Address']
            switch = {
                'device_type': device_type,
                'ip': ip,
                # BUG FIX: use the function parameters. The original read the
                # module-level USERNAME/PASSWORD/SECRET globals here, silently
                # ignoring the arguments passed to nc().
                'username': username,
                'password': password,
                'secret': secret,
                'verbose': False,
            }
            # This is your connection handler for commands from here on out.
            net_connect = ConnectHandler(**switch)
            net_connect.enable()
            # or maybe send configuration stuff with
            # net_connect.send_config_set(...)
            connect_return = net_connect.send_command("sh mls qos")
            # Now make it pretty
            print("\n\n>>>>>>>>> Device {0} <<<<<<<<<".format(hostname))
            print(connect_return)
            print("\n>>>>>>>>> End <<<<<<<<<")
            # Disconnect from this session
            net_connect.disconnect()
# Grab the Customer name to search.
# NOTE(review): assumes Python 3 input(); on Python 2 this would eval the typed text.
CUSTOMER = input('Customer name: ') + ".csv"
# Flesh out these variables using the credentials.cred_csv module
USERNAME, PASSWORD, SECRET = credentials.cred_csv()
# Give it a command:
# command_string = "write mem" # can be passed to nc...
# Run the primary function in this program
nc(USERNAME, PASSWORD, SECRET, CUSTOMER)
end_time = datetime.now()
# How long did it run?
total_time = end_time - start_time
print("\nTotal time for script: \n" + str(total_time))
| gpl-3.0 |
TheWardoctor/Wardoctors-repo | script.stargate.guide/resources/playwith/playwith.py | 1 | 2975 | import sys
import xbmc,xbmcaddon,xbmcvfs
import sqlite3
from subprocess import Popen
import datetime,time
# from vpnapi import VPNAPI

# CLI arguments: channel id and programme start time (epoch seconds, as string).
channel = sys.argv[1]
start = sys.argv[2]

ADDON = xbmcaddon.Addon(id='script.stargate.guide')
def adapt_datetime(ts):
    """sqlite3 adapter: persist a datetime as POSIX epoch seconds (local time)."""
    # http://docs.python.org/2/library/sqlite3.html#registering-an-adapter-callable
    seconds_since_epoch = time.mktime(ts.timetuple())
    return seconds_since_epoch
def convert_datetime(ts):
    """sqlite3 converter: turn a stored 'timestamp' value back into a datetime.

    Returns None when the stored value cannot be interpreted as a number of
    epoch seconds, so bad rows do not abort the query.
    """
    try:
        return datetime.datetime.fromtimestamp(float(ts))
    except (TypeError, ValueError):
        # BUG FIX: float() raises TypeError for None / non-numeric objects
        # (the original only caught ValueError and would crash the converter).
        return None
# Store datetimes as epoch floats and convert them back transparently.
sqlite3.register_adapter(datetime.datetime, adapt_datetime)
sqlite3.register_converter('timestamp', convert_datetime)

path = xbmc.translatePath('special://profile/addon_data/script.stargate.guide/source.db')
try:
    conn = sqlite3.connect(path, detect_types=sqlite3.PARSE_DECLTYPES)
    conn.row_factory = sqlite3.Row
except Exception as detail:
    # NOTE(review): this only logs -- if connect fails, `conn` is never bound
    # and conn.cursor() below raises NameError.
    xbmc.log("EXCEPTION: (script.stargate.guide) %s" % detail, xbmc.LOGERROR)

# Get the Program Info from the database
c = conn.cursor()
startDate = datetime.datetime.fromtimestamp(float(start))
c.execute('SELECT DISTINCT * FROM programs WHERE channel=? AND start_date = ?', [channel,startDate])
for row in c:
    title = row["title"]
    endDate = row["end_date"]
    duration = endDate - startDate
    # User-configured padding (minutes) before/after the programme.
    before = int(ADDON.getSetting('autoplaywiths.before'))
    after = int(ADDON.getSetting('autoplaywiths.after'))
    extra = (before + after) * 60
    #TODO start from now
    seconds = duration.seconds + extra
    # Cap the recording length at 4 hours.
    if seconds > (3600*4):
        seconds = 3600*4
    break

# Find the channel's stream url
c.execute('SELECT stream_url FROM custom_stream_url WHERE channel=?', [channel])
row = c.fetchone()
url = ""
if row:
    url = row[0]
if not url:
    # No stream configured for this channel; nothing to record.
    quit()
# Uncomment this if you want to use VPN Mgr filtering. Need to import VPNAPI.py
# else:
# if ADDON.getSetting('vpnmgr.connect') == "true":
# vpndefault = False
# if ADDON.getSetting('vpnmgr.default') == "true":
# vpndefault = True
# api = VPNAPI()
# if url[0:9] == 'plugin://':
# api.filterAndSwitch(url, 0, vpndefault, True)
# else:
# if vpndefault: api.defaultVPN(True)
# Find the actual url used to play the stream
#core = "dummy"
#xbmc.executebuiltin('PlayWith(%s)' % core)
player = xbmc.Player()
player.play(url)
count = 30
url = ""
# Poll for up to ~30 seconds until playback starts, then grab the resolved URL.
while count:
    count = count - 1
    time.sleep(1)
    if player.isPlaying():
        url = player.getPlayingFile()
        break
player.stop()

# Play with your own preferred player and paths
if url:
    # NOTE(review): `title` and `seconds` are only bound if the programme
    # query above returned a row -- confirm the guide DB always has one.
    name = "%s = %s = %s" % (start,channel,title)
    name = name.encode("cp1252")
    filename = xbmc.translatePath("special://temp/%s.ts" % name)
    #filename = "/storage/recordings/%s.ts" % name
    ffmpeg = r"c:\utils\ffmpeg.exe" #WINDOWS
    #ffmpeg = r"/usr/bin/ffmpeg" #LIBREELEC
    # Stream-copy (no re-encode) for at most `seconds` seconds.
    cmd = [ffmpeg, "-y", "-i", url, "-c", "copy", "-t", str(seconds), filename]
    #p = Popen(cmd,shell=True)
    p = Popen(cmd,shell=False)
| apache-2.0 |
Kazade/NeHe-Website | google_appengine/lib/django_1_2/tests/regressiontests/get_or_create_regress/tests.py | 88 | 2540 | from django.test import TestCase
from models import Author, Publisher
class GetOrCreateTests(TestCase):
    """Regression tests for get_or_create() invoked through related managers."""

    def test_related(self):
        """get_or_create() via forward/reverse M2M and FK managers is idempotent."""
        p = Publisher.objects.create(name="Acme Publishing")

        # Create a book through the publisher.
        book, created = p.books.get_or_create(name="The Book of Ed & Fred")
        self.assertTrue(created)
        # The publisher should have one book.
        self.assertEqual(p.books.count(), 1)

        # Try get_or_create again, this time nothing should be created.
        book, created = p.books.get_or_create(name="The Book of Ed & Fred")
        self.assertFalse(created)
        # And the publisher should still have one book.
        self.assertEqual(p.books.count(), 1)

        # Add an author to the book.
        ed, created = book.authors.get_or_create(name="Ed")
        self.assertTrue(created)
        # The book should have one author.
        self.assertEqual(book.authors.count(), 1)

        # Try get_or_create again, this time nothing should be created.
        ed, created = book.authors.get_or_create(name="Ed")
        self.assertFalse(created)
        # And the book should still have one author.
        self.assertEqual(book.authors.count(), 1)

        # Add a second author to the book.
        fred, created = book.authors.get_or_create(name="Fred")
        self.assertTrue(created)
        # The book should have two authors now.
        self.assertEqual(book.authors.count(), 2)

        # Create an Author not tied to any books.
        Author.objects.create(name="Ted")
        # There should be three Authors in total. The book object should have two.
        self.assertEqual(Author.objects.count(), 3)
        self.assertEqual(book.authors.count(), 2)

        # Try creating a book through an author.
        _, created = ed.books.get_or_create(name="Ed's Recipes", publisher=p)
        self.assertTrue(created)
        # Now Ed has two Books, Fred just one.
        self.assertEqual(ed.books.count(), 2)
        self.assertEqual(fred.books.count(), 1)

        # Use the publisher's primary key value instead of a model instance.
        _, created = ed.books.get_or_create(name='The Great Book of Ed', publisher_id=p.id)
        self.assertTrue(created)
        # Try get_or_create again, this time nothing should be created.
        _, created = ed.books.get_or_create(name='The Great Book of Ed', publisher_id=p.id)
        self.assertFalse(created)
        # The publisher should have three books.
        self.assertEqual(p.books.count(), 3)
| bsd-3-clause |
partofthething/home-assistant | homeassistant/components/dweet/sensor.py | 21 | 3239 | """Support for showing values from Dweet.io."""
from datetime import timedelta
import json
import logging
import dweepy
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_DEVICE,
CONF_NAME,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)

DEFAULT_NAME = "Dweet.io Sensor"

# Poll dweet.io at most once per minute.
SCAN_INTERVAL = timedelta(minutes=1)

# Device/thing name and value template are mandatory; name and unit optional.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_DEVICE): cv.string,
        vol.Required(CONF_VALUE_TEMPLATE): cv.template,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
    }
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Dweet sensor.

    Validates that the configured device exists on dweet.io and that the
    value template extracts something from its latest dweet before adding
    the entity.
    """
    name = config.get(CONF_NAME)
    device = config.get(CONF_DEVICE)
    value_template = config.get(CONF_VALUE_TEMPLATE)
    unit = config.get(CONF_UNIT_OF_MEASUREMENT)

    # Attach hass so the template can render.
    # NOTE(review): CONF_VALUE_TEMPLATE is Required in PLATFORM_SCHEMA, so
    # value_template should never actually be None here.
    if value_template is not None:
        value_template.hass = hass

    try:
        # Probe the device once; abort setup if it does not exist.
        content = json.dumps(dweepy.get_latest_dweet_for(device)[0]["content"])
    except dweepy.DweepyError:
        _LOGGER.error("Device/thing %s could not be found", device)
        return

    # Abort if the template extracts nothing from the latest dweet.
    if value_template.render_with_possible_json_value(content) == "":
        _LOGGER.error("%s was not found", value_template)
        return

    dweet = DweetData(device)

    add_entities([DweetSensor(hass, dweet, name, value_template, unit)], True)
class DweetSensor(Entity):
    """Expose one templated value from a device's latest dweet as a sensor."""

    def __init__(self, hass, dweet, name, value_template, unit_of_measurement):
        """Initialize the sensor."""
        self.hass = hass
        self.dweet = dweet
        self._name = name
        self._value_template = value_template
        self._state = None
        self._unit_of_measurement = unit_of_measurement

    @property
    def name(self):
        """Name of the sensor."""
        return self._name

    @property
    def unit_of_measurement(self):
        """Unit the value is expressed in."""
        return self._unit_of_measurement

    @property
    def state(self):
        """Current state of the sensor."""
        return self._state

    def update(self):
        """Refresh the cached dweet data and re-render the value template."""
        self.dweet.update()
        data = self.dweet.data
        if data is None:
            self._state = None
            return
        payload = json.dumps(data[0]["content"])
        self._state = self._value_template.render_with_possible_json_value(
            payload, None
        )
class DweetData:
    """Poll dweet.io for the latest dweet of a single device/thing."""

    def __init__(self, device):
        """Remember the device name; no data until the first update()."""
        self._device = device
        self.data = None

    def update(self):
        """Fetch the latest dweet; on failure log a warning and clear data."""
        try:
            latest = dweepy.get_latest_dweet_for(self._device)
        except dweepy.DweepyError:
            _LOGGER.warning("Device %s doesn't contain any data", self._device)
            self.data = None
        else:
            self.data = latest
| mit |
pk-sam/crosswalk-test-suite | usecase/usecase-wrt-auto-tests/samples/ApkBuildAnyLocation/absolutePath.py | 2 | 2850 | #!/usr/bin/env python
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Hongjuan, Wang<hongjuanx.wang@intel.com>
import unittest
import os
import sys
import commands
import shutil
import comm
class TestPackertoolsFunctions(unittest.TestCase):
def test_absolutePath_hostApp(self):
comm.setUp()
cmd = "python %smake_apk.py --package=org.xwalk.example --name=example --arch=%s --mode=%s --app-url=https://crosswalk-project.org/" % \
(comm.Pck_Tools, comm.ARCH, comm.MODE)
print os.getcwd()
comm.gen_pkg(cmd, self)
def test_absolutePath_manifest(self):
comm.setUp()
manifestPath = comm.ConstPath + "/res/manifest.json"
cmd = "python %smake_apk.py --package=org.xwalk.example --arch=%s --mode=%s --manifest=%s" % \
(comm.Pck_Tools, comm.ARCH, comm.MODE, manifestPath)
os.chdir(comm.ConstPath + '/../../tools')
print os.getcwd()
comm.gen_pkg(cmd, self)
def test_absolutePath_package(self):
comm.setUp()
appRoot = comm.ConstPath + "/res/"
cmd = "python %smake_apk.py --package=org.xwalk.example --name=example --arch=%s --mode=%s --app-root=%s --app-local-path=index.html" % \
(comm.Pck_Tools, comm.ARCH, comm.MODE, appRoot)
os.chdir(comm.ConstPath + '/../../tools/crosswalk')
print os.getcwd()
comm.gen_pkg(cmd, self)
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    unittest.main()
| bsd-3-clause |
capocchi/DEVSimPy-plugin-SIG-viewer | ExtendedFrame.py | 1 | 9957 | # -*- coding: utf-8 -*-
#import wxversion
#wxversion.select('2.8.11')
import wx
import os
import sys
import wx.combo
import wx.lib.agw.foldpanelbar as fpb
_ = wx.GetTranslation
def GetCollapsedIconData():
    # Raw PNG bytes for the 16x16 "collapsed" fold icon, embedded so the demo
    # needs no external image files; decoded via GetCollapsedIconImage().
    return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\
\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\
\x00\x01\x8eIDAT8\x8d\xa5\x93-n\xe4@\x10\x85?g\x03\n6lh)\xc4\xd2\x12\xc3\x81\
\xd6\xa2I\x90\x154\xb9\x81\x8f1G\xc8\x11\x16\x86\xcd\xa0\x99F\xb3A\x91\xa1\
\xc9J&\x96L"5lX\xcc\x0bl\xf7v\xb2\x7fZ\xa5\x98\xebU\xbdz\xf5\\\x9deW\x9f\xf8\
H\\\xbfO|{y\x9dT\x15P\x04\x01\x01UPUD\x84\xdb/7YZ\x9f\xa5\n\xce\x97aRU\x8a\
\xdc`\xacA\x00\x04P\xf0!0\xf6\x81\xa0\xf0p\xff9\xfb\x85\xe0|\x19&T)K\x8b\x18\
\xf9\xa3\xe4\xbe\xf3\x8c^#\xc9\xd5\n\xa8*\xc5?\x9a\x01\x8a\xd2b\r\x1cN\xc3\
\x14\t\xce\x97a\xb2F0Ks\xd58\xaa\xc6\xc5\xa6\xf7\xdfya\xe7\xbdR\x13M2\xf9\
\xf9qKQ\x1fi\xf6-\x00~T\xfac\x1dq#\x82,\xe5q\x05\x91D\xba@\xefj\xba1\xf0\xdc\
zzW\xcff&\xb8,\x89\xa8@Q\xd6\xaaf\xdfRm,\xee\xb1BDxr#\xae\xf5|\xddo\xd6\xe2H\
\x18\x15\x84\xa0q@]\xe54\x8d\xa3\xedf\x05M\xe3\xd8Uy\xc4\x15\x8d\xf5\xd7\x8b\
~\x82\x0fh\x0e"\xb0\xad,\xee\xb8c\xbb\x18\xe7\x8e;6\xa5\x89\x04\xde\xff\x1c\
\x16\xef\xe0p\xfa>\x19\x11\xca\x8d\x8d\xe0\x93\x1b\x01\xd8m\xf3(;x\xa5\xef=\
\xb7w\xf3\x1d$\x7f\xc1\xe0\xbd\xa7\xeb\xa0(,"Kc\x12\xc1+\xfd\xe8\tI\xee\xed)\
\xbf\xbcN\xc1{D\x04k\x05#\x12\xfd\xf2a\xde[\x81\x87\xbb\xdf\x9cr\x1a\x87\xd3\
0)\xba>\x83\xd5\xb97o\xe0\xaf\x04\xff\x13?\x00\xd2\xfb\xa9`z\xac\x80w\x00\
\x00\x00\x00IEND\xaeB`\x82'
def GetCollapsedIconBitmap():
    """Return the collapsed-fold icon as a wx.Bitmap."""
    image = GetCollapsedIconImage()
    return wx.BitmapFromImage(image)
def GetCollapsedIconImage():
    """Decode the embedded collapsed-icon PNG data into a wx.Image."""
    import cStringIO
    raw = cStringIO.StringIO(GetCollapsedIconData())
    return wx.ImageFromStream(raw)
#----------------------------------------------------------------------
def GetExpandedIconData():
    # Raw PNG bytes for the 16x16 "expanded" fold icon, embedded so the demo
    # needs no external image files; decoded via GetExpandedIconImage().
    return \
'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\
\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x04sBIT\x08\x08\x08\x08|\x08d\x88\x00\
\x00\x01\x9fIDAT8\x8d\x95\x93\xa1\x8e\xdc0\x14EO\xb2\xc4\xd0\xd2\x12\xb7(mI\
\xa4%V\xd1lQT4[4-\x9a\xfe\xc1\xc2|\xc6\xc2~BY\x83:A3E\xd3\xa0*\xa4\xd2\x90H!\
\x95\x0c\r\r\x1fK\x81g\xb2\x99\x84\xb4\x0fY\xd6\xbb\xc7\xf7>=\'Iz\xc3\xbcv\
\xfbn\xb8\x9c\x15 \xe7\xf3\xc7\x0fw\xc9\xbc7\x99\x03\x0e\xfbn0\x99F+\x85R\
\x80RH\x10\x82\x08\xde\x05\x1ef\x90+\xc0\xe1\xd8\ryn\xd0Z-\\A\xb4\xd2\xf7\
\x9e\xfbwoF\xc8\x088\x1c\xbbae\xb3\xe8y&\x9a\xdf\xf5\xbd\xe7\xfem\x84\xa4\
\x97\xccYf\x16\x8d\xdb\xb2a]\xfeX\x18\xc9s\xc3\xe1\x18\xe7\x94\x12cb\xcc\xb5\
\xfa\xb1l8\xf5\x01\xe7\x84\xc7\xb2Y@\xb2\xcc0\x02\xb4\x9a\x88%\xbe\xdc\xb4\
\x9e\xb6Zs\xaa74\xadg[6\x88<\xb7]\xc6\x14\x1dL\x86\xe6\x83\xa0\x81\xba\xda\
\x10\x02x/\xd4\xd5\x06\r\x840!\x9c\x1fM\x92\xf4\x86\x9f\xbf\xfe\x0c\xd6\x9ae\
\xd6u\x8d \xf4\xf5\x165\x9b\x8f\x04\xe1\xc5\xcb\xdb$\x05\x90\xa97@\x04lQas\
\xcd*7\x14\xdb\x9aY\xcb\xb8\\\xe9E\x10|\xbc\xf2^\xb0E\x85\xc95_\x9f\n\xaa/\
\x05\x10\x81\xce\xc9\xa8\xf6><G\xd8\xed\xbbA)X\xd9\x0c\x01\x9a\xc6Q\x14\xd9h\
[\x04\xda\xd6c\xadFkE\xf0\xc2\xab\xd7\xb7\xc9\x08\x00\xf8\xf6\xbd\x1b\x8cQ\
\xd8|\xb9\x0f\xd3\x9a\x8a\xc7\x08\x00\x9f?\xdd%\xde\x07\xda\x93\xc3{\x19C\
\x8a\x9c\x03\x0b8\x17\xe8\x9d\xbf\x02.>\x13\xc0n\xff{PJ\xc5\xfdP\x11""<\xbc\
\xff\x87\xdf\xf8\xbf\xf5\x17FF\xaf\x8f\x8b\xd3\xe6K\x00\x00\x00\x00IEND\xaeB\
`\x82'
def GetExpandedIconBitmap():
    """Return the expanded-fold icon as a wx.Bitmap."""
    image = GetExpandedIconImage()
    return wx.BitmapFromImage(image)
def GetExpandedIconImage():
    """Decode the embedded expanded-icon PNG data into a wx.Image."""
    import cStringIO
    raw = cStringIO.StringIO(GetExpandedIconData())
    return wx.ImageFromStream(raw)
class Extended(wx.Frame):
    """FoldPanelBar demo frame: a collapsible panel bar in a left sash window,
    with a placeholder panel filling the remaining space.

    NOTE(review): several handlers reference widgets/ids that are never
    created in this trimmed-down demo (self._rslider1/.../self._bslider2,
    self.radiocontrols, self._single, self.ID_WINDOW_LEFT1, self.ID_USE_*).
    They look like leftovers from the original wxPython FoldPanelBar demo and
    will raise AttributeError if their events ever fire -- confirm before
    wiring them up.
    """

    def __init__(self, parent, id=wx.ID_ANY, title="", pos=wx.DefaultPosition,
                 size=(700, 400), style=wx.DEFAULT_FRAME_STYLE):
        wx.Frame.__init__(self, parent, id, title, pos, size, style)

        self._flags = 0

        # Two-field status bar; negative widths are proportional shares.
        self.statusbar = self.CreateStatusBar(2, wx.ST_SIZEGRIP)
        self.statusbar.SetStatusWidths([-4, -3])
        self.statusbar.SetStatusText("", 0)
        self.statusbar.SetStatusText("", 1)

        # Left sash window that hosts the fold panel bar.
        self._leftWindow1 = wx.SashLayoutWindow(self, 101, wx.DefaultPosition,
                                                wx.Size(200, 1000), wx.NO_BORDER |
                                                wx.SW_3D | wx.CLIP_CHILDREN)
        self._leftWindow1.SetDefaultSize(wx.Size(220, 1000))
        self._leftWindow1.SetOrientation(wx.LAYOUT_VERTICAL)
        self._leftWindow1.SetAlignment(wx.LAYOUT_LEFT)
        self._leftWindow1.SetSashVisible(wx.SASH_RIGHT, True)
        self._leftWindow1.SetExtraBorderSize(10)

        self._pnl = 0

        self.remainingSpace = self.CreateRemainingSpace()
        self.ReCreateFoldPanel(0)

    def OnSize(self, event):
        """Re-run the layout algorithm so the right panel fills the frame."""
        wx.LayoutAlgorithm().LayoutWindow(self, self.remainingSpace)
        event.Skip()

    def OnFoldPanelBarDrag(self, event):
        """Resize the left sash window while the user drags its sash."""
        if event.GetDragStatus() == wx.SASH_STATUS_OUT_OF_RANGE:
            return

        # NOTE(review): self.ID_WINDOW_LEFT1 is never defined in this class.
        if event.GetId() == self.ID_WINDOW_LEFT1:
            self._leftWindow1.SetDefaultSize(wx.Size(event.GetDragRect().width, 1000))

        # Leaves bits of itself behind sometimes
        wx.LayoutAlgorithm().LayoutWindow(self, self.remainingSpace)
        self.remainingSpace.Refresh()

        event.Skip()

    def CreateRemainingSpace(self):
        """Create the default right-hand placeholder panel."""
        panel = wx.Panel(self, -1, style=wx.SUNKEN_BORDER)
        wx.StaticText(panel, -1,
                      "Use your imagination for what kinds of things to put in this window...",
                      (15, 30))
        return panel

    def ReCreateFoldPanel(self, fpb_flags):
        """Destroy and rebuild the FoldPanelBar with the given agw style flags."""
        # delete earlier panel
        self._leftWindow1.DestroyChildren()

        # recreate the foldpanelbar
        self._pnl = fpb.FoldPanelBar(self._leftWindow1, -1, wx.DefaultPosition,
                                     wx.Size(-1, -1), agwStyle=fpb_flags)

        Images = wx.ImageList(16, 16)
        Images.Add(GetExpandedIconBitmap())
        Images.Add(GetCollapsedIconBitmap())

        item = self._pnl.AddFoldPanel(_("Options"), collapsed=False, foldIcons=Images)
        self._pnl.AddFoldPanelWindow(item, wx.StaticText(item, -1, "Some options..."),
                                     fpb.FPB_ALIGN_WIDTH, 5, 20)

        self._leftWindow1.SizeWindows()

    def _slider_colours(self):
        """Return the (first, second) gradient colours from the RGB sliders.

        Extracted from OnExpandMe/OnSlideColour/OnStyleChange, which all
        duplicated this code. NOTE(review): the slider widgets are never
        created in this stripped demo.
        """
        col1 = wx.Colour(self._rslider1.GetValue(), self._gslider1.GetValue(),
                         self._bslider1.GetValue())
        col2 = wx.Colour(self._rslider2.GetValue(), self._gslider2.GetValue(),
                         self._bslider2.GetValue())
        return col1, col2

    def _selected_caption_style(self):
        """Map the first checked control in self.radiocontrols to a caption style.

        Index 0..3 select gradient-V, gradient-H, single, rectangle; any later
        index (or no checked control) falls back to the filled rectangle,
        matching the original if/elif chain.
        """
        styles = [fpb.CAPTIONBAR_GRADIENT_V, fpb.CAPTIONBAR_GRADIENT_H,
                  fpb.CAPTIONBAR_SINGLE, fpb.CAPTIONBAR_RECTANGLE,
                  fpb.CAPTIONBAR_FILLED_RECTANGLE]
        counter = 0
        for items in self.radiocontrols:
            if items.GetValue():
                break
            counter += 1
        return styles[counter] if counter < len(styles) else styles[-1]

    def OnCollapseMe(self, event):
        """Collapse every fold panel in the bar."""
        for i in range(0, self._pnl.GetCount()):
            item = self._pnl.GetFoldPanel(i)
            self._pnl.Collapse(item)

    def OnExpandMe(self, event):
        """Apply the slider colours and selected style to all caption bars."""
        col1, col2 = self._slider_colours()
        style = fpb.CaptionBarStyle()
        style.SetFirstColour(col1)
        style.SetSecondColour(col2)
        style.SetCaptionStyle(self._selected_caption_style())
        self._pnl.ApplyCaptionStyleAll(style)

    def OnSlideColour(self, event):
        """Apply the slider colours and selected style to the first panel only."""
        col1, col2 = self._slider_colours()
        style = fpb.CaptionBarStyle()
        style.SetFirstColour(col1)
        style.SetSecondColour(col2)
        style.SetCaptionStyle(self._selected_caption_style())
        item = self._pnl.GetFoldPanel(0)
        self._pnl.ApplyCaptionStyle(item, style)

    def OnStyleChange(self, event):
        """Change the caption-bar style based on the triggering event id."""
        style = fpb.CaptionBarStyle()
        eventid = event.GetId()

        if eventid == self.ID_USE_HGRADIENT:
            style.SetCaptionStyle(fpb.CAPTIONBAR_GRADIENT_H)
        elif eventid == self.ID_USE_VGRADIENT:
            style.SetCaptionStyle(fpb.CAPTIONBAR_GRADIENT_V)
        elif eventid == self.ID_USE_SINGLE:
            style.SetCaptionStyle(fpb.CAPTIONBAR_SINGLE)
        elif eventid == self.ID_USE_RECTANGLE:
            style.SetCaptionStyle(fpb.CAPTIONBAR_RECTANGLE)
        elif eventid == self.ID_USE_FILLED_RECTANGLE:
            style.SetCaptionStyle(fpb.CAPTIONBAR_FILLED_RECTANGLE)
        else:
            # BUG FIX: the original did `raise "ERROR: ..."` -- raising a
            # plain string is illegal (TypeError on Python 2.6+ and 3.x).
            raise ValueError("Undefined Style Selected For CaptionBar: " + repr(eventid))

        col1, col2 = self._slider_colours()
        style.SetFirstColour(col1)
        style.SetSecondColour(col2)

        if self._single.GetValue():
            item = self._pnl.GetFoldPanel(1)
            self._pnl.ApplyCaptionStyle(item, style)
        else:
            self._pnl.ApplyCaptionStyleAll(style)
if __name__ == '__main__':
    # Standalone demo entry point (wx.PySimpleApp is the old wxPython 2.x API).
    app = wx.PySimpleApp()
    frame = Extended(None, title="FoldPanelBar Extended Demo")
    frame.Show()
    app.MainLoop()
| lgpl-3.0 |
vaygr/ansible | lib/ansible/utils/module_docs_fragments/ipa.py | 27 | 2627 | # Copyright (c) 2017-18, Ansible Project
# Copyright (c) 2017-18, Abhijeet Kasurde (akasurde@redhat.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
    """Shared documentation fragment for the FreeIPA/IPA modules."""

    # Parameters for FreeIPA/IPA modules
    DOCUMENTATION = '''
options:
  ipa_port:
    description:
    - Port of FreeIPA / IPA server.
    - If the value is not specified in the task, the value of environment variable C(IPA_PORT) will be used instead.
    - If both the environment variable C(IPA_PORT) and the value are not specified in the task, then default value is set.
    - 'Environment variable fallback mechanism is added in version 2.5.'
    default: 443
  ipa_host:
    description:
    - IP or hostname of IPA server.
    - If the value is not specified in the task, the value of environment variable C(IPA_HOST) will be used instead.
    - If both the environment variable C(IPA_HOST) and the value are not specified in the task, then default value is set.
    - 'Environment variable fallback mechanism is added in version 2.5.'
    default: ipa.example.com
  ipa_user:
    description:
    - Administrative account used on IPA server.
    - If the value is not specified in the task, the value of environment variable C(IPA_USER) will be used instead.
    - If both the environment variable C(IPA_USER) and the value are not specified in the task, then default value is set.
    - 'Environment variable fallback mechanism is added in version 2.5.'
    default: admin
  ipa_pass:
    description:
    - Password of administrative user.
    - If the value is not specified in the task, the value of environment variable C(IPA_PASS) will be used instead.
    - If both the environment variable C(IPA_PASS) and the value are not specified in the task, then default value is set.
    - 'Environment variable fallback mechanism is added in version 2.5.'
    required: true
  ipa_prot:
    description:
    - Protocol used by IPA server.
    - If the value is not specified in the task, the value of environment variable C(IPA_PROT) will be used instead.
    - If both the environment variable C(IPA_PROT) and the value are not specified in the task, then default value is set.
    - 'Environment variable fallback mechanism is added in version 2.5.'
    default: https
    choices: ["http", "https"]
  validate_certs:
    description:
    - This only applies if C(ipa_prot) is I(https).
    - If set to C(no), the SSL certificates will not be validated.
    - This should only set to C(no) used on personally controlled sites using self-signed certificates.
    default: true
'''
| gpl-3.0 |
openhatch/oh-mainline | vendor/packages/Django/django/contrib/gis/tests/inspectapp/tests.py | 134 | 5128 | from __future__ import absolute_import
import os
from django.db import connections
from django.test import TestCase
from django.contrib.gis.gdal import Driver
from django.contrib.gis.geometry.test_data import TEST_DATA
from django.contrib.gis.utils.ogrinspect import ogrinspect
from .models import AllOGRFields
class OGRInspectTest(TestCase):
    """Tests for ogrinspect(): generating GeoDjango models from OGR sources."""

    def test_poly(self):
        """A polygon shapefile yields the expected auto-generated model."""
        shp_file = os.path.join(TEST_DATA, 'test_poly', 'test_poly.shp')
        model_def = ogrinspect(shp_file, 'MyModel')

        expected = [
            '# This is an auto-generated Django model module created by ogrinspect.',
            'from django.contrib.gis.db import models',
            '',
            'class MyModel(models.Model):',
            '    float = models.FloatField()',
            '    int = models.FloatField()',
            '    str = models.CharField(max_length=80)',
            '    geom = models.PolygonField(srid=-1)',
            '    objects = models.GeoManager()',
        ]

        self.assertEqual(model_def, '\n'.join(expected))

    def test_date_field(self):
        """OGR date fields map to models.DateField in the generated model."""
        shp_file = os.path.join(TEST_DATA, 'cities', 'cities.shp')
        model_def = ogrinspect(shp_file, 'City')

        expected = [
            '# This is an auto-generated Django model module created by ogrinspect.',
            'from django.contrib.gis.db import models',
            '',
            'class City(models.Model):',
            '    name = models.CharField(max_length=80)',
            '    population = models.FloatField()',
            '    density = models.FloatField()',
            '    created = models.DateField()',
            '    geom = models.PointField(srid=-1)',
            '    objects = models.GeoManager()',
        ]

        self.assertEqual(model_def, '\n'.join(expected))

    def test_time_field(self):
        # Only possible to test this on PostGIS at the momemnt. MySQL
        # complains about permissions, and SpatiaLite/Oracle are
        # insanely difficult to get support compiled in for in GDAL.
        if not connections['default'].ops.postgis:
            return

        # Getting the database identifier used by OGR, if None returned
        # GDAL does not have the support compiled in.
        ogr_db = get_ogr_db_string()
        if not ogr_db:
            return

        # writing shapefules via GDAL currently does not support writing OGRTime
        # fields, so we need to actually use a database
        model_def = ogrinspect(ogr_db, 'Measurement',
                               layer_key=AllOGRFields._meta.db_table,
                               decimal=['f_decimal'])

        self.assertTrue(model_def.startswith(
            '# This is an auto-generated Django model module created by ogrinspect.\n'
            'from django.contrib.gis.db import models\n'
            '\n'
            'class Measurement(models.Model):\n'
        ))

        # The ordering of model fields might vary depending on several factors (version of GDAL, etc.)
        self.assertIn('    f_decimal = models.DecimalField(max_digits=0, decimal_places=0)', model_def)
        self.assertIn('    f_int = models.IntegerField()', model_def)
        self.assertIn('    f_datetime = models.DateTimeField()', model_def)
        self.assertIn('    f_time = models.TimeField()', model_def)
        self.assertIn('    f_float = models.FloatField()', model_def)
        self.assertIn('    f_char = models.CharField(max_length=10)', model_def)
        self.assertIn('    f_date = models.DateField()', model_def)

        self.assertTrue(model_def.endswith(
            '    geom = models.PolygonField()\n'
            '    objects = models.GeoManager()'
        ))
def get_ogr_db_string():
    """Build the connection string GDAL/OGR uses to inspect the test database.

    Re-uses the settings of Django's 'default' test database (GDAL opens its
    own connection). Only PostgreSQL/PostGIS is supported at the moment.

    Returns:
        The "DRIVER:params" string, or None when GDAL lacks driver support.
    """
    db = connections.databases['default']

    # Map from the django backend into the OGR driver name and database identifier
    # http://www.gdal.org/ogr/ogr_formats.html
    #
    # TODO: Support Oracle (OCI), MySQL, and SpatiaLite.
    drivers = {
        'django.contrib.gis.db.backends.postgis': ('PostgreSQL', 'PG'),
    }

    drv_name, db_str = drivers[db['ENGINE']]

    # Ensure that GDAL library has driver support for the database.
    try:
        Driver(drv_name)
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt. Only driver-lookup failures should mean "no
        # GDAL support".
        return None

    # Build the params of the OGR database connection string
    # TODO: connection strings are database-dependent, thus if
    # we ever test other backends, this will need to change.
    params = ["dbname='%s'" % db['NAME']]

    def add(key, template):
        # Append a connection parameter only if it is set in django's settings.
        value = db.get(key, None)
        if value:
            params.append(template % value)

    add('HOST', "host='%s'")
    add('PORT', "port='%s'")
    add('USER', "user='%s'")
    add('PASSWORD', "password='%s'")

    return '%s:%s' % (db_str, ' '.join(params))
| agpl-3.0 |
phimpme/generator | Phimpme/site-packages/requests/packages/charade/utf8prober.py | 205 | 2728 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .mbcssm import UTF8SMModel

# Per-character discount applied in UTF8Prober.get_confidence(): each
# multi-byte character seen halves the "not UTF-8" likelihood.
ONE_CHAR_PROB = 0.5
class UTF8Prober(CharSetProber):
    """Detect UTF-8 by driving every input byte through a UTF-8 state machine."""

    def __init__(self):
        CharSetProber.__init__(self)
        self._mCodingSM = CodingStateMachine(UTF8SMModel)
        self.reset()

    def reset(self):
        """Reset probing state, the coding state machine and the MB counter."""
        CharSetProber.reset(self)
        self._mCodingSM.reset()
        self._mNumOfMBChar = 0

    def get_charset_name(self):
        return "utf-8"

    def feed(self, aBuf):
        """Feed a buffer to the state machine, counting multi-byte characters."""
        for byte in aBuf:
            coding_state = self._mCodingSM.next_state(byte)
            if coding_state == constants.eError:
                # Invalid UTF-8 sequence: definitely not this charset.
                self._mState = constants.eNotMe
                break
            if coding_state == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            if coding_state == constants.eStart:
                # A complete character was consumed; note if it was multi-byte.
                if self._mCodingSM.get_current_charlen() >= 2:
                    self._mNumOfMBChar += 1

        if self.get_state() == constants.eDetecting:
            if self.get_confidence() > constants.SHORTCUT_THRESHOLD:
                self._mState = constants.eFoundIt

        return self.get_state()

    def get_confidence(self):
        """Confidence rises with each multi-byte character seen, capped at 0.99."""
        if self._mNumOfMBChar >= 6:
            return 0.99
        unlike = 0.99
        for _ in range(self._mNumOfMBChar):
            unlike *= ONE_CHAR_PROB
        return 1.0 - unlike
| gpl-3.0 |
ajoaoff/django | tests/generic_views/test_base.py | 269 | 19854 | from __future__ import unicode_literals
import time
import unittest
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import resolve
from django.http import HttpResponse
from django.test import RequestFactory, SimpleTestCase, override_settings
from django.test.utils import require_jinja2
from django.views.generic import RedirectView, TemplateView, View
from . import views
class SimpleView(View):
    """
    A simple view with a docstring.
    """
    # GET-only view used by ViewTest; the docstring above is intentionally
    # kept verbatim (as_view() copies it to the returned callable).
    def get(self, request):
        return HttpResponse('This is a simple view')
class SimplePostView(SimpleView):
    # Reuse the GET handler for POST, so the view accepts both methods.
    post = SimpleView.get
class PostOnlyView(View):
    # Accepts POST only; GET/HEAD requests should be answered with 405.
    def post(self, request):
        return HttpResponse('This view only accepts POST')
class CustomizableView(SimpleView):
    # NOTE(review): presumably overridden via as_view(parameter=...) by tests
    # outside this chunk -- confirm against the full test module.
    parameter = {}
def decorator(view):
    """Mark *view* so tests can detect that it went through this decorator."""
    setattr(view, 'is_decorated', True)
    return view
class DecoratedDispatchView(SimpleView):

    @decorator
    def dispatch(self, request, *args, **kwargs):
        # Marked by @decorator so tests can verify decoration survives dispatch.
        return super(DecoratedDispatchView, self).dispatch(request, *args, **kwargs)
class AboutTemplateView(TemplateView):
    # Supplies its template via get_template_names() rather than template_name.
    def get(self, request):
        return self.render_to_response({})

    def get_template_names(self):
        return ['generic_views/about.html']
class AboutTemplateAttributeView(TemplateView):
    # Same as AboutTemplateView but using the template_name attribute.
    template_name = 'generic_views/about.html'

    def get(self, request):
        return self.render_to_response(context={})
class InstanceView(View):
    # Returns the view instance itself so tests can inspect per-request state.
    def get(self, request):
        return self
class ViewTest(unittest.TestCase):
    """Behavioral tests for the base View class and the as_view() contract."""
    rf = RequestFactory()

    def _assert_simple(self, response):
        """The response is a 200 carrying SimpleView's canonical body."""
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, b'This is a simple view')

    def test_no_init_kwargs(self):
        """
        A view can't be accidentally instantiated before deployment.
        """
        # assertRaises context manager replaces the try/except/self.fail idiom.
        with self.assertRaises(AttributeError):
            SimpleView(key='value').as_view()

    def test_no_init_args(self):
        """
        as_view() rejects non-keyword arguments.
        """
        with self.assertRaises(TypeError):
            SimpleView.as_view('value')

    def test_pathological_http_method(self):
        """
        The edge case of a http request that spoofs an existing method name is caught.
        """
        self.assertEqual(SimpleView.as_view()(
            self.rf.get('/', REQUEST_METHOD='DISPATCH')
        ).status_code, 405)

    def test_get_only(self):
        """
        A view which only allows GET doesn't allow other methods.
        """
        self._assert_simple(SimpleView.as_view()(self.rf.get('/')))
        self.assertEqual(SimpleView.as_view()(self.rf.post('/')).status_code, 405)
        self.assertEqual(SimpleView.as_view()(
            self.rf.get('/', REQUEST_METHOD='FAKE')
        ).status_code, 405)

    def test_get_and_head(self):
        """
        A view which supplies a GET method also responds correctly to HEAD.
        """
        self._assert_simple(SimpleView.as_view()(self.rf.get('/')))
        response = SimpleView.as_view()(self.rf.head('/'))
        self.assertEqual(response.status_code, 200)

    def test_head_no_get(self):
        """
        A view which supplies no GET method responds to HEAD with HTTP 405.
        """
        response = PostOnlyView.as_view()(self.rf.head('/'))
        self.assertEqual(response.status_code, 405)

    def test_get_and_post(self):
        """
        A view which allows both GET and POST accepts both.
        """
        self._assert_simple(SimplePostView.as_view()(self.rf.get('/')))
        self._assert_simple(SimplePostView.as_view()(self.rf.post('/')))
        self.assertEqual(SimplePostView.as_view()(
            self.rf.get('/', REQUEST_METHOD='FAKE')
        ).status_code, 405)

    def test_invalid_keyword_argument(self):
        """
        View arguments must be predefined on the class and can't
        be named like a HTTP method.
        """
        # Check each of the allowed method names.
        for method in SimpleView.http_method_names:
            with self.assertRaises(TypeError):
                SimpleView.as_view(**{method: "value"})
        # A view argument is ok if predefined on the class...
        CustomizableView.as_view(parameter="value")
        # ...but raises errors otherwise.
        with self.assertRaises(TypeError):
            CustomizableView.as_view(foobar="value")

    def test_calling_more_than_once(self):
        """
        Each call to the view callable yields a distinct result/instance.
        """
        request = self.rf.get('/')
        view = InstanceView.as_view()
        self.assertNotEqual(view(request), view(request))

    def test_class_attributes(self):
        """
        The callable returned from as_view() has proper
        docstring, name and module.
        """
        self.assertEqual(SimpleView.__doc__, SimpleView.as_view().__doc__)
        self.assertEqual(SimpleView.__name__, SimpleView.as_view().__name__)
        self.assertEqual(SimpleView.__module__, SimpleView.as_view().__module__)

    def test_dispatch_decoration(self):
        """
        Attributes set by decorators on the dispatch method
        are also present on the closure.
        """
        self.assertTrue(DecoratedDispatchView.as_view().is_decorated)

    def test_options(self):
        """
        Views respond to HTTP OPTIONS requests with an Allow header
        appropriate for the methods implemented by the view class.
        """
        response = SimpleView.as_view()(self.rf.options('/'))
        self.assertEqual(200, response.status_code)
        self.assertTrue(response['Allow'])

    def test_options_for_get_view(self):
        """
        A view implementing GET allows GET and HEAD.
        """
        response = SimpleView.as_view()(self.rf.options('/'))
        self._assert_allows(response, 'GET', 'HEAD')

    def test_options_for_get_and_post_view(self):
        """
        A view implementing GET and POST allows GET, HEAD, and POST.
        """
        response = SimplePostView.as_view()(self.rf.options('/'))
        self._assert_allows(response, 'GET', 'HEAD', 'POST')

    def test_options_for_post_view(self):
        """
        A view implementing POST allows POST.
        """
        response = PostOnlyView.as_view()(self.rf.options('/'))
        self._assert_allows(response, 'POST')

    def _assert_allows(self, response, *expected_methods):
        """Assert allowed HTTP methods reported in the Allow response header."""
        response_allows = set(response['Allow'].split(', '))
        # OPTIONS itself is always advertised.
        self.assertEqual(set(expected_methods + ('OPTIONS',)), response_allows)

    def test_args_kwargs_request_on_self(self):
        """
        A view only has args, kwargs & request once `as_view`
        has been called.
        """
        bare_view = InstanceView()
        view = InstanceView.as_view()(self.rf.get('/'))
        for attribute in ('args', 'kwargs', 'request'):
            self.assertNotIn(attribute, dir(bare_view))
            self.assertIn(attribute, dir(view))

    def test_direct_instantiation(self):
        """
        It should be possible to use the view by directly instantiating it
        without going through .as_view() (#21564).
        """
        view = PostOnlyView()
        response = view.dispatch(self.rf.head('/'))
        self.assertEqual(response.status_code, 405)
@override_settings(ROOT_URLCONF='generic_views.urls')
class TemplateViewTest(SimpleTestCase):
    """Tests for TemplateView rendering, template resolution and caching."""
    rf = RequestFactory()

    def _assert_about(self, response):
        """Render the response and check it is the About page."""
        response.render()
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<h1>About</h1>')

    def test_get(self):
        """
        A view that simply renders a template on GET.
        """
        self._assert_about(AboutTemplateView.as_view()(self.rf.get('/about/')))

    def test_head(self):
        """
        A TemplateView responds correctly to HEAD.
        """
        response = AboutTemplateView.as_view()(self.rf.head('/about/'))
        self.assertEqual(response.status_code, 200)

    def test_get_template_attribute(self):
        """
        A view that renders a template on GET with the template name as
        an attribute on the class.
        """
        self._assert_about(AboutTemplateAttributeView.as_view()(self.rf.get('/about/')))

    def test_get_generic_template(self):
        """
        A completely generic view that renders a template on GET
        with the template name as an argument at instantiation.
        """
        self._assert_about(TemplateView.as_view(template_name='generic_views/about.html')(self.rf.get('/about/')))

    def test_template_name_required(self):
        """
        A template view must provide a template name.
        """
        # Context-manager form replaces the legacy callable-form assertRaises.
        with self.assertRaises(ImproperlyConfigured):
            self.client.get('/template/no_template/')

    @require_jinja2
    def test_template_engine(self):
        """
        A template view may provide a template engine.
        """
        request = self.rf.get('/using/')
        view = TemplateView.as_view(template_name='generic_views/using.html')
        self.assertEqual(view(request).render().content, b'DTL\n')
        view = TemplateView.as_view(template_name='generic_views/using.html', template_engine='django')
        self.assertEqual(view(request).render().content, b'DTL\n')
        view = TemplateView.as_view(template_name='generic_views/using.html', template_engine='jinja2')
        self.assertEqual(view(request).render().content, b'Jinja2\n')

    def test_template_params(self):
        """
        A generic template view passes kwargs as context.
        """
        response = self.client.get('/template/simple/bar/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['foo'], 'bar')
        self.assertIsInstance(response.context['view'], View)

    def test_extra_template_params(self):
        """
        A template view can be customized to return extra context.
        """
        response = self.client.get('/template/custom/bar/')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['foo'], 'bar')
        self.assertEqual(response.context['key'], 'value')
        self.assertIsInstance(response.context['view'], View)

    def test_cached_views(self):
        """
        A template view can be cached.
        """
        response = self.client.get('/template/cached/bar/')
        self.assertEqual(response.status_code, 200)
        # Inside the cache window the identical body must be served.
        time.sleep(1.0)
        response2 = self.client.get('/template/cached/bar/')
        self.assertEqual(response2.status_code, 200)
        self.assertEqual(response.content, response2.content)
        # Let the cache expire and test again.
        time.sleep(2.0)
        response2 = self.client.get('/template/cached/bar/')
        self.assertEqual(response2.status_code, 200)
        self.assertNotEqual(response.content, response2.content)

    def test_content_type(self):
        response = self.client.get('/template/content_type/')
        self.assertEqual(response['Content-Type'], 'text/plain')

    def test_resolve_view(self):
        match = resolve('/template/content_type/')
        self.assertIs(match.func.view_class, TemplateView)
        self.assertEqual(match.func.view_initkwargs['content_type'], 'text/plain')

    def test_resolve_login_required_view(self):
        match = resolve('/template/login_required/')
        self.assertIs(match.func.view_class, TemplateView)
@override_settings(ROOT_URLCONF='generic_views.urls')
class RedirectViewTest(SimpleTestCase):
    """Tests for RedirectView across HTTP methods and URL-building options."""
    rf = RequestFactory()

    def _assert_temporary_redirect(self, request):
        """RedirectView(url='/bar/') answers *request* with a 302 to /bar/.

        Shared by the per-HTTP-method tests below, which previously repeated
        this exact three-line body verbatim.
        """
        response = RedirectView.as_view(url='/bar/')(request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, '/bar/')

    def test_no_url(self):
        "Without any configuration, returns HTTP 410 GONE"
        response = RedirectView.as_view()(self.rf.get('/foo/'))
        self.assertEqual(response.status_code, 410)

    def test_default_redirect(self):
        "Default is a temporary redirect"
        self._assert_temporary_redirect(self.rf.get('/foo/'))

    def test_permanent_redirect(self):
        "Permanent redirects are an option"
        response = RedirectView.as_view(url='/bar/', permanent=True)(self.rf.get('/foo/'))
        self.assertEqual(response.status_code, 301)
        self.assertEqual(response.url, '/bar/')

    def test_temporary_redirect(self):
        "Temporary redirects are an option"
        response = RedirectView.as_view(url='/bar/', permanent=False)(self.rf.get('/foo/'))
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, '/bar/')

    def test_include_args(self):
        "GET arguments can be included in the redirected URL"
        self._assert_temporary_redirect(self.rf.get('/foo/'))
        response = RedirectView.as_view(url='/bar/', query_string=True)(self.rf.get('/foo/?pork=spam'))
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, '/bar/?pork=spam')

    def test_include_urlencoded_args(self):
        "GET arguments can be URL-encoded when included in the redirected URL"
        response = RedirectView.as_view(url='/bar/', query_string=True)(
            self.rf.get('/foo/?unicode=%E2%9C%93'))
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, '/bar/?unicode=%E2%9C%93')

    def test_parameter_substitution(self):
        "Redirection URLs can be parameterized"
        response = RedirectView.as_view(url='/bar/%(object_id)d/')(self.rf.get('/foo/42/'), object_id=42)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, '/bar/42/')

    def test_named_url_pattern(self):
        "Named pattern parameter should reverse to the matching pattern"
        response = RedirectView.as_view(pattern_name='artist_detail')(self.rf.get('/foo/'), pk=1)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response['Location'], '/detail/artist/1/')

    def test_named_url_pattern_using_args(self):
        response = RedirectView.as_view(pattern_name='artist_detail')(self.rf.get('/foo/'), 1)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response['Location'], '/detail/artist/1/')

    def test_wrong_named_url_pattern(self):
        "A wrong pattern name returns 410 GONE"
        response = RedirectView.as_view(pattern_name='wrong.pattern_name')(self.rf.get('/foo/'))
        self.assertEqual(response.status_code, 410)

    def test_redirect_POST(self):
        "Default is a temporary redirect"
        self._assert_temporary_redirect(self.rf.post('/foo/'))

    def test_redirect_HEAD(self):
        "Default is a temporary redirect"
        self._assert_temporary_redirect(self.rf.head('/foo/'))

    def test_redirect_OPTIONS(self):
        "Default is a temporary redirect"
        self._assert_temporary_redirect(self.rf.options('/foo/'))

    def test_redirect_PUT(self):
        "Default is a temporary redirect"
        self._assert_temporary_redirect(self.rf.put('/foo/'))

    def test_redirect_PATCH(self):
        "Default is a temporary redirect"
        self._assert_temporary_redirect(self.rf.patch('/foo/'))

    def test_redirect_DELETE(self):
        "Default is a temporary redirect"
        self._assert_temporary_redirect(self.rf.delete('/foo/'))

    def test_redirect_when_meta_contains_no_query_string(self):
        "regression for #16705"
        # we can't use self.rf.get because it always sets QUERY_STRING
        response = RedirectView.as_view(url='/bar/')(self.rf.request(PATH_INFO='/foo/'))
        self.assertEqual(response.status_code, 302)

    def test_direct_instantiation(self):
        """
        It should be possible to use the view without going through .as_view()
        (#21564).
        """
        view = RedirectView()
        response = view.dispatch(self.rf.head('/foo/'))
        self.assertEqual(response.status_code, 410)
class GetContextDataTest(unittest.TestCase):
    """Tests for ContextMixin.get_context_data() and its kwargs handling."""

    def test_get_context_data_super(self):
        view = views.CustomContextView()
        ctx = view.get_context_data(kwarg_test='kwarg_value')
        # The test_name key is inserted by the test class's parent.
        self.assertIn('test_name', ctx)
        self.assertEqual(ctx['kwarg_test'], 'kwarg_value')
        self.assertEqual(ctx['custom_key'], 'custom_value')
        # A kwarg overrides values assigned higher up.
        ctx = view.get_context_data(test_name='test_value')
        self.assertEqual(ctx['test_name'], 'test_value')

    def test_object_at_custom_name_in_context_data(self):
        # The 'pony' key must appear in the dict returned by get_context_data.
        view = views.CustomSingleObjectView()
        view.context_object_name = 'pony'
        ctx = view.get_context_data()
        self.assertEqual(ctx['pony'], view.object)

    def test_object_in_get_context_data(self):
        # The 'object' key must appear in the dict returned by
        # get_context_data (#20234).
        view = views.CustomSingleObjectView()
        ctx = view.get_context_data()
        self.assertEqual(ctx['object'], view.object)
class UseMultipleObjectMixinTest(unittest.TestCase):
    """Tests for MultipleObjectMixin's queryset/context interaction."""
    rf = RequestFactory()

    def test_use_queryset_from_view(self):
        view = views.CustomMultipleObjectMixinView()
        view.get(self.rf.get('/'))
        # With no queryset argument, the view's own queryset is used.
        ctx = view.get_context_data()
        self.assertEqual(ctx['object_list'], view.queryset)

    def test_overwrite_queryset(self):
        view = views.CustomMultipleObjectMixinView()
        view.get(self.rf.get('/'))
        queryset = [{'name': 'Lennon'}, {'name': 'Ono'}]
        self.assertNotEqual(view.queryset, queryset)
        # The object_list kwarg overrides the view's own queryset.
        ctx = view.get_context_data(object_list=queryset)
        self.assertEqual(ctx['object_list'], queryset)
class SingleObjectTemplateResponseMixinTest(unittest.TestCase):

    def test_template_mixin_without_template(self):
        """
        We want to make sure that if you use a template mixin, but forget the
        template, it still tells you it's ImproperlyConfigured instead of
        TemplateDoesNotExist.
        """
        view = views.TemplateResponseWithoutTemplate()
        # Context-manager form replaces the legacy callable-form assertRaises.
        with self.assertRaises(ImproperlyConfigured):
            view.get_template_names()
| bsd-3-clause |
malisal/bfdpie | tests/bfdpie_test.py | 1 | 14822 | import unittest
from bfdpie import *
class Test(unittest.TestCase):
    """bfdpie smoke tests: a fixed byte blob per architecture must decode to
    the expected instruction text, and VMAs must survive the sign boundary."""

    def _assert_disassembly(self, arch, code, expected):
        """Disassemble *code* for *arch* and compare each decoded
        instruction's text pairwise against *expected*.

        Mirrors the previous per-test boilerplate (build a Binary, call
        disassemble, compare str(dis[i]) index by index), but with
        assertEqual so failures show both strings.
        """
        dis = Binary().disassemble(code, arch)
        for insn, text in zip(dis, expected):
            self.assertEqual(str(insn), text)

    def test_large_vma(self):
        """Instruction VMAs at the 32-bit and 64-bit sign boundaries must not
        come back negative."""
        b = Binary()
        for arch in (ARCH_I686, ARCH_X86_64):
            for vma in (0x80000000, 0x8000000000000000):
                dis = b.disassemble(b"\x90", arch, vma)
                self.assertGreaterEqual(dis[0].vma, 0)

    def test_arch_i686(self):
        self._assert_disassembly(ARCH_I686,
            b"\x89\xe5"              # mov %esp,%ebp
            b"\x53"                  # push %ebx
            b"\xbb\x4c\x96\x04\x08"  # mov $0x804964c,%ebx
            b"\x52",                 # push %edx
            ["mov ebp,esp", "push ebx", "mov ebx,0x804964c", "push edx"])

    def test_arch_x86_64(self):
        self._assert_disassembly(ARCH_X86_64,
            b"\xba\x00\x04\x00\x00"
            b"\x48\x89\xc6"
            b"\xbf\x00\x00\x00\x00"
            b"\xb8\x00\x00\x00\x00",
            ["mov edx,0x400", "mov rsi,rax", "mov edi,0x0", "mov eax,0x0"])

    def test_arch_armel(self):
        self._assert_disassembly(ARCH_ARMEL,
            b"\x00\x48\x2d\xe9"
            b"\x04\xb0\x8d\xe2"
            b"\x20\xd0\x4d\xe2"
            b"\x24\x30\x4b\xe2",
            ["push {fp, lr}", "add fp, sp, #4", "sub sp, sp, #32",
             "sub r3, fp, #36 ; 0x24"])

    def test_arch_armel_thumb(self):
        self._assert_disassembly(ARCH_ARMEL_THUMB,
            b"\xdb\x00"
            b"\x00\x20"
            b"\x11\x1c"
            b"\x1a\x1c",
            ["lsls r3, r3, #3", "movs r0, #0", "adds r1, r2, #0",
             "adds r2, r3, #0"])

    def test_arch_armeb(self):
        # Same instructions as test_arch_armel, big-endian byte order.
        self._assert_disassembly(ARCH_ARMEB,
            b"\xe9\x2d\x48\x00"
            b"\xe2\x8d\xb0\x04"
            b"\xe2\x4d\xd0\x20"
            b"\xe2\x4b\x30\x24",
            ["push {fp, lr}", "add fp, sp, #4", "sub sp, sp, #32",
             "sub r3, fp, #36 ; 0x24"])

    def test_arch_armeb_thumb(self):
        self._assert_disassembly(ARCH_ARMEB_THUMB,
            b"\x00\xdb"
            b"\x20\x00"
            b"\x1c\x11"
            b"\x1c\x1a",
            ["lsls r3, r3, #3", "movs r0, #0", "adds r1, r2, #0",
             "adds r2, r3, #0"])

    def test_arch_mips(self):
        self._assert_disassembly(ARCH_MIPS,
            b"\x8f\xbf\x00\x1c"
            b"\x00\x00\x00\x00"
            b"\x03\xe0\x00\x08"
            b"\x27\xbd\x00\x20",
            ["lw ra,28(sp)", "nop", "jr ra", "addiu sp,sp,32"])

    def test_arch_mipsel(self):
        # Same instructions as test_arch_mips, little-endian byte order.
        self._assert_disassembly(ARCH_MIPSEL,
            b"\x1c\x00\xbf\x8f"
            b"\x00\x00\x00\x00"
            b"\x08\x00\xe0\x03"
            b"\x20\x00\xbd\x27",
            ["lw ra,28(sp)", "nop", "jr ra", "addiu sp,sp,32"])

    def test_arch_mips64(self):
        self._assert_disassembly(ARCH_MIPS64,
            b"\x3c\x1c\x00\x02"
            b"\x27\x9c\x84\x3c"
            b"\x03\x9f\xe0\x2d"
            b"\xdf\x99\x80\x68",
            ["lui gp,0x2", "addiu gp,gp,-31684", "daddu gp,gp,ra",
             "ld t9,-32664(gp)"])

    def test_arch_mips64el(self):
        self._assert_disassembly(ARCH_MIPS64EL,
            b"\x02\x00\x1c\x3c"
            b"\x3c\x84\x9c\x27"
            b"\x2d\xe0\x9f\x03"
            b"\x68\x80\x99\xdf",
            ["lui gp,0x2", "addiu gp,gp,-31684", "daddu gp,gp,ra",
             "ld t9,-32664(gp)"])

    def test_arch_ppc32(self):
        self._assert_disassembly(ARCH_PPC32,
            b"\x80\x01\x00\x14"
            b"\x38\x21\x00\x10"
            b"\x7c\x08\x03\xa6"
            b"\x4e\x80\x00\x20",
            ["lwz r0,20(r1)", "addi r1,r1,16", "mtlr r0", "blr"])

    def test_arch_ppc64(self):
        self._assert_disassembly(ARCH_PPC64,
            b"\x38\x21\x00\x70"
            b"\xe8\x01\x00\x10"
            b"\x7c\x08\x03\xa6"
            b"\x4e\x80\x00\x20",
            ["addi r1,r1,112", "ld r0,16(r1)", "mtlr r0", "blr"])

    def test_arch_sparc(self):
        self._assert_disassembly(ARCH_SPARC,
            b"\x9d\xe3\xbf\x98"
            b"\x01\x00\x00\x00"
            b"\x81\xc7\xe0\x08"
            b"\x81\xe8\x00\x00",
            ["save %sp, -104, %sp", "nop", "ret", "restore"])

    def test_arch_sparc64(self):
        self._assert_disassembly(ARCH_SPARC64,
            b"\x9f\xc0\x40\x00"
            b"\xba\x07\x7f\xf8"
            b"\xc2\x5f\x40\x00"
            b"\x80\xa0\x7f\xff",
            ["call %g1", "add %i5, -8, %i5", "ldx [ %i5 ], %g1",
             "cmp %g1, -1"])

    def test_arch_sh4(self):
        self._assert_disassembly(ARCH_SH4,
            b"\x26\x4f"
            b"\x0b\x00"
            b"\xf6\x68"
            b"\x09\x00",
            ["lds.l @r15+,pr", "rts", "mov.l @r15+,r8", "nop"])

    def test_arch_sh4eb(self):
        # Same instructions as test_arch_sh4, big-endian byte order.
        self._assert_disassembly(ARCH_SH4EB,
            b"\x4f\x26"
            b"\x00\x0b"
            b"\x68\xf6"
            b"\x00\x09",
            ["lds.l @r15+,pr", "rts", "mov.l @r15+,r8", "nop"])

    def test_arch_aarch64(self):
        self._assert_disassembly(ARCH_AARCH64,
            b"\xfd\x7b\xbf\xa9"
            b"\xfd\x03\x00\x91"
            b"\xfd\x7b\xc1\xa8"
            b"\xc0\x03\x5f\xd6",
            ["stp x29, x30, [sp,#-16]!", "mov x29, sp",
             "ldp x29, x30, [sp],#16", "ret"])

    def test_arch_alpha(self):
        self._assert_disassembly(ARCH_ALPHA,
            b"\x3e\x15\xc2\x43"
            b"\x00\x00\x5e\xb7"
            b"\x08\x00\xbe\xb7"
            b"\x00\x00\xfe\x2f",
            ["subq sp,0x10,sp", "stq ra,0(sp)", "stq gp,8(sp)", "unop"])

    def test_arch_crisv32(self):
        self._assert_disassembly(ARCH_CRISV32,
            b"\x6e\x0e"
            b"\x31\xb6"
            b"\x6e\x1e"
            b"\xf0\xb9",
            ["move.d [$sp+],$r0", "move $r1,$srp", "move.d [$sp+],$r1",
             "ret"])

    def test_arch_s390x(self):
        self._assert_disassembly(ARCH_S390X,
            b"\xe3\x40\xf1\x10\x00\x04"
            b"\xeb\xcf\xf1\x00\x00\x04"
            b"\x07\xf4"
            b"\x07\x07",
            ["lg %r4,272(%r15)", "lmg %r12,%r15,256(%r15)", "br %r4",
             "nopr %r7"])

    def test_arch_microblaze(self):
        self._assert_disassembly(ARCH_MICROBLAZE,
            b"\x30\x21\xff\xe0"
            b"\xfa\x81\x00\x1c"
            b"\xf9\xe1\x00\x00"
            b"\x96\x80\x80\x00",
            ["addik r1, r1, -32", "swi r20, r1, 28", "swi r15, r1, 0",
             "mfs r20, rpc"])

    def test_arch_microblazeel(self):
        # Same instructions as test_arch_microblaze, little-endian byte order.
        self._assert_disassembly(ARCH_MICROBLAZEEL,
            b"\xe0\xff\x21\x30"
            b"\x1c\x00\x81\xfa"
            b"\x00\x00\xe1\xf9"
            b"\x00\x80\x80\x96",
            ["addik r1, r1, -32", "swi r20, r1, 28", "swi r15, r1, 0",
             "mfs r20, rpc"])

    def test_loading(self):
        # NOTE(review): Binary() with no arguments apparently loads a default
        # test binary — confirm against the bfdpie constructor.
        b = Binary()
        self.assertEqual(b.file_type, "elf")
        self.assertEqual(str(b.arch), "Arch<name:X86_64, bits:64, little_endian:1>")
# Allow invoking this module directly as a test script.
if __name__ == "__main__":
    unittest.main()
| mit |
dingdong-io/brackets.beautify.io | thirdparty/js-beautify/python/cssbeautifier/tests/test.py | 1 | 20342 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import cssbeautifier
class CSSBeautifierTest(unittest.TestCase):
    def resetOptions(self):
        """Restore self.options to the defaults the generated tests expect."""
        # Lowercase aliases so the generated assignments below read exactly
        # like the JavaScript fixtures this file is produced from.
        false = False
        true = True
        self.options = cssbeautifier.default_options()
        self.options.indent_size = 1
        self.options.indent_char = '\t'
        self.options.selector_separator_newline = true
        self.options.end_with_newline = false
        self.options.newline_between_rules = false
def testGenerated(self):
self.resetOptions()
test_fragment = self.decodesto
t = self.decodesto
false = False
true = True
self.options.indent_size = 1
self.options.indent_char = '\t'
self.options.selector_separator_newline = true
self.options.end_with_newline = false
self.options.newline_between_rules = false
# End With Newline - (eof = "\n")
self.options.end_with_newline = true
test_fragment('', '\n')
test_fragment(' .tabs{}', ' .tabs {}\n')
test_fragment(' \n\n.tabs{}\n\n\n\n', ' .tabs {}\n')
test_fragment('\n')
# End With Newline - (eof = "")
self.options.end_with_newline = false
test_fragment('')
test_fragment(' .tabs{}', ' .tabs {}')
test_fragment(' \n\n.tabs{}\n\n\n\n', ' .tabs {}')
test_fragment('\n', '')
# Empty braces
t('.tabs{}', '.tabs {}')
t('.tabs { }', '.tabs {}')
t('.tabs { }', '.tabs {}')
t('.tabs \n{\n \n }', '.tabs {}')
#
t('#cboxOverlay {\n\tbackground: url(images/overlay.png) repeat 0 0;\n\topacity: 0.9;\n\tfilter: alpha(opacity = 90);\n}', '#cboxOverlay {\n\tbackground: url(images/overlay.png) repeat 0 0;\n\topacity: 0.9;\n\tfilter: alpha(opacity=90);\n}')
# Selector Separator - (separator = " ", separator1 = " ")
self.options.selector_separator_newline = false
self.options.selector_separator = " "
t('#bla, #foo{color:green}', '#bla, #foo {\n\tcolor: green\n}')
t('@media print {.tab{}}', '@media print {\n\t.tab {}\n}')
t('@media print {.tab,.bat{}}', '@media print {\n\t.tab, .bat {}\n}')
t('#bla, #foo{color:black}', '#bla, #foo {\n\tcolor: black\n}')
t('a:first-child,a:first-child{color:red;div:first-child,div:hover{color:black;}}', 'a:first-child, a:first-child {\n\tcolor: red;\n\tdiv:first-child, div:hover {\n\t\tcolor: black;\n\t}\n}')
# Selector Separator - (separator = " ", separator1 = " ")
self.options.selector_separator_newline = false
self.options.selector_separator = " "
t('#bla, #foo{color:green}', '#bla, #foo {\n\tcolor: green\n}')
t('@media print {.tab{}}', '@media print {\n\t.tab {}\n}')
t('@media print {.tab,.bat{}}', '@media print {\n\t.tab, .bat {}\n}')
t('#bla, #foo{color:black}', '#bla, #foo {\n\tcolor: black\n}')
t('a:first-child,a:first-child{color:red;div:first-child,div:hover{color:black;}}', 'a:first-child, a:first-child {\n\tcolor: red;\n\tdiv:first-child, div:hover {\n\t\tcolor: black;\n\t}\n}')
# Selector Separator - (separator = "\n", separator1 = "\n\t")
self.options.selector_separator_newline = true
self.options.selector_separator = " "
t('#bla, #foo{color:green}', '#bla,\n#foo {\n\tcolor: green\n}')
t('@media print {.tab{}}', '@media print {\n\t.tab {}\n}')
t('@media print {.tab,.bat{}}', '@media print {\n\t.tab,\n\t.bat {}\n}')
t('#bla, #foo{color:black}', '#bla,\n#foo {\n\tcolor: black\n}')
t('a:first-child,a:first-child{color:red;div:first-child,div:hover{color:black;}}', 'a:first-child,\na:first-child {\n\tcolor: red;\n\tdiv:first-child,\n\tdiv:hover {\n\t\tcolor: black;\n\t}\n}')
# Selector Separator - (separator = "\n", separator1 = "\n\t")
self.options.selector_separator_newline = true
self.options.selector_separator = " "
t('#bla, #foo{color:green}', '#bla,\n#foo {\n\tcolor: green\n}')
t('@media print {.tab{}}', '@media print {\n\t.tab {}\n}')
t('@media print {.tab,.bat{}}', '@media print {\n\t.tab,\n\t.bat {}\n}')
t('#bla, #foo{color:black}', '#bla,\n#foo {\n\tcolor: black\n}')
t('a:first-child,a:first-child{color:red;div:first-child,div:hover{color:black;}}', 'a:first-child,\na:first-child {\n\tcolor: red;\n\tdiv:first-child,\n\tdiv:hover {\n\t\tcolor: black;\n\t}\n}')
# Newline Between Rules - (separator = "\n")
self.options.newline_between_rules = true
t('.div {}\n.span {}', '.div {}\n\n.span {}')
t('.div{}\n \n.span{}', '.div {}\n\n.span {}')
t('.div {} \n \n.span { } \n', '.div {}\n\n.span {}')
t('.div {\n \n} \n .span {\n } ', '.div {}\n\n.span {}')
t('.selector1 {\n\tmargin: 0; /* This is a comment including an url http://domain.com/path/to/file.ext */\n}\n.div{height:15px;}', '.selector1 {\n\tmargin: 0;\n\t/* This is a comment including an url http://domain.com/path/to/file.ext */\n}\n\n.div {\n\theight: 15px;\n}')
t('.tabs{width:10px;//end of line comment\nheight:10px;//another\n}\n.div{height:15px;}', '.tabs {\n\twidth: 10px; //end of line comment\n\theight: 10px; //another\n}\n\n.div {\n\theight: 15px;\n}')
t('#foo {\n\tbackground-image: url(foo@2x.png);\n\t@font-face {\n\t\tfont-family: "Bitstream Vera Serif Bold";\n\t\tsrc: url("http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf");\n\t}\n}\n.div{height:15px;}', '#foo {\n\tbackground-image: url(foo@2x.png);\n\t@font-face {\n\t\tfont-family: "Bitstream Vera Serif Bold";\n\t\tsrc: url("http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf");\n\t}\n}\n\n.div {\n\theight: 15px;\n}')
t('@media screen {\n\t#foo:hover {\n\t\tbackground-image: url(foo@2x.png);\n\t}\n\t@font-face {\n\t\tfont-family: "Bitstream Vera Serif Bold";\n\t\tsrc: url("http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf");\n\t}\n}\n.div{height:15px;}', '@media screen {\n\t#foo:hover {\n\t\tbackground-image: url(foo@2x.png);\n\t}\n\t@font-face {\n\t\tfont-family: "Bitstream Vera Serif Bold";\n\t\tsrc: url("http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf");\n\t}\n}\n\n.div {\n\theight: 15px;\n}')
t('@font-face {\n\tfont-family: "Bitstream Vera Serif Bold";\n\tsrc: url("http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf");\n}\n@media screen {\n\t#foo:hover {\n\t\tbackground-image: url(foo.png);\n\t}\n\t@media screen and (min-device-pixel-ratio: 2) {\n\t\t@font-face {\n\t\t\tfont-family: "Helvetica Neue"\n\t\t}\n\t\t#foo:hover {\n\t\t\tbackground-image: url(foo@2x.png);\n\t\t}\n\t}\n}', '@font-face {\n\tfont-family: "Bitstream Vera Serif Bold";\n\tsrc: url("http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf");\n}\n\n@media screen {\n\t#foo:hover {\n\t\tbackground-image: url(foo.png);\n\t}\n\t@media screen and (min-device-pixel-ratio: 2) {\n\t\t@font-face {\n\t\t\tfont-family: "Helvetica Neue"\n\t\t}\n\t\t#foo:hover {\n\t\t\tbackground-image: url(foo@2x.png);\n\t\t}\n\t}\n}')
t('a:first-child{color:red;div:first-child{color:black;}}\n.div{height:15px;}', 'a:first-child {\n\tcolor: red;\n\tdiv:first-child {\n\t\tcolor: black;\n\t}\n}\n\n.div {\n\theight: 15px;\n}')
t('a:first-child{color:red;div:not(.peq){color:black;}}\n.div{height:15px;}', 'a:first-child {\n\tcolor: red;\n\tdiv:not(.peq) {\n\t\tcolor: black;\n\t}\n}\n\n.div {\n\theight: 15px;\n}')
# Newline Between Rules - (separator = "")
self.options.newline_between_rules = false
t('.div {}\n.span {}')
t('.div{}\n \n.span{}', '.div {}\n.span {}')
t('.div {} \n \n.span { } \n', '.div {}\n.span {}')
t('.div {\n \n} \n .span {\n } ', '.div {}\n.span {}')
t('.selector1 {\n\tmargin: 0; /* This is a comment including an url http://domain.com/path/to/file.ext */\n}\n.div{height:15px;}', '.selector1 {\n\tmargin: 0;\n\t/* This is a comment including an url http://domain.com/path/to/file.ext */\n}\n.div {\n\theight: 15px;\n}')
t('.tabs{width:10px;//end of line comment\nheight:10px;//another\n}\n.div{height:15px;}', '.tabs {\n\twidth: 10px; //end of line comment\n\theight: 10px; //another\n}\n.div {\n\theight: 15px;\n}')
t('#foo {\n\tbackground-image: url(foo@2x.png);\n\t@font-face {\n\t\tfont-family: "Bitstream Vera Serif Bold";\n\t\tsrc: url("http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf");\n\t}\n}\n.div{height:15px;}', '#foo {\n\tbackground-image: url(foo@2x.png);\n\t@font-face {\n\t\tfont-family: "Bitstream Vera Serif Bold";\n\t\tsrc: url("http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf");\n\t}\n}\n.div {\n\theight: 15px;\n}')
t('@media screen {\n\t#foo:hover {\n\t\tbackground-image: url(foo@2x.png);\n\t}\n\t@font-face {\n\t\tfont-family: "Bitstream Vera Serif Bold";\n\t\tsrc: url("http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf");\n\t}\n}\n.div{height:15px;}', '@media screen {\n\t#foo:hover {\n\t\tbackground-image: url(foo@2x.png);\n\t}\n\t@font-face {\n\t\tfont-family: "Bitstream Vera Serif Bold";\n\t\tsrc: url("http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf");\n\t}\n}\n.div {\n\theight: 15px;\n}')
t('@font-face {\n\tfont-family: "Bitstream Vera Serif Bold";\n\tsrc: url("http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf");\n}\n@media screen {\n\t#foo:hover {\n\t\tbackground-image: url(foo.png);\n\t}\n\t@media screen and (min-device-pixel-ratio: 2) {\n\t\t@font-face {\n\t\t\tfont-family: "Helvetica Neue"\n\t\t}\n\t\t#foo:hover {\n\t\t\tbackground-image: url(foo@2x.png);\n\t\t}\n\t}\n}')
t('a:first-child{color:red;div:first-child{color:black;}}\n.div{height:15px;}', 'a:first-child {\n\tcolor: red;\n\tdiv:first-child {\n\t\tcolor: black;\n\t}\n}\n.div {\n\theight: 15px;\n}')
t('a:first-child{color:red;div:not(.peq){color:black;}}\n.div{height:15px;}', 'a:first-child {\n\tcolor: red;\n\tdiv:not(.peq) {\n\t\tcolor: black;\n\t}\n}\n.div {\n\theight: 15px;\n}')
# Functions braces
t('.tabs(){}', '.tabs() {}')
t('.tabs (){}', '.tabs () {}')
t('.tabs (pa, pa(1,2)), .cols { }', '.tabs (pa, pa(1, 2)),\n.cols {}')
t('.tabs(pa, pa(1,2)), .cols { }', '.tabs(pa, pa(1, 2)),\n.cols {}')
t('.tabs ( ) { }', '.tabs () {}')
t('.tabs( ) { }', '.tabs() {}')
t('.tabs (t, t2) \n{\n key: val(p1 ,p2); \n }', '.tabs (t, t2) {\n\tkey: val(p1, p2);\n}')
t('.box-shadow(@shadow: 0 1px 3px rgba(0, 0, 0, .25)) {\n\t-webkit-box-shadow: @shadow;\n\t-moz-box-shadow: @shadow;\n\tbox-shadow: @shadow;\n}')
# Comments
t('/* test */')
t('.tabs{/* test */}', '.tabs {\n\t/* test */\n}')
t('.tabs{/* test */}', '.tabs {\n\t/* test */\n}')
t('/* header */.tabs {}', '/* header */\n\n.tabs {}')
t('.tabs {\n/* non-header */\nwidth:10px;}', '.tabs {\n\t/* non-header */\n\twidth: 10px;\n}')
t('/* header')
t('// comment')
t('.selector1 {\n\tmargin: 0; /* This is a comment including an url http://domain.com/path/to/file.ext */\n}', '.selector1 {\n\tmargin: 0;\n\t/* This is a comment including an url http://domain.com/path/to/file.ext */\n}')
# single line comment support (less/sass)
t('.tabs{\n// comment\nwidth:10px;\n}', '.tabs {\n\t// comment\n\twidth: 10px;\n}')
t('.tabs{// comment\nwidth:10px;\n}', '.tabs {\n\t// comment\n\twidth: 10px;\n}')
t('//comment\n.tabs{width:10px;}', '//comment\n.tabs {\n\twidth: 10px;\n}')
t('.tabs{//comment\n//2nd single line comment\nwidth:10px;}', '.tabs {\n\t//comment\n\t//2nd single line comment\n\twidth: 10px;\n}')
t('.tabs{width:10px;//end of line comment\n}', '.tabs {\n\twidth: 10px; //end of line comment\n}')
t('.tabs{width:10px;//end of line comment\nheight:10px;}', '.tabs {\n\twidth: 10px; //end of line comment\n\theight: 10px;\n}')
t('.tabs{width:10px;//end of line comment\nheight:10px;//another\n}', '.tabs {\n\twidth: 10px; //end of line comment\n\theight: 10px; //another\n}')
# Psuedo-classes vs Variables
t('@page :first {}')
# Assume the colon goes with the @name. If we're in LESS, this is required regardless of the at-string.
t('@page:first {}', '@page: first {}')
t('@page: first {}')
# SASS/SCSS
# Basic Interpolation
t('p {\n\t$font-size: 12px;\n\t$line-height: 30px;\n\tfont: #{$font-size}/#{$line-height};\n}')
t('p.#{$name} {}')
t(
'@mixin itemPropertiesCoverItem($items, $margin) {\n' +
'\twidth: calc((100% - ((#{$items} - 1) * #{$margin}rem)) / #{$items});\n' +
'\tmargin: 1.6rem #{$margin}rem 1.6rem 0;\n' +
'}')
#
    def testNewline(self):
        """end_with_newline=True must force exactly one trailing newline,
        even for empty input or input that already ends with one."""
        self.resetOptions()
        t = self.decodesto
        self.options.end_with_newline = True
        t("", "\n")
        t("\n", "\n")
        t(".tabs{}\n", ".tabs {}\n")
        t(".tabs{}", ".tabs {}\n")
    def testBasics(self):
        """Core formatting: empty input, simple rules, grouped selectors,
        @media blocks, quoted/braced content, and lead-in whitespace
        determining the base indent."""
        self.resetOptions()
        t = self.decodesto
        t("", "")
        t("\n", "")
        t(".tabs{}\n", ".tabs {}")
        t(".tabs{}", ".tabs {}")
        t(".tabs{color:red}", ".tabs {\n\tcolor: red\n}")
        t(".tabs{color:rgb(255, 255, 0)}", ".tabs {\n\tcolor: rgb(255, 255, 0)\n}")
        t(".tabs{background:url('back.jpg')}", ".tabs {\n\tbackground: url('back.jpg')\n}")
        t("#bla, #foo{color:red}", "#bla,\n#foo {\n\tcolor: red\n}")
        t("@media print {.tab{}}", "@media print {\n\t.tab {}\n}")
        t("@media print {.tab{background-image:url(foo@2x.png)}}", "@media print {\n\t.tab {\n\t\tbackground-image: url(foo@2x.png)\n\t}\n}")
        # braces/semicolons inside a quoted content: value must not split rules
        t("a:before {\n" +
            "\tcontent: 'a{color:black;}\"\"\\'\\'\"\\n\\n\\na{color:black}\';\n" +
            "}");
        # may not eat the space before "["
        t('html.js [data-custom="123"] {\n\topacity: 1.00;\n}')
        t('html.js *[data-custom="123"] {\n\topacity: 1.00;\n}')
        # lead-in whitespace determines base-indent.
        # lead-in newlines are stripped.
        t("\n\na, img {padding: 0.2px}", "a,\nimg {\n\tpadding: 0.2px\n}")
        t("   a, img {padding: 0.2px}", "   a,\n   img {\n   \tpadding: 0.2px\n   }")
        t("   \t \na, img {padding: 0.2px}", "   \t a,\n   \t img {\n   \t \tpadding: 0.2px\n   \t }")
        t("\n\n     a, img {padding: 0.2px}", "a,\nimg {\n\tpadding: 0.2px\n}")
    def testSeperateSelectors(self):
        """Comma-separated selectors are split onto their own lines."""
        self.resetOptions()
        t = self.decodesto
        t("#bla, #foo{color:red}", "#bla,\n#foo {\n\tcolor: red\n}")
        t("a, img {padding: 0.2px}", "a,\nimg {\n\tpadding: 0.2px\n}")
    def testBlockNesting(self):
        """Nested at-rules (@font-face inside @media, @media inside @media)
        keep one extra tab of indentation per nesting level."""
        self.resetOptions()
        t = self.decodesto
        t("#foo {\n\tbackground-image: url(foo@2x.png);\n\t@font-face {\n\t\tfont-family: 'Bitstream Vera Serif Bold';\n\t\tsrc: url('http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf');\n\t}\n}")
        t("@media screen {\n\t#foo:hover {\n\t\tbackground-image: url(foo@2x.png);\n\t}\n\t@font-face {\n\t\tfont-family: 'Bitstream Vera Serif Bold';\n\t\tsrc: url('http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf');\n\t}\n}")
        # Readable form of the fixture below:
        # @font-face {
        #     font-family: 'Bitstream Vera Serif Bold';
        #     src: url('http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf');
        # }
        # @media screen {
        #     #foo:hover {
        #         background-image: url(foo.png);
        #     }
        #     @media screen and (min-device-pixel-ratio: 2) {
        #         @font-face {
        #             font-family: 'Helvetica Neue'
        #         }
        #         #foo:hover {
        #             background-image: url(foo@2x.png);
        #         }
        #     }
        # }
        t("@font-face {\n\tfont-family: 'Bitstream Vera Serif Bold';\n\tsrc: url('http://developer.mozilla.org/@api/deki/files/2934/=VeraSeBd.ttf');\n}\n@media screen {\n\t#foo:hover {\n\t\tbackground-image: url(foo.png);\n\t}\n\t@media screen and (min-device-pixel-ratio: 2) {\n\t\t@font-face {\n\t\t\tfont-family: 'Helvetica Neue'\n\t\t}\n\t\t#foo:hover {\n\t\t\tbackground-image: url(foo@2x.png);\n\t\t}\n\t}\n}")
    def testOptions(self):
        """Non-default options: 2-space indent and selectors kept on one
        line (selector_separator_newline=False)."""
        self.resetOptions()
        self.options.indent_size = 2
        self.options.indent_char = ' '
        self.options.selector_separator_newline = False
        t = self.decodesto
        # pseudo-classes and pseudo-elements
        t("#foo:hover {\n  background-image: url(foo@2x.png)\n}")
        t("#foo *:hover {\n  color: purple\n}")
        t("::selection {\n  color: #ff0000;\n}")
        # TODO: don't break nested pseudo-classes
        t("@media screen {.tab,.bat:hover {color:red}}", "@media screen {\n  .tab, .bat:hover {\n    color: red\n  }\n}")
        # particular edge case with braces and semicolons inside tags that allows custom text
        t( "a:not(\"foobar\\\";{}omg\"){\ncontent: 'example\\';{} text';\ncontent: \"example\\\";{} text\";}",
            "a:not(\"foobar\\\";{}omg\") {\n  content: 'example\\';{} text';\n  content: \"example\\\";{} text\";\n}")
    def testLessCss(self):
        """LESS/SASS constructs: @variables, nested rules, parent
        references (&), imports, and nested pseudo-classes."""
        self.resetOptions()
        t = self.decodesto
        t('.well{ \n @well-bg:@bg-color;@well-fg:@fg-color;}','.well {\n\t@well-bg: @bg-color;\n\t@well-fg: @fg-color;\n}')
        t('.well {&.active {\nbox-shadow: 0 1px 1px @border-color, 1px 0 1px @border-color;}}',
            '.well {\n' +
            '\t&.active {\n' +
            '\t\tbox-shadow: 0 1px 1px @border-color, 1px 0 1px @border-color;\n' +
            '\t}\n' +
            '}')
        t('a {\n' +
            '\tcolor: blue;\n' +
            '\t&:hover {\n' +
            '\t\tcolor: green;\n' +
            '\t}\n' +
            '\t& & &&&.active {\n' +
            '\t\tcolor: green;\n' +
            '\t}\n' +
            '}')
        # Not sure if this is sensible
        # but I believe it is correct to not remove the space in "&: hover".
        t('a {\n' +
            '\t&: hover {\n' +
            '\t\tcolor: green;\n' +
            '\t}\n' +
            '}');
        # import
        t('@import "test";');
        # don't break nested pseudo-classes
        t("a:first-child{color:red;div:first-child{color:black;}}",
            "a:first-child {\n\tcolor: red;\n\tdiv:first-child {\n\t\tcolor: black;\n\t}\n}");
        # handle SASS/LESS parent reference
        t("div{&:first-letter {text-transform: uppercase;}}",
            "div {\n\t&:first-letter {\n\t\ttext-transform: uppercase;\n\t}\n}");
        # nested modifiers (&:hover etc)
        t(".tabs{&:hover{width:10px;}}", ".tabs {\n\t&:hover {\n\t\twidth: 10px;\n\t}\n}")
        t(".tabs{&.big{width:10px;}}", ".tabs {\n\t&.big {\n\t\twidth: 10px;\n\t}\n}")
        t(".tabs{&>big{width:10px;}}", ".tabs {\n\t&>big {\n\t\twidth: 10px;\n\t}\n}")
        t(".tabs{&+.big{width:10px;}}", ".tabs {\n\t&+.big {\n\t\twidth: 10px;\n\t}\n}")
        # nested rules
        t(".tabs{.child{width:10px;}}", ".tabs {\n\t.child {\n\t\twidth: 10px;\n\t}\n}")
        # variables
        t("@myvar:10px;.tabs{width:10px;}", "@myvar: 10px;\n.tabs {\n\twidth: 10px;\n}")
        t("@myvar:10px; .tabs{width:10px;}", "@myvar: 10px;\n.tabs {\n\twidth: 10px;\n}")
def decodesto(self, input, expectation=None):
if expectation == None:
expectation = input
self.assertMultiLineEqual(
cssbeautifier.beautify(input, self.options), expectation)
# if the expected is different from input, run it again
# expected output should be unchanged when run twice.
if not expectation != input:
self.assertMultiLineEqual(
cssbeautifier.beautify(expectation, self.options), expectation)
# Everywhere we do newlines, they should be replaced with opts.eol
self.options.eol = '\r\\n';
expectation = expectation.replace('\n', '\r\n')
self.assertMultiLineEqual(
cssbeautifier.beautify(input, self.options), expectation)
input = input.replace('\n', '\r\n')
self.assertMultiLineEqual(
cssbeautifier.beautify(input, self.options), expectation)
self.options.eol = '\n'
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
| mit |
bcheung92/Paperproject | gem5/tests/configs/memtest-filter.py | 23 | 3208 | # Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ron Dreslinski
import m5
from m5.objects import *
m5.util.addToPath('../configs/common')
from Caches import *

# gem5 regression config: N MemTest traffic generators, each behind a
# private L1, sharing one L2 over a snoop-filtered crossbar, in timing mode.
# MAX CORES IS 8 with the false sharing method
nb_cores = 8
# One memory tester per core (xrange: this config targets Python 2).
cpus = [ MemTest() for i in xrange(nb_cores) ]
# system simulated
system = System(cpu = cpus,
                physmem = SimpleMemory(),
                membus = SystemXBar(width=16, snoop_filter = SnoopFilter()))
# Dummy voltage domain for all our clock domains
system.voltage_domain = VoltageDomain()
system.clk_domain = SrcClockDomain(clock = '1GHz',
                                   voltage_domain = system.voltage_domain)
# Create a separate clock domain for components that should run at
# CPUs frequency
system.cpu_clk_domain = SrcClockDomain(clock = '2GHz',
                                       voltage_domain = system.voltage_domain)
# L1-to-L2 crossbar, also snoop-filtered, clocked at CPU frequency.
system.toL2Bus = L2XBar(clk_domain = system.cpu_clk_domain,
                        snoop_filter = SnoopFilter())
system.l2c = L2Cache(clk_domain = system.cpu_clk_domain, size='64kB', assoc=8)
system.l2c.cpu_side = system.toL2Bus.master
# connect l2c to membus
system.l2c.mem_side = system.membus.slave
# add L1 caches
for cpu in cpus:
    # All cpus are associated with cpu_clk_domain
    cpu.clk_domain = system.cpu_clk_domain
    cpu.l1c = L1Cache(size = '32kB', assoc = 4)
    cpu.l1c.cpu_side = cpu.port
    cpu.l1c.mem_side = system.toL2Bus.slave
system.system_port = system.membus.slave
# connect memory to membus
system.physmem.port = system.membus.master
# -----------------------
# run simulation
# -----------------------
root = Root( full_system = False, system = system )
root.system.mem_mode = 'timing'
| mit |
thiagoss/splitencoder | splitencoder/splitter.py | 1 | 4258 | #!/usr/bin/env python
#
# splitencoder
#
# Copyright (C) 2015 Samsung Electronics. All rights reserved.
# Author: Thiago Santos <thiagoss@osg.samsung.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301, USA.
#
import sys
import gi
gi.require_version('Gst', '1.0')
from gi.repository import GObject, Gst
import utils
def on_message(bus, message, udata):
    """GStreamer bus watch: tear down the pipeline and quit the main loop
    when the stream ends or an error is reported.

    udata is a (pipeline, GObject.MainLoop) tuple supplied at add_watch time.
    """
    pipeline, loop = udata
    if message.type == Gst.MessageType.EOS:
        pipeline.set_state(Gst.State.NULL)
        loop.quit()
    elif message.type == Gst.MessageType.ERROR:
        # Python 2 print statement (this module is Python 2 code).
        print message.parse_error()
        pipeline.set_state(Gst.State.NULL)
        loop.quit()
    # True keeps the watch installed for subsequent messages.
    return True
def on_autoplug_factories(element, pad, caps, udata):
    """uridecodebin 'autoplug-factories' handler.

    Offer only demuxer/parser factories (rank >= MARGINAL) that can sink
    *caps*; return None when nothing matches so autoplugging stops here.
    """
    candidates = Gst.ElementFactory.list_get_elements(
        Gst.ELEMENT_FACTORY_TYPE_DEMUXER | Gst.ELEMENT_FACTORY_TYPE_PARSER,
        Gst.Rank.MARGINAL)
    matching = Gst.ElementFactory.list_filter(
        candidates, caps, Gst.PadDirection.SINK, caps.is_fixed())
    if len(matching) == 0:
        # TODO check if this is indeed a parsed type and not some unhandled
        # format (missing elements)
        return None
    return matching
def on_autoplug_continue(element, pad, caps, udata):
    """uridecodebin 'autoplug-continue' handler.

    Stop autoplugging (return False) once the stream is already parsed or
    framed; otherwise keep decoding (return True).
    """
    structure = caps.get_structure(0)
    for done_flag in ('parsed', 'framed'):
        if structure.has_field(done_flag):
            if structure.get_value(done_flag) == True:
                return False
    return True
def on_pad_added(element, pad, udata):
    """Link a freshly exposed uridecodebin pad to splitmuxsink.

    Video pads go to the 'video' template, audio pads to 'audio_%u'; any
    other pad is left unlinked.  A queue element is inserted in between to
    decouple the branches.  udata is (pipeline, splitmuxsink).
    """
    pipeline, splitmuxsink = udata
    other_pad = None
    # Can't use 'get_compatible_pad' because splitmuxsink pad templates
    # are all ANY so it will always match the first
    if utils.caps_is_video(pad.get_current_caps()):
        klass = type(splitmuxsink)
        tmpl = klass.get_pad_template('video')
        other_pad = splitmuxsink.request_pad(tmpl, None, None)
    elif utils.caps_is_audio(pad.get_current_caps()):
        klass = type(splitmuxsink)
        tmpl = klass.get_pad_template('audio_%u')
        other_pad = splitmuxsink.request_pad(tmpl, None, None)
    else:
        caps = pad.get_current_caps()
        # Python 2 print statement (this module is Python 2 code).
        print 'leaving pad %s unlinked - "%s"' % (pad.get_name(), caps.to_string() if caps else 'no caps')
    if other_pad:
        # queue decouples the decode branch from the muxer sink.
        queue = Gst.ElementFactory.make('queue')
        pipeline.add(queue)
        queue.sync_state_with_parent()
        pad.link(queue.get_static_pad('sink'))
        queue.get_static_pad('src').link(other_pad)
def split(input_uri, output_dir):
    """Decode *input_uri* and write it as ~10-second Matroska segments
    named segment_%09d.mkv inside *output_dir* (blocks until EOS/error)."""
    Gst.init()
    main_loop = GObject.MainLoop()
    pipeline = Gst.Pipeline()

    decoder = Gst.ElementFactory.make('uridecodebin')
    muxsink = Gst.ElementFactory.make('splitmuxsink')
    pipeline.add(decoder)
    pipeline.add(muxsink)

    decoder.set_property('uri', input_uri)
    decoder.connect('autoplug-factories', on_autoplug_factories, None)
    decoder.connect('autoplug-continue', on_autoplug_continue, None)
    decoder.connect('pad-added', on_pad_added, (pipeline, muxsink))

    # TODO fix mp4mux to properly segment files
    muxsink.set_property('muxer', Gst.ElementFactory.make('matroskamux'))
    muxsink.set_property('location', output_dir + '/' + 'segment_%09d.mkv')
    muxsink.set_property('max-size-time', 10000000000)  # 10s segments

    pipeline.get_bus().add_watch(0, on_message, (pipeline, main_loop))
    pipeline.set_state(Gst.State.PLAYING)
    main_loop.run()
# Command-line entry point: splitter.py <input-uri> <output-dir>.
# NOTE(review): Gst.init() is called again inside split(); presumably the
# double initialization is harmless -- confirm against the GStreamer docs.
if __name__ == '__main__':
    Gst.init()
    input_uri = sys.argv[1]
    output_dir = sys.argv[2]
    #TODO validate args
    split(input_uri, output_dir)
| lgpl-2.1 |
aio-libs/aiohttp_session | tests/test_nacl_storage.py | 1 | 9303 | import asyncio
import json
import time
from typing import Any, Dict, MutableMapping, Optional, cast
import nacl.secret
import nacl.utils
import pytest
from aiohttp import web
from aiohttp.test_utils import TestClient
from aiohttp.web_middlewares import _Handler
from nacl.encoding import Base64Encoder
from aiohttp_session import Session, get_session, new_session, session_middleware
from aiohttp_session.nacl_storage import NaClCookieStorage
from .typedefs import AiohttpClient
def test_invalid_key() -> None:
    """NaClCookieStorage must reject keys shorter than SecretBox requires."""
    with pytest.raises(ValueError):
        NaClCookieStorage(b'123')  # short key
def make_cookie(
    client: TestClient,
    secretbox: nacl.secret.SecretBox,
    data: Dict[str, Any]
) -> None:
    """Plant an encrypted AIOHTTP_SESSION cookie holding *data* in the
    client's cookie jar.

    Mirrors the on-the-wire format NaClCookieStorage writes: a JSON
    document {'session': ..., 'created': ...} encrypted with *secretbox*
    under a random nonce and base64-encoded.
    """
    session_data = {
        'session': data,
        'created': int(time.time())
    }
    cookie_data = json.dumps(session_data).encode('utf-8')
    nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE)
    # Bind the ciphertext to its own name instead of rebinding the
    # Dict-typed `data` parameter to a str (fixes a typing violation).
    encrypted = secretbox.encrypt(cookie_data, nonce,
                                  encoder=Base64Encoder).decode('utf-8')
    # Ignoring type until aiohttp#4252 is released
    client.session.cookie_jar.update_cookies(
        {'AIOHTTP_SESSION': encrypted}  # type: ignore
    )
def create_app(
    handler: _Handler,
    key: bytes,
    max_age: Optional[int] = None
) -> web.Application:
    """Build a minimal test application serving *handler* on GET /, with
    NaCl cookie session storage keyed by *key*."""
    storage = NaClCookieStorage(key, max_age=max_age)
    app = web.Application(middlewares=[session_middleware(storage)])
    app.router.add_route('GET', '/', handler)
    return app
def decrypt(secretbox: nacl.secret.SecretBox, cookie_value: str) -> Any:
    """Reverse of make_cookie: base64-decode, decrypt and JSON-parse the
    raw AIOHTTP_SESSION cookie value."""
    # isinstance, not `type(...) ==`, per PEP 8 (pycodestyle E721).
    assert isinstance(cookie_value, str)
    return json.loads(
        secretbox.decrypt(cookie_value.encode('utf-8'),
                          encoder=Base64Encoder).decode('utf-8')
    )
@pytest.fixture
def secretbox(key: bytes) -> nacl.secret.SecretBox:  # type: ignore[misc]  # No nacl types
    """SecretBox bound to the shared test key, for crafting/reading cookies."""
    return nacl.secret.SecretBox(key)
@pytest.fixture
def key() -> bytes:
    """Fresh random SecretBox key for each test."""
    return nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE)  # type: ignore[no-any-return]
async def test_create_new_session(
    aiohttp_client: AiohttpClient,
    secretbox: nacl.secret.SecretBox,
    key: bytes
) -> None:
    """A request without a session cookie yields a fresh, empty session."""
    async def handler(request: web.Request) -> web.StreamResponse:
        session = await get_session(request)
        assert isinstance(session, Session)
        assert session.new
        assert not session._changed
        assert cast(MutableMapping[str, Any], {}) == session
        return web.Response(body=b'OK')

    client = await aiohttp_client(create_app(handler, key))
    resp = await client.get('/')
    assert resp.status == 200
async def test_load_existing_session(
    aiohttp_client: AiohttpClient,
    secretbox: nacl.secret.SecretBox,
    key: bytes
) -> None:
    """A pre-made encrypted cookie is decrypted into the session mapping."""
    async def handler(request: web.Request) -> web.StreamResponse:
        session = await get_session(request)
        assert isinstance(session, Session)
        assert not session.new
        assert not session._changed
        assert cast(MutableMapping[str, Any], {'a': 1, 'b': 12}) == session
        return web.Response(body=b'OK')

    client = await aiohttp_client(create_app(handler, key))
    make_cookie(client, secretbox, {'a': 1, 'b': 12})
    resp = await client.get('/')
    assert resp.status == 200
async def test_change_session(
    aiohttp_client: AiohttpClient,
    secretbox: nacl.secret.SecretBox,
    key: bytes
) -> None:
    """Mutating the session re-encrypts the cookie with merged contents
    and the expected cookie attributes (httponly, path=/)."""
    async def handler(request: web.Request) -> web.StreamResponse:
        session = await get_session(request)
        session['c'] = 3
        return web.Response(body=b'OK')

    client = await aiohttp_client(create_app(handler, key))
    make_cookie(client, secretbox, {'a': 1, 'b': 2})
    resp = await client.get('/')
    assert resp.status == 200

    morsel = resp.cookies['AIOHTTP_SESSION']
    cookie_data = decrypt(secretbox, morsel.value)
    assert 'session' in cookie_data
    assert 'a' in cookie_data['session']
    assert 'b' in cookie_data['session']
    assert 'c' in cookie_data['session']
    assert 'created' in cookie_data
    assert cookie_data['session']['a'] == 1
    assert cookie_data['session']['b'] == 2
    assert cookie_data['session']['c'] == 3
    assert morsel['httponly']
    assert '/' == morsel['path']
async def test_del_cookie_on_session_invalidation(
    aiohttp_client: AiohttpClient,
    secretbox: nacl.secret.SecretBox,
    key: bytes
) -> None:
    """session.invalidate() empties the cookie value on the response."""
    async def handler(request: web.Request) -> web.StreamResponse:
        session = await get_session(request)
        session.invalidate()
        return web.Response(body=b'OK')

    client = await aiohttp_client(create_app(handler, key))
    make_cookie(client, secretbox, {'a': 1, 'b': 2})
    resp = await client.get('/')
    assert resp.status == 200

    morsel = resp.cookies['AIOHTTP_SESSION']
    assert '' == morsel.value
    assert not morsel['httponly']
    assert morsel['path'] == '/'
async def test_nacl_session_fixation(
    aiohttp_client: AiohttpClient,
    secretbox: nacl.secret.SecretBox,
    key: bytes
) -> None:
    """Replaying a cookie captured before logout must not resurrect the
    old session (session-fixation protection)."""
    async def login(request: web.Request) -> web.StreamResponse:
        session = await get_session(request)
        session['k'] = 'v'
        return web.Response()

    async def logout(request: web.Request) -> web.StreamResponse:
        session = await get_session(request)
        session.invalidate()
        return web.Response()

    app = create_app(login, key)
    app.router.add_route('DELETE', '/', logout)
    client = await aiohttp_client(app)
    resp = await client.get('/')
    assert 'AIOHTTP_SESSION' in resp.cookies
    evil_cookie = resp.cookies['AIOHTTP_SESSION'].value
    resp = await client.delete('/')
    assert resp.cookies['AIOHTTP_SESSION'].value == ""
    # Ignoring type until aiohttp#4252 is released
    client.session.cookie_jar.update_cookies(
        {'AIOHTTP_SESSION': evil_cookie}  # type: ignore
    )
    resp = await client.get('/')
    assert resp.cookies['AIOHTTP_SESSION'].value != evil_cookie
async def test_load_session_dont_load_expired_session(
    aiohttp_client: AiohttpClient,
    key: bytes
) -> None:
    """After sleeping past max_age (2s), the session comes back empty."""
    async def handler(request: web.Request) -> web.StreamResponse:
        session = await get_session(request)
        exp_param = request.rel_url.query.get('exp', None)
        if exp_param is None:
            session['a'] = 1
            session['b'] = 2
        else:
            assert cast(MutableMapping[str, Any], {}) == session

        return web.Response(body=b'OK')

    client = await aiohttp_client(
        create_app(handler, key, 2)
    )
    resp = await client.get('/')
    assert resp.status == 200

    await asyncio.sleep(5)

    resp = await client.get('/?exp=yes')
    assert resp.status == 200
async def test_load_corrupted_session(
    aiohttp_client: AiohttpClient,
    key: bytes
) -> None:
    """An undecryptable cookie value falls back to a fresh empty session."""
    async def handler(request: web.Request) -> web.StreamResponse:
        session = await get_session(request)
        assert isinstance(session, Session)
        assert session.new
        assert cast(MutableMapping[str, Any], {}) == session
        return web.Response(body=b'OK')

    client = await aiohttp_client(create_app(handler, key))
    # Ignoring type until aiohttp#4252 is released
    client.session.cookie_jar.update_cookies(
        {'AIOHTTP_SESSION': 'bad key'}  # type: ignore
    )
    resp = await client.get('/')
    assert resp.status == 200
async def test_load_session_different_key(
    aiohttp_client: AiohttpClient,
    key: bytes
) -> None:
    """A cookie encrypted under a different key is ignored, not an error."""
    async def handler(request: web.Request) -> web.StreamResponse:
        session = await get_session(request)
        assert isinstance(session, Session)
        assert session.new
        assert cast(MutableMapping[str, Any], {}) == session
        return web.Response(body=b'OK')

    client = await aiohttp_client(create_app(handler, key))
    # create another box with another key
    key = nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE)
    secretbox = nacl.secret.SecretBox(key)
    make_cookie(client, secretbox, {'a': 1, 'b': 12})
    resp = await client.get('/')
    assert resp.status == 200
async def test_load_expired_session(
    aiohttp_client: AiohttpClient,
    key: bytes
) -> None:
    """The storage itself must drop a replayed cookie older than max_age,
    so the handler never sees the stale 'created' timestamp."""
    MAX_AGE = 2

    async def login(request: web.Request) -> web.StreamResponse:
        session = await new_session(request)
        session['created'] = int(time.time())
        return web.Response()

    async def handler(request: web.Request) -> web.StreamResponse:
        session = await get_session(request)
        created = session.get('created', None) if not session.new else None
        text = ''
        if created is not None and (time.time() - created) > MAX_AGE:
            text += 'WARNING!'
        return web.Response(text=text)

    app = create_app(handler, key, max_age=MAX_AGE)
    app.router.add_route('POST', '/', login)
    client = await aiohttp_client(app)

    resp = await client.post('/')
    assert 'AIOHTTP_SESSION' in resp.cookies
    cookie = resp.cookies['AIOHTTP_SESSION'].value
    await asyncio.sleep(MAX_AGE + 1)
    # Ignoring type until aiohttp#4252 is released
    client.session.cookie_jar.update_cookies(
        {'AIOHTTP_SESSION': cookie}  # type: ignore
    )
    resp = await client.get('/')
    body = await resp.text()
    assert body == ''
| apache-2.0 |
deanhiller/databus | webapp/play1.3.x/python/Lib/nturl2path.py | 5 | 2302 | """Convert a NT pathname to a file URL and vice versa."""
def url2pathname(url):
    """OS-specific conversion from a relative URL of the 'file' scheme
    to a file system path; not recommended for general use.

    e.g. ///C|/foo/bar/spam.foo becomes C:\\foo\\bar\\spam.foo.
    Raises IOError for malformed drive specifiers.
    """
    import string, urllib
    # Windows itself uses ":" even in URLs.
    url = url.replace(':', '|')
    if '|' not in url:
        # No drive specifier, just convert slashes
        if url[:4] == '////':
            # path is something like ////host/path/on/remote/host
            # convert this to \\host\path\on\remote\host
            # (notice halving of slashes at the start of the path)
            url = url[2:]
        components = url.split('/')
        # make sure not to convert quoted slashes :-)
        return urllib.unquote('\\'.join(components))
    comp = url.split('|')
    if len(comp) != 2 or comp[0][-1] not in string.ascii_letters:
        error = 'Bad URL: ' + url
        # Parenthesized raise: equivalent in Python 2 and valid in Python 3.
        raise IOError(error)
    drive = comp[0][-1].upper()
    components = comp[1].split('/')
    path = drive + ':'
    for part in components:
        if part:
            path = path + '\\' + urllib.unquote(part)
    return path
def pathname2url(p):
    """OS-specific conversion from a file system path to a relative URL
    of the 'file' scheme; not recommended for general use.

    e.g. C:\\foo\\bar\\spam.foo becomes ///C|/foo/bar/spam.foo.
    Raises IOError for malformed drive specifiers.
    """
    import urllib
    if ':' not in p:
        # No drive specifier, just convert slashes and quote the name
        if p[:2] == '\\\\':
            # path is something like \\host\path\on\remote\host
            # convert this to ////host/path/on/remote/host
            # (notice doubling of slashes at the start of the path)
            p = '\\\\' + p
        components = p.split('\\')
        return urllib.quote('/'.join(components))
    comp = p.split(':')
    if len(comp) != 2 or len(comp[0]) > 1:
        error = 'Bad path: ' + p
        # Parenthesized raise: equivalent in Python 2 and valid in Python 3.
        raise IOError(error)
    drive = urllib.quote(comp[0].upper())
    components = comp[1].split('\\')
    path = '///' + drive + '|'
    for part in components:
        if part:
            path = path + '/' + urllib.quote(part)
    return path
| mpl-2.0 |
SDX2000/hotwire | hotwire/builtins/json.py | 1 | 2675 | # This file is part of the Hotwire Shell project API.
# Copyright (C) 2007 Colin Walters <walters@verbum.org>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE X CONSORTIUM BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
# THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os,sys,pickle,inspect,locale
from io import StringIO
from hotwire.fs import FilePath
from hotwire.builtin import Builtin, BuiltinRegistry, InputStreamSchema
import simplejson
class LossyObjectJSONDumper(simplejson.JSONEncoder):
    """JSON encoder that degrades otherwise unserializable objects to a
    sorted {attribute-name: type-string} map of their public attributes."""
    def __init__(self, *args, **kwargs):
        # Plain delegation to JSONEncoder; kept as an extension point.
        super(LossyObjectJSONDumper, self).__init__(*args, **kwargs)
    def default(self, o):
        # Build a name -> type-string map of the public (non-underscore)
        # attributes, sorted with locale-aware collation.
        # NOTE(review): sorted() is given a positional cmp function, which
        # is Python 2-only -- this module predates Python 3.
        # NOTE(review): returning self.encode(...) hands back an
        # already-encoded string, so the map appears as a JSON string,
        # not a nested object; presumably intentional -- confirm.
        name_repr = {}
        for name,member in sorted(inspect.getmembers(o), lambda a,b: locale.strcoll(a[0],b[0])):
            if name.startswith('_'):
                continue
            name_repr[name] = str(type(member))
        return self.encode(name_repr)
class JsonBuiltin(Builtin):
    __doc__ = _("""Convert object stream to JSON.""")
    def __init__(self):
        # Accepts any object stream, emits str lines; safe to re-run.
        super(JsonBuiltin, self).__init__('json',
                                          output=str, # 'any'
                                          input=InputStreamSchema('any'),
                                          idempotent=True,
                                          argspec=None)
    def execute(self, context, args, options=[]):
        # NOTE(review): mutable default `options=[]` is a known smell
        # (B006); harmless here because it is never read or mutated.
        out = StringIO()
        for o in context.input:
            # LossyObjectJSONDumper degrades unserializable objects.
            simplejson.dump(o, out, indent=2, cls=LossyObjectJSONDumper)
        # Should support binary streaming
        # Yield line by line, stripping the trailing newline when present.
        for line in StringIO(out.getvalue()):
            if line.endswith('\n'):
                yield line[0:-1]
            else:
                yield line
BuiltinRegistry.getInstance().register_hotwire(JsonBuiltin())
| gpl-2.0 |
paineliu/tflearn | helen.py | 1 | 1084 | import os
import os  # re-imported so this section stands alone

from PIL import Image

# HELEN face-landmark dataset locations.
# NOTE(review): hard-coded, machine-specific paths -- parameterise before reuse.
img_path = '/home/palm/deep/helen/train'
lab_path = '/home/palm/deep/helen/annotation'
filename = '/home/palm/deep/helen/trainnames.txt'

index = 1
# `with` blocks fix the original's leaked file handles (f and fl were
# never closed).
with open(filename) as f:
    for each in f:
        each = each.strip()
        img_file = os.path.join(img_path, each + '.jpg')
        img = Image.open(img_file)
        width, height = img.size
        img = img.resize((256, 256))
        img = img.convert('L')
        # NOTE(review): assumes annotation files 1.txt, 2.txt, ... are in the
        # same order as trainnames.txt -- confirm against the dataset layout.
        lab_file = os.path.join(lab_path, str(index) + '.txt')
        with open(lab_file) as fl:
            for line in fl:
                item = line.strip().split(',')
                if len(item) == 2:
                    # Rescale the landmark into the resized 256x256 frame.
                    x = int(float(item[0]) * 256 / width)
                    y = int(float(item[1]) * 256 / height)
                    if 0 < x < img.size[0] and 0 < y < img.size[1]:
                        # Paint a 2x2 white marker at the landmark.
                        img.putpixel((x, y), 0xffffff)
                        img.putpixel((x - 1, y), 0xffffff)
                        img.putpixel((x, y - 1), 0xffffff)
                        img.putpixel((x - 1, y - 1), 0xffffff)
                    else:
                        # Version-portable replacement for the original
                        # Python-2-only `print index, each, ...` statement.
                        print("%s %s %s %s %s" % (index, each, img.size, x, y))
        index += 1
        img.show()
        break  # intentional: preview only the first image, then stop
douglask3/allocationModules | runGday/DUKE/simulations/duke_spinup_to_equilibrium.py | 1 | 24694 | #!/usr/bin/env python
""" Duke Simulation for NCEAS FACE experiment
Site History:
-------------
* Pre-1850 temperate broadleaved deciduous forest
* Harvest forest.
* Grassland from 1850-1982, mowed (+/- annually - DO NOT SIMULATE MOWING :P)
* burnt prior to planting
* At start of experiment aboveground biomass 5.5-11 kg C m-2
* Needleaf forest planted in 1983
Spin-up the model to a steady state. Recycle the met data in batches of a
50 years (over and over), until the SOM, plant and litter C pools cease to
change.
-> Spinup with forest params, fixed NDEP, fixed CO2 .
-> Vary NDEP/CO2 for about 200 odd yrs...using grassland params so that we get
through the industrial to the 1980s period.
"""
import os
import shutil
import sys
import subprocess
import numpy as np
from gday import gday as model
from gday import adjust_gday_param_file as ad
from gday._version import __version__ as git_revision
__author__ = "Martin De Kauwe"
__version__ = "1.0 (14.12.2014)"
__email__ = "mdekauwe@gmail.com"
def _run_traceability_sim(experiment_id, site, base_dir, param_dir, met_dir,
                          run_dir, mtag, out_fn):
    """Run one post-spin-up traceability simulation and convert its output.

    Copies the spun-up parameter file to a fresh "trace" config, points it
    at the met forcing file ``mtag``, runs GDAY with daily output written to
    ``out_fn`` and translates the result into the NCEAS format.  This
    replaces three copy-pasted blocks in the original script.
    """
    shutil.copy(os.path.join(param_dir,
                             "%s_%s_model_spunup.cfg" % (experiment_id, site)),
                os.path.join(param_dir,
                             "%s_%s_model_spunup_trace.cfg" % (experiment_id, site)))
    itag = "%s_%s_model_spunup_trace" % (experiment_id, site)
    otag = "%s_%s_model_indust" % (experiment_id, site)
    out_param_fname = os.path.join(param_dir, otag + ".cfg")
    cfg_fname = os.path.join(param_dir, itag + ".cfg")
    met_fname = os.path.join(met_dir, mtag)
    out_fname = os.path.join(run_dir, out_fn)

    replace_dict = {
        # git stuff
        "git_hash": str(git_revision),

        # files
        "out_param_fname": "%s" % (out_param_fname),
        "cfg_fname": "%s" % (cfg_fname),
        "met_fname": "%s" % (met_fname),
        "out_fname": "%s" % (out_fname),

        "print_options": "daily",
    }
    ad.adjust_param_file(cfg_fname, replace_dict)
    G = model.Gday(cfg_fname)
    G.run_sim()

    # translate output to NCEAS style output
    # add this directory to python search path so we can find the scripts!
    sys.path.append(os.path.join(base_dir, "scripts"))
    import translate_GDAY_output_to_NCEAS_format as tr
    tr.translate_output(out_fname, met_fname)


def main(experiment_id, site, SPIN_UP=True, POST_INDUST=True, SPIN_UP_SIMS=True):
    """Drive the Duke spin-up / post-industrial / traceability simulations.

    Parameters
    ----------
    experiment_id : str
        Experiment tag used in parameter/output file names (e.g. "NCEAS").
    site : str
        Site tag used in file names (e.g. "DUKE").
    SPIN_UP : bool
        Run the equilibrium spin-up with forest parameters.
    POST_INDUST : bool
        Run ~1850-1983 with grassland parameters and varying CO2/NDEP.
    SPIN_UP_SIMS : bool
        Run the three equilibrium traceability simulations (280 ppm,
        ambient, elevated CO2) and translate their output.
    """
    # dir names
    base_param_name = "base_start"
    base_dir = os.path.dirname(os.getcwd())  # (duplicate assignment removed)
    # NOTE(review): hard-coded, machine-specific path -- confirm before reuse.
    base_param_dir = "/Users/mq42056055/Documents/gdayFresh/example/params"
    param_dir = os.path.join(base_dir, "params")
    met_dir = os.path.join(base_dir, "met_data")
    run_dir = os.path.join(base_dir, "outputs")

    if SPIN_UP:
        # copy base files to make two new experiment files
        shutil.copy(os.path.join(base_param_dir, base_param_name + ".cfg"),
                    os.path.join(param_dir, "%s_%s_model_spinup.cfg" %
                                 (experiment_id, site)))

        # Run model to equilibrium assuming forest, growing C pools from
        # effectively zero.
        itag = "%s_%s_model_spinup" % (experiment_id, site)
        otag = "%s_%s_model_spunup" % (experiment_id, site)
        mtag = "%s_met_data_equilibrium_50_yrs.csv" % (site)
        out_fn = itag + "_equilib.out"
        out_param_fname = os.path.join(param_dir, otag + ".cfg")
        cfg_fname = os.path.join(param_dir, itag + ".cfg")
        met_fname = os.path.join(met_dir, mtag)
        out_fname = os.path.join(run_dir, out_fn)

        replace_dict = {
            # git stuff
            "git_hash": str(git_revision),

            # files
            "out_param_fname": "%s" % (out_param_fname),
            "cfg_fname": "%s" % (cfg_fname),
            "met_fname": "%s" % (met_fname),
            "out_fname": "%s" % (out_fname),

            # state - default C:N 25.
            # (a duplicate "age": "0.0" entry in the original dict was removed)
            "age": "0.0",
            "canht": "17.0",  # Canopy height increased from 16m in 2001 to 18m in 2004 at Duke
            "activesoil": "0.001",
            "activesoiln": "0.00004",
            "branch": "0.001",
            "branchn": "0.00004",
            "cstore": "0.001",
            "inorgn": "0.00004",
            "metabsoil": "0.0",
            "metabsoiln": "0.0",
            "metabsurf": "0.0",
            "metabsurfn": "0.0",
            "nstore": "0.00004",
            "passivesoil": "0.001",
            "passivesoiln": "0.0004",
            "prev_sma": "1.0",
            "root": "0.001",
            "root_depth": "-9999.9",
            "rootn": "0.00004",
            "sapwood": "0.001",
            "shoot": "0.001",
            "shootn": "0.00004",
            "slowsoil": "0.001",
            "slowsoiln": "0.00004",
            "stem": "0.001",
            "stemn": "0.00004",
            "stemnimm": "0.00004",
            "stemnmob": "0.0",
            "structsoil": "0.001",
            "structsoiln": "0.00004",
            "structsurf": "0.001",
            "structsurfn": "0.00004",
            "croot": "0.0",    # don't simulate coarse roots
            "crootn": "0.0",   # don't simulate coarse roots

            # parameters
            "latitude": "35.9",
            "intercep_frac": "0.15",
            "max_intercep_lai": "3.0",
            "albedo": "0.123",  # modis site avg
            "finesoil": "0.51",  # set based on silt+clay fractions of topsoil 0.42+0.09=0.5
            "slamax": "4.4",   # Protocol [m2 kg-1 DW]
            "sla": "4.4",      # Protocol [m2 kg-1 DW]
            "slazero": "4.4",  # Protocol [m2 kg-1 DW]
            "cfracts": "0.5",
            "lai_closed": "0.5",  # I am effectively turning this feature off by setting it so low
            # Alternative allocation parameterisations that were tried are
            # preserved below for provenance:
            #"c_alloc_fmax": "0.25",
            #"c_alloc_fmin": "0.25",
            #"c_alloc_rmax": "0.05",
            #"c_alloc_rmin": "0.05",
            #"c_alloc_bmax": "0.2",
            #"c_alloc_bmin": "0.2",
            #"c_alloc_fmax": "0.3",
            #"c_alloc_fmin": "0.3",
            #"c_alloc_rmax": "0.3",
            #"c_alloc_rmin": "0.3",
            #"c_alloc_bmax": "0.2",
            #"c_alloc_bmin": "0.2",
            #"c_alloc_cmax": "0.0", # turn off coarse roots!
            "c_alloc_fmax": "0.35",
            "c_alloc_fmin": "0.15",
            "c_alloc_rmax": "0.35",
            "c_alloc_rmin": "0.05",
            "c_alloc_bmax": "0.1",
            "c_alloc_bmin": "0.1",
            "c_alloc_cmax": "0.0",  # turn off coarse roots!
            "fretrans": "0.5",
            "rretrans": "0.0",
            "bretrans": "0.0",
            "wretrans": "0.0",
            "ncwnewz": "0.003",
            "ncwnew": "0.003",
            "ncwimmz": "0.003",
            "ncwimm": "0.003",
            "ncbnewz": "0.003",
            "ncbnew": "0.003",
            "ncrfac": "0.8",
            "ncmaxfyoung": "0.04",
            "ncmaxfold": "0.04",
            "ncmaxr": "0.03",
            "retransmob": "0.0",
            "fdecay": "0.59988",     # Protocol [years-1]
            "fdecaydry": "0.59988",  # Protocol
            "rdecay": "0.33333",     # Protocol
            "rdecaydry": "0.33333",  # Protocol
            "bdecay": "0.02",        # No data, assuming 50 years
            "wdecay": "0.02",
            "crdecay": "0.00",       # turn off coarse roots!
            "watdecaydry": "0.0",
            "watdecaywet": "0.1",
            "ligshoot": "0.24",   # Based on White et al. 2000 for ENF
            "ligroot": "0.22",    # Based on White et al. 2000
            "rateuptake": "3.0",  # set somewhat (very) arbitarly to get an LAI ~ 4.
            "rateloss": "0.5",
            "wcapac_root": "96.75",    # [mm] (FC (m3/m-3)-WP (m3/m-3)) * rooting_depth (mm) using derived values and depth from protocol, 750 mm (FC=0.164 - WP=0.035)
            "wcapac_topsoil": "25.8",  # [mm] (FC (m3/m-3)-WP (m3/m-3)) * rooting_depth (mm) using derived values and depth from protocol, assuming 200 mm top soil following Corbeels 2005a (FC=0.164 - WP=0.035)
            "ctheta_topsoil": "0.5",   # Derive based on soil type clay_loam
            "ntheta_topsoil": "5.0",   # Derive based on soil type clay_loam
            "ctheta_root": "0.4",      # Derive based on soil type clay
            "ntheta_root": "3.0",      # Derive based on soil type clay
            "topsoil_type": "clay_loam",
            "rootsoil_type": "clay",
            "measurement_temp": "25.0",
            "dz0v_dh": "0.075",  # However I have used value from Jarvis, quoted in Jones 1992, pg. 67. Produces a value within the bounds of 3.5-1.1 mol m-2 s-1 Drake, 2010, GCB for canht=17
            "displace_ratio": "0.78",
            "g1": "2.74",
            #"jmaxna": "60.0",   # Original values Belinda had, I think based on Crous 2008, fig 2. Although those values I think are measured at 28 and 30 deg, the assumption being here that this is the same as 25 deg!
            #"jmaxnb": "0.0",    # Original values Belinda had, I think based on Crous 2008, fig 2. Although those values I think are measured at 28 and 30 deg, the assumption being here that this is the same as 25 deg!
            #"vcmaxna": "30.61", # Original values Belinda had, I think based on Crous 2008, fig 2. Although those values I think are measured at 28 and 30 deg, the assumption being here that this is the same as 25 deg!
            #"vcmaxnb": "0.0",   # Original values Belinda had, I think based on Crous 2008, fig 2. Although those values I think are measured at 28 and 30 deg, the assumption being here that this is the same as 25 deg!
            "vcmaxna": "22.29",
            "vcmaxnb": "8.45",
            "jv_slope": "1.86",
            "jv_intercept": "0.0",
            "sapturnover": "0.1",
            "heighto": "4.826",
            "htpower": "0.35",
            "height0": "5.0",
            "height1": "20.0",
            "leafsap0": "8000.0",
            "leafsap1": "3060.0",  # Duke protocol
            "branch0": "5.61",
            "branch1": "0.346",
            "targ_sens": "0.5",
            "density": "420.0",

            # control
            "adjust_rtslow": "false",  # priming, off
            "alloc_model": "allometric",
            "assim_model": "mate",
            "calc_sw_params": "false",  # false=use fwp values, true=derive them
            "deciduous_model": "false",
            "disturbance": "0",
            "exudation": "false",
            "fixed_stem_nc": "true",
            "fixleafnc": "false",
            "grazing": "false",
            "gs_model": "medlyn",
            "model_optroot": "false",
            "modeljm": "2",
            "ncycle": "true",
            "nuptake_model": "2",
            "passiveconst": "false",
            "print_options": "end",
            "ps_pathway": "c3",
            "respiration_model": "fixed",
            "strfloat": "0",
            "sw_stress_model": "1",  # Sands and Landsberg
            "trans_model": "1",
            "use_eff_nc": "0",
            "use_leuning": "0",
            "water_stress": "true",
        }
        ad.adjust_param_file(cfg_fname, replace_dict)
        G = model.Gday(cfg_fname, spin_up=True)
        G.spin_up_pools()

    if POST_INDUST:
        # run for 200 odd years post industrial with increasing co2/ndep;
        # we are swapping forest params for grass params now.
        shutil.copy(os.path.join(param_dir, "%s_%s_model_spunup.cfg" %
                                 (experiment_id, site)),
                    os.path.join(param_dir, "%s_%s_model_spunup_adj.cfg" %
                                 (experiment_id, site)))

        itag = "%s_%s_model_spunup_adj" % (experiment_id, site)
        otag = "%s_%s_model_indust" % (experiment_id, site)
        mtag = "%s_met_data_industrial_to_present_1850_1983.csv" % (site)
        out_fn = itag + "_indust.out"
        out_param_fname = os.path.join(param_dir, otag + ".cfg")
        cfg_fname = os.path.join(param_dir, itag + ".cfg")
        # Parenthesised print is portable; the original used the
        # Python-2-only `print cfg_fname` statement.
        print(cfg_fname)
        met_fname = os.path.join(met_dir, mtag)
        out_fname = os.path.join(run_dir, out_fn)

        replace_dict = {
            # git stuff
            "git_hash": str(git_revision),

            # files
            "out_param_fname": "%s" % (out_param_fname),
            "cfg_fname": "%s" % (cfg_fname),
            "met_fname": "%s" % (met_fname),
            "out_fname": "%s" % (out_fname),

            # state - default C:N 25.
            "branch": "0.0",
            "branchn": "0.0",
            "canht": "0.79",  # Taken default C3grass value from JULES
            "cstore": "0.001",
            "nstore": "0.00004",
            "croot": "0.0",   # don't simulate coarse roots
            "crootn": "0.0",  # don't simulate coarse roots
            "root": "0.001",
            "rootn": "0.00004",
            "sapwood": "0.0",
            "shoot": "0.001",
            "shootn": "0.00004",
            "stem": "0.0",
            "stemn": "0.0",
            "stemnimm": "0.0",
            "stemnmob": "0.0",
            "nepsum": "0.0",
            "nppsum": "0.0",

            # parameters
            "ligshoot": "0.09",  # Smith et al. 2000, GRASS
            "ligroot": "0.22",   # Smith et al. 2000
            # The original dict listed "age" twice ("0.0" then "1.0");
            # Python keeps the last value, so "1.0" is the effective one.
            "age": "1.0",
            "slamax": "6.0",
            "sla": "6.0",
            "slazero": "6.0",
            "cfracts": "0.5",
            "lai_closed": "0.5",  # I am effectively turning this feature off by setting it so low
            "c_alloc_fmax": "0.8",
            "c_alloc_fmin": "0.2",
            "c_alloc_rmax": "0.8",
            "c_alloc_rmin": "0.2",
            "c_alloc_bmax": "0.0",
            "c_alloc_bmin": "0.0",
            "c_alloc_cmax": "0.0",  # turn off coarse roots!
            "fretrans": "0.4",
            "rretrans": "0.0",
            "bretrans": "0.0",
            "wretrans": "0.0",
            "ncwnewz": "0.0",
            "ncwnew": "0.0",
            "ncwimmz": "0.0",
            "ncwimm": "0.0",
            "ncbnewz": "0.0",
            "ncbnew": "0.0",
            "ncrfac": "0.7",
            "ncmaxfyoung": "0.035",
            "ncmaxfold": "0.035",
            "ncmaxr": "0.0287",
            "retransmob": "0.0",
            "fdecay": "1.0",
            "fdecaydry": "1.0",
            "rdecay": "1.0",
            "rdecaydry": "1.0",
            "bdecay": "0.0",
            "wdecay": "0.0",
            "watdecaydry": "0.0",
            "watdecaywet": "0.1",
            "crdecay": "0.00",  # turn off coarse roots!
            "dz0v_dh": "0.10",  # Taken default C3grass value from JULES
            "displace_ratio": "0.64",  # Jones 1992, pg. 67.
            "z0h_z0m": "1.0",  # Assume z0m = z0h, probably a big assumption [as z0h often < z0m.], see comment in code!!
            "jmaxna": "62.0",   # assuming j = v * 2
            "jmaxnb": "0.0",    # assuming no intercept
            "vcmaxna": "31.0",  # C3 grasses - CLM4 tech doc, table 8.2, Oleson et al 2010, page 176
            "vcmaxnb": "0.0",   # assuming no intercept

            # control
            "adjust_rtslow": "false",  # priming, off
            "alloc_model": "grasses",
            "assim_model": "mate",
            "calc_sw_params": "false",  # false=use fwp values, true=derive them
            "deciduous_model": "false",
            "disturbance": "0",
            "exudation": "false",
            "fixed_stem_nc": "true",
            "fixleafnc": "false",
            "grazing": "false",
            "gs_model": "medlyn",
            "model_optroot": "false",
            "modeljm": "1",
            "nuptake_model": "2",
            "ncycle": "true",
            "passiveconst": "false",
            "print_options": "end",
            "ps_pathway": "c3",
            "respiration_model": "fixed",
            "strfloat": "0",
            "sw_stress_model": "1",  # Sands and Landsberg
            "trans_model": "1",
            "use_eff_nc": "0",
            "use_leuning": "0",
            "water_stress": "true",
        }
        ad.adjust_param_file(cfg_fname, replace_dict)
        G = model.Gday(cfg_fname)
        G.run_sim()

    if SPIN_UP_SIMS:
        # Equilibrium traceability runs: pre-industrial (280 ppm), ambient
        # and elevated CO2 forcing.  These three runs were copy-pasted in the
        # original; they differ only in met file and output name.
        for mtag, out_fn in (
                ("%s_met_data_preindust_traceability_equilib.csv" % (site),
                 "D1GDAY%sSU280.csv" % (site)),
                ("%s_met_data_amb_traceability_equilib.csv" % (site),
                 "D1GDAY%sSUAMB.csv" % (site)),
                ("%s_met_data_ele_traceability_equilib.csv" % (site),
                 "D1GDAY%sSUELE.csv" % (site))):
            _run_traceability_sim(experiment_id, site, base_dir, param_dir,
                                  met_dir, run_dir, mtag, out_fn)


if __name__ == "__main__":

    experiment_id = "NCEAS"
    site = "DUKE"
    main(experiment_id, site, SPIN_UP=True, POST_INDUST=True)
| gpl-2.0 |
odoousers2014/odoo | addons/account/report/common_report_header.py | 8 | 6346 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.tools.translate import _
# Mixin to use with rml_parse, so self.pool will be defined.
class common_report_header(object):
    """Shared helpers for OpenERP account report parsers.

    Mixin to use with rml_parse, so ``self.pool``, ``self.cr``,
    ``self.journal_ids``, ``self.period_ids`` and ``self.query_get_clause``
    will be provided by the host class.
    """

    def _sum_column(self, column, period_id=False, journal_id=False):
        """Sum ``column`` of account_move_line over the given periods/journals.

        ``column`` must be a trusted identifier ('debit' or 'credit'); it is
        interpolated directly into the SQL, never user input.
        """
        if journal_id and isinstance(journal_id, int):
            journal_id = [journal_id]
        if period_id and isinstance(period_id, int):
            period_id = [period_id]
        if not journal_id:
            journal_id = self.journal_ids
        if not period_id:
            period_id = self.period_ids
        if not (period_id and journal_id):
            return 0.0
        self.cr.execute('SELECT SUM(' + column + ') FROM account_move_line l '
                        'WHERE period_id IN %s AND journal_id IN %s '
                        + self.query_get_clause + ' ',
                        (tuple(period_id), tuple(journal_id)))
        return self.cr.fetchone()[0] or 0.0

    def _sum_debit(self, period_id=False, journal_id=False):
        """Total debit for the selected periods/journals."""
        return self._sum_column('debit', period_id=period_id,
                                journal_id=journal_id)

    def _sum_credit(self, period_id=False, journal_id=False):
        """Total credit for the selected periods/journals."""
        return self._sum_column('credit', period_id=period_id,
                                journal_id=journal_id)

    def _sum_column_period(self, column, period_id, journal_id=None):
        """Sum ``column`` for a single period (``column`` is trusted)."""
        journals = journal_id or self.journal_ids
        if not journals:
            return 0.0
        self.cr.execute('SELECT SUM(' + column + ') FROM account_move_line l '
                        'WHERE period_id=%s AND journal_id IN %s '
                        + self.query_get_clause + ' ',
                        (period_id, tuple(journals)))
        return self.cr.fetchone()[0] or 0.0

    def _sum_debit_period(self, period_id, journal_id=None):
        """Total debit for one period."""
        return self._sum_column_period('debit', period_id, journal_id)

    def _sum_credit_period(self, period_id, journal_id=None):
        """Total credit for one period."""
        return self._sum_column_period('credit', period_id, journal_id)

    def _get_start_date(self, data):
        """Return the 'date_from' wizard value, or '' when unset."""
        if data.get('form', False) and data['form'].get('date_from', False):
            return data['form']['date_from']
        return ''

    def _get_target_move(self, data):
        """Human-readable label for the selected target-move filter."""
        if data.get('form', False) and data['form'].get('target_move', False):
            if data['form']['target_move'] == 'all':
                return _('All Entries')
            return _('All Posted Entries')
        return ''

    def _get_end_date(self, data):
        """Return the 'date_to' wizard value, or '' when unset."""
        if data.get('form', False) and data['form'].get('date_to', False):
            return data['form']['date_to']
        return ''

    def get_start_period(self, data):
        """Name of the starting period selected in the wizard, or ''."""
        if data.get('form', False) and data['form'].get('period_from', False):
            return self.pool.get('account.period').browse(
                self.cr, self.uid, data['form']['period_from']).name
        return ''

    def get_end_period(self, data):
        """Name of the ending period selected in the wizard, or ''."""
        if data.get('form', False) and data['form'].get('period_to', False):
            return self.pool.get('account.period').browse(
                self.cr, self.uid, data['form']['period_to']).name
        return ''

    def _get_account(self, data):
        """Name of the chart-of-accounts root account, or ''."""
        if data.get('form', False) and data['form'].get('chart_account_id', False):
            return self.pool.get('account.account').browse(
                self.cr, self.uid, data['form']['chart_account_id']).name
        return ''

    def _get_sortby(self, data):
        """Subclasses must override to describe their sort order."""
        # The original code raised a *tuple* -- a TypeError at runtime;
        # NotImplementedError is what was clearly intended.
        raise NotImplementedError(_('Not implemented.'))

    def _get_filter(self, data):
        """Human-readable label for the selected filter mode."""
        if data.get('form', False) and data['form'].get('filter', False):
            if data['form']['filter'] == 'filter_date':
                return self._translate('Date')
            elif data['form']['filter'] == 'filter_period':
                return self._translate('Periods')
        return self._translate('No Filters')

    def _get_fiscalyear(self, data):
        """Name of the selected fiscal year, or ''."""
        if data.get('form', False) and data['form'].get('fiscalyear_id', False):
            return self.pool.get('account.fiscalyear').browse(
                self.cr, self.uid, data['form']['fiscalyear_id']).name
        return ''

    def _get_company(self, data):
        """Name of the company owning the selected chart account, or ''."""
        if data.get('form', False) and data['form'].get('chart_account_id', False):
            return self.pool.get('account.account').browse(
                self.cr, self.uid,
                data['form']['chart_account_id']).company_id.name
        return ''

    def _get_journal(self, data):
        """Codes of the journals selected in the wizard."""
        codes = []
        if data.get('form', False) and data['form'].get('journal_ids', False):
            self.cr.execute(
                'select code from account_journal where id IN %s',
                (tuple(data['form']['journal_ids']),))
            codes = [x for x, in self.cr.fetchall()]
        return codes

    def _get_currency(self, data):
        """Currency symbol of the selected chart account's company, or ''."""
        if data.get('form', False) and data['form'].get('chart_account_id', False):
            return self.pool.get('account.account').browse(
                self.cr, self.uid,
                data['form']['chart_account_id']).company_id.currency_id.symbol
        return ''
| agpl-3.0 |
peterlauri/django | django/contrib/gis/forms/widgets.py | 14 | 3351 | from __future__ import unicode_literals
import logging
from django.conf import settings
from django.contrib.gis import gdal
from django.contrib.gis.geos import GEOSException, GEOSGeometry
from django.forms.widgets import Widget
from django.template import loader
from django.utils import six, translation
logger = logging.getLogger('django.contrib.gis')
class BaseGeometryWidget(Widget):
    """
    Base class for rich geometry-editing widgets.

    Renders a map and round-trips the geometry value as WKT text.
    """
    geom_type = 'GEOMETRY'
    map_srid = 4326
    map_width = 600
    map_height = 400
    display_raw = False
    supports_3d = False
    template_name = ''  # set on subclasses

    def __init__(self, attrs=None):
        # Seed widget attrs from the class-level defaults, then let any
        # caller-supplied attrs override them.
        self.attrs = {key: getattr(self, key)
                      for key in ('geom_type', 'map_srid', 'map_width',
                                  'map_height', 'display_raw')}
        if attrs:
            self.attrs.update(attrs)

    def serialize(self, value):
        """Serialize the geometry as WKT ('' when there is no value)."""
        if not value:
            return ''
        return value.wkt

    def deserialize(self, value):
        """Build a GEOSGeometry in the map SRID from raw input, or None."""
        try:
            return GEOSGeometry(value, self.map_srid)
        except (GEOSException, ValueError) as exc:
            logger.error("Error creating geometry from value '%s' (%s)", value, exc)
        return None

    def render(self, name, value, attrs=None):
        # A string can reach here via a validation error on another field;
        # reconstruct the geometry before rendering.
        if isinstance(value, six.string_types) and value:
            value = self.deserialize(value)
        # Reproject when the value's SRID differs from the map's.
        if value and value.srid != self.map_srid:
            try:
                transformed = value.ogr
                transformed.transform(self.map_srid)
                value = transformed
            except gdal.GDALException as exc:
                logger.error(
                    "Error transforming geometry from srid '%s' to srid '%s' (%s)",
                    value.srid, self.map_srid, exc
                )
        context = self.build_attrs(
            attrs,
            name=name,
            module='geodjango_%s' % name.replace('-', '_'),  # JS-safe
            serialized=self.serialize(value),
            geom_type=gdal.OGRGeomType(self.attrs['geom_type']),
            STATIC_URL=settings.STATIC_URL,
            LANGUAGE_BIDI=translation.get_language_bidi(),
        )
        return loader.render_to_string(self.template_name, context)
class OpenLayersWidget(BaseGeometryWidget):
    # Geometry widget rendered with the OpenLayers 2.13 JS library.
    template_name = 'gis/openlayers.html'
    class Media:
        # OpenLayers comes from a CDN; the map-widget glue script ships
        # with contrib.gis static files.
        js = (
            'https://cdnjs.cloudflare.com/ajax/libs/openlayers/2.13.1/OpenLayers.js',
            'gis/js/OLMapWidget.js',
        )
class OSMWidget(BaseGeometryWidget):
    """
    An OpenLayers/OpenStreetMap-based widget.
    """
    template_name = 'gis/openlayers-osm.html'
    # NOTE(review): presumably the initial map center in lon/lat degrees,
    # consumed by the template -- confirm against openlayers-osm.html.
    default_lon = 5
    default_lat = 47
    # Web-Mercator projection used by OSM tiles.
    map_srid = 3857
    class Media:
        js = (
            'https://cdnjs.cloudflare.com/ajax/libs/openlayers/2.13.1/OpenLayers.js',
            'gis/js/OLMapWidget.js',
        )
    def __init__(self, attrs=None):
        # Extend the base attrs with the OSM-specific defaults above,
        # then apply any caller-supplied overrides.
        super(OSMWidget, self).__init__()
        for key in ('default_lon', 'default_lat'):
            self.attrs[key] = getattr(self, key)
        if attrs:
            self.attrs.update(attrs)
| bsd-3-clause |
nathanial/lettuce | tests/integration/lib/Django-1.3/django/contrib/admin/templatetags/log.py | 310 | 2270 | from django import template
from django.contrib.admin.models import LogEntry
register = template.Library()
class AdminLogNode(template.Node):
    """Template node that stores recent admin LogEntry rows in the context."""

    def __init__(self, limit, varname, user):
        self.limit = limit
        self.varname = varname
        self.user = user

    def __repr__(self):
        return "<GetAdminLog Node>"

    def render(self, context):
        # Default: the most recent entries for any user.
        entries = LogEntry.objects.all().select_related('content_type', 'user')
        if self.user is not None:
            # `user` is either a literal id or the name of a context
            # variable holding a user object.
            user_id = self.user
            if not user_id.isdigit():
                user_id = context[self.user].id
            entries = LogEntry.objects.filter(
                user__id__exact=user_id).select_related('content_type', 'user')
        context[self.varname] = entries[:self.limit]
        return ''
class DoGetAdminLog:
    """
    Populates a template variable with the admin log for the given criteria.
    Usage::
        {% get_admin_log [limit] as [varname] for_user [context_var_containing_user_obj] %}
    Examples::
        {% get_admin_log 10 as admin_log for_user 23 %}
        {% get_admin_log 10 as admin_log for_user user %}
        {% get_admin_log 10 as admin_log %}
    Note that ``context_var_containing_user_obj`` can be a hard-coded integer
    (user ID) or the name of a template context variable containing the user
    object whose ID you want.
    """

    def __init__(self, tag_name):
        self.tag_name = tag_name

    def __call__(self, parser, token):
        tokens = token.contents.split()
        # Expected shape: get_admin_log <limit> as <varname> [for_user <user>]
        if len(tokens) < 4:
            raise template.TemplateSyntaxError("'%s' statements require two arguments" % self.tag_name)
        if not tokens[1].isdigit():
            raise template.TemplateSyntaxError("First argument in '%s' must be an integer" % self.tag_name)
        if tokens[2] != 'as':
            raise template.TemplateSyntaxError("Second argument in '%s' must be 'as'" % self.tag_name)
        if len(tokens) > 4 and tokens[4] != 'for_user':
            raise template.TemplateSyntaxError("Fourth argument in '%s' must be 'for_user'" % self.tag_name)
        user = tokens[5] if len(tokens) > 5 else None
        return AdminLogNode(limit=tokens[1], varname=tokens[3], user=user)

register.tag('get_admin_log', DoGetAdminLog('get_admin_log'))
| gpl-3.0 |
safwanrahman/mozillians | vendor-local/lib/python/markdown/inlinepatterns.py | 11 | 17252 | """
INLINE PATTERNS
=============================================================================
Inline patterns such as *emphasis* are handled by means of auxiliary
objects, one per pattern. Pattern objects must be instances of classes
that extend markdown.Pattern. Each pattern object uses a single regular
expression and needs support the following methods:
pattern.getCompiledRegExp() # returns a regular expression
pattern.handleMatch(m) # takes a match object and returns
# an ElementTree element or just plain text
All of python markdown's built-in patterns subclass from Pattern,
but you can add additional patterns that don't.
Also note that all the regular expressions used by inline must
capture the whole block. For this reason, they all start with
'^(.*)' and end with '(.*)!'. In case with built-in expression
Pattern takes care of adding the "^(.*)" and "(.*)!".
Finally, the order in which regular expressions are applied is very
important - e.g. if we first replace http://.../ links with <a> tags
and _then_ try to replace inline html, we would end up with a mess.
So, we apply the expressions in the following order:
* escape and backticks have to go before everything else, so
that we can preempt any markdown patterns by escaping them.
* then we handle auto-links (must be done before inline html)
* then we handle inline HTML. At this point we will simply
replace all inline HTML strings with a placeholder and add
the actual HTML to a hash.
* then inline images (must be done before links)
* then bracketed links, first regular then reference-style
* finally we apply strong and emphasis
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from . import util
from . import odict
import re
try:
from urllib.parse import urlparse, urlunparse
except ImportError:
from urlparse import urlparse, urlunparse
try:
from html import entities
except ImportError:
import htmlentitydefs as entities
def build_inlinepatterns(md_instance, **kwargs):
    """ Build the default set of inline patterns for Markdown. """
    # Order matters: escapes/backticks first so later patterns can be
    # preempted, links before emphasis, etc.
    patterns = odict.OrderedDict()
    patterns["backtick"] = BacktickPattern(BACKTICK_RE)
    patterns["escape"] = EscapePattern(ESCAPE_RE, md_instance)
    patterns["reference"] = ReferencePattern(REFERENCE_RE, md_instance)
    patterns["link"] = LinkPattern(LINK_RE, md_instance)
    patterns["image_link"] = ImagePattern(IMAGE_LINK_RE, md_instance)
    patterns["image_reference"] = ImageReferencePattern(IMAGE_REFERENCE_RE,
                                                        md_instance)
    patterns["short_reference"] = ReferencePattern(SHORT_REF_RE, md_instance)
    patterns["autolink"] = AutolinkPattern(AUTOLINK_RE, md_instance)
    patterns["automail"] = AutomailPattern(AUTOMAIL_RE, md_instance)
    patterns["linebreak"] = SubstituteTagPattern(LINE_BREAK_RE, 'br')
    if md_instance.safeMode != 'escape':
        # Raw inline HTML is only recognised outside "escape" safe mode.
        patterns["html"] = HtmlPattern(HTML_RE, md_instance)
    patterns["entity"] = HtmlPattern(ENTITY_RE, md_instance)
    patterns["not_strong"] = SimpleTextPattern(NOT_STRONG_RE)
    patterns["strong_em"] = DoubleTagPattern(STRONG_EM_RE, 'strong,em')
    patterns["strong"] = SimpleTagPattern(STRONG_RE, 'strong')
    patterns["emphasis"] = SimpleTagPattern(EMPHASIS_RE, 'em')
    emphasis2_re = (SMART_EMPHASIS_RE if md_instance.smart_emphasis
                    else EMPHASIS_2_RE)
    patterns["emphasis2"] = SimpleTagPattern(emphasis2_re, 'em')
    return patterns
"""
The actual regular expressions for patterns
-----------------------------------------------------------------------------
"""
NOBRACKET = r'[^\]\[]*'
BRK = ( r'\[('
+ (NOBRACKET + r'(\[')*6
+ (NOBRACKET+ r'\])*')*6
+ NOBRACKET + r')\]' )
NOIMG = r'(?<!\!)'
BACKTICK_RE = r'(?<!\\)(`+)(.+?)(?<!`)\2(?!`)' # `e=f()` or ``e=f("`")``
ESCAPE_RE = r'\\(.)' # \<
EMPHASIS_RE = r'(\*)([^\*]+)\2' # *emphasis*
STRONG_RE = r'(\*{2}|_{2})(.+?)\2' # **strong**
STRONG_EM_RE = r'(\*{3}|_{3})(.+?)\2' # ***strong***
SMART_EMPHASIS_RE = r'(?<!\w)(_)(?!_)(.+?)(?<!_)\2(?!\w)' # _smart_emphasis_
EMPHASIS_2_RE = r'(_)(.+?)\2' # _emphasis_
LINK_RE = NOIMG + BRK + \
r'''\(\s*(<.*?>|((?:(?:\(.*?\))|[^\(\)]))*?)\s*((['"])(.*?)\12\s*)?\)'''
# [text](url) or [text](<url>) or [text](url "title")
IMAGE_LINK_RE = r'\!' + BRK + r'\s*\((<.*?>|([^")]+"[^"]*"|[^\)]*))\)'
#  or 
REFERENCE_RE = NOIMG + BRK+ r'\s?\[([^\]]*)\]' # [Google][3]
SHORT_REF_RE = NOIMG + r'\[([^\]]+)\]' # [Google]
IMAGE_REFERENCE_RE = r'\!' + BRK + '\s?\[([^\]]*)\]' # ![alt text][2]
NOT_STRONG_RE = r'((^| )(\*|_)( |$))' # stand-alone * or _
AUTOLINK_RE = r'<((?:[Ff]|[Hh][Tt])[Tt][Pp][Ss]?://[^>]*)>' # <http://www.123.com>
AUTOMAIL_RE = r'<([^> \!]*@[^> ]*)>' # <me@example.com>
HTML_RE = r'(\<([a-zA-Z/][^\>]*?|\!--.*?--)\>)' # <...>
ENTITY_RE = r'(&[\#a-zA-Z0-9]*;)' # &
LINE_BREAK_RE = r' \n' # two spaces at end of line
def dequote(string):
    """Remove a matching pair of surrounding quotes from *string*."""
    # Only strip when the first and last characters are the same quote
    # character ('"' or "'"); otherwise return the input untouched.
    if string and string[0] == string[-1] and string[0] in ('"', "'"):
        return string[1:-1]
    return string
ATTR_RE = re.compile("\{@([^\}]*)=([^\}]*)}") # {@id=123}


def handleAttributes(text, parent):
    """Set values of an element based on attribute definitions ({@id=123}).

    Every ``{@key=value}`` marker found in *text* becomes an attribute on
    *parent* (newlines in the value are flattened to spaces), and the marker
    is stripped from the returned text.
    """
    def attributeCallback(match):
        parent.set(match.group(1), match.group(2).replace('\n', ' '))
        # Fixed: the original callback returned None, which makes re.sub
        # raise TypeError whenever a marker matches; the replacement for a
        # consumed marker must be the empty string.
        return ''
    return ATTR_RE.sub(attributeCallback, text)
"""
The pattern classes
-----------------------------------------------------------------------------
"""
class Pattern(object):
    """Base class that inline patterns subclass. """
    def __init__(self, pattern, markdown_instance=None):
        """
        Create an instance of an inline pattern.
        Keyword arguments:
        * pattern: A regular expression that matches a pattern
        """
        self.pattern = pattern
        # Anchor the pattern so one match consumes the whole string:
        # group(1) is everything before the pattern and the final group is
        # everything after it; the pattern's own groups sit in between.
        self.compiled_re = re.compile("^(.*?)%s(.*?)$" % pattern,
re.DOTALL | re.UNICODE)
        # Api for Markdown to pass safe_mode into instance
        self.safe_mode = False
        if markdown_instance:
            self.markdown = markdown_instance
    def getCompiledRegExp(self):
        """ Return a compiled regular expression. """
        return self.compiled_re
    def handleMatch(self, m):
        """Return a ElementTree element from the given match.
        Subclasses should override this method.
        Keyword arguments:
        * m: A re match object containing a match of the pattern.
        """
        pass
    def type(self):
        """ Return class name, to define pattern type """
        return self.__class__.__name__
    def unescape(self, text):
        """ Return unescaped text given text with an inline placeholder. """
        try:
            stash = self.markdown.treeprocessors['inline'].stashed_nodes
        except KeyError:
            # No inline treeprocessor registered yet: nothing to resolve.
            return text
        def itertext(el):
            ' Reimplement Element.itertext for older python versions '
            tag = el.tag
            if not isinstance(tag, util.string_type) and tag is not None:
                return
            if el.text:
                yield el.text
            for e in el:
                for s in itertext(e):
                    yield s
                if e.tail:
                    yield e.tail
        def get_stash(m):
            id = m.group(1)
            if id in stash:
                value = stash.get(id)
                if isinstance(value, util.string_type):
                    return value
                else:
                    # An etree Element - return text content only
                    return ''.join(itertext(value))
        return util.INLINE_PLACEHOLDER_RE.sub(get_stash, text)
class SimpleTextPattern(Pattern):
    """Yield the plain text captured in group(2) of the match."""
    def handleMatch(self, m):
        captured = m.group(2)
        # A bare placeholder prefix carries no user text; suppress it.
        return None if captured == util.INLINE_PLACEHOLDER_PREFIX else captured
class EscapePattern(Pattern):
    """Replace a backslash-escaped character with a STX/ord/ETX token."""
    def handleMatch(self, m):
        char = m.group(2)
        # Only characters the Markdown instance declares escapable are
        # tokenized; anything else is left for other patterns to handle.
        if char not in self.markdown.ESCAPED_CHARS:
            return None
        return '%s%s%s' % (util.STX, ord(char), util.ETX)
class SimpleTagPattern(Pattern):
    """Wrap the text captured in group(3) in an element of type `tag`."""
    def __init__(self, pattern, tag):
        Pattern.__init__(self, pattern)
        # Tag name is fixed at registration time (e.g. 'em', 'strong').
        self.tag = tag
    def handleMatch(self, m):
        node = util.etree.Element(self.tag)
        node.text = m.group(3)
        return node
class SubstituteTagPattern(SimpleTagPattern):
    """ Return an element of type `tag` with no children. """
    def handleMatch (self, m):
        # Unlike SimpleTagPattern, the matched text is discarded entirely;
        # only the empty element (e.g. <br/>) is substituted in.
        return util.etree.Element(self.tag)
class BacktickPattern(Pattern):
    """Build a `<code>` element from backtick-delimited text."""
    def __init__(self, pattern):
        Pattern.__init__(self, pattern)
        self.tag = "code"
    def handleMatch(self, m):
        node = util.etree.Element(self.tag)
        # AtomicString marks the content so it is not re-processed;
        # surrounding whitespace inside the backticks is trimmed.
        node.text = util.AtomicString(m.group(3).strip())
        return node
class DoubleTagPattern(SimpleTagPattern):
    """Nest group(3) inside <tag2> inside <tag1>.
    Useful for strong emphasis etc., where `self.tag` is "tag1,tag2".
    """
    def handleMatch(self, m):
        outer_name, inner_name = self.tag.split(",")
        outer = util.etree.Element(outer_name)
        inner = util.etree.SubElement(outer, inner_name)
        inner.text = m.group(3)
        return outer
class HtmlPattern(Pattern):
    """ Store raw inline html and return a placeholder. """
    def handleMatch (self, m):
        """Stash the raw HTML from group(2); return its placeholder string."""
        rawhtml = self.unescape(m.group(2))
        place_holder = self.markdown.htmlStash.store(rawhtml)
        return place_holder
    def unescape(self, text):
        """ Return unescaped text given text with an inline placeholder. """
        try:
            stash = self.markdown.treeprocessors['inline'].stashed_nodes
        except KeyError:
            # No inline treeprocessor registered yet: nothing to resolve.
            return text
        def get_stash(m):
            id = m.group(1)
            value = stash.get(id)
            if value is not None:
                try:
                    return self.markdown.serializer(value)
                except Exception:
                    # Fall back to a visibly-escaped representation rather
                    # than aborting the conversion.  (Was a bare ``except:``,
                    # which also swallowed SystemExit/KeyboardInterrupt; the
                    # literal is now ``'\\%s'`` to avoid the invalid escape
                    # sequence warning — same string value as before.)
                    return '\\%s' % value
        return util.INLINE_PLACEHOLDER_RE.sub(get_stash, text)
class LinkPattern(Pattern):
    """ Return a link element from the given match. """
    def handleMatch(self, m):
        # group(2) is the link text, group(9) the href, group(13) an
        # optional quoted title (group numbers come from LINK_RE plus the
        # two wrapper groups added by Pattern.__init__).
        el = util.etree.Element("a")
        el.text = m.group(2)
        title = m.group(13)
        href = m.group(9)
        if href:
            if href[0] == "<":
                # <url> form: strip the angle brackets.
                href = href[1:-1]
            el.set("href", self.sanitize_url(self.unescape(href.strip())))
        else:
            el.set("href", "")
        if title:
            title = dequote(self.unescape(title))
            el.set("title", title)
        return el
    def sanitize_url(self, url):
        """
        Sanitize a url against xss attacks in "safe_mode".
        Rather than specifically blacklisting `javascript:alert("XSS")` and all
        its aliases (see <http://ha.ckers.org/xss.html>), we whitelist known
        safe url formats. Most urls contain a network location, however some
        are known not to (i.e.: mailto links). Script urls do not contain a
        location. Additionally, for `javascript:...`, the scheme would be
        "javascript" but some aliases will appear to `urlparse()` to have no
        scheme. On top of that relative links (i.e.: "foo/bar.html") have no
        scheme. Therefore we must check "path", "parameters", "query" and
        "fragment" for any literal colons. We don't check "scheme" for colons
        because it *should* never have any and "netloc" must allow the form:
        `username:password@host:port`.
        """
        if not self.markdown.safeMode:
            # Return immediately bypassing parsing.
            return url
        try:
            # Note: `url` is rebound to the 6-tuple from urlparse here.
            scheme, netloc, path, params, query, fragment = url = urlparse(url)
        except ValueError:
            # Bad url - so bad it couldn't be parsed.
            return ''
        locless_schemes = ['', 'mailto', 'news']
        allowed_schemes = locless_schemes + ['http', 'https', 'ftp', 'ftps']
        if scheme not in allowed_schemes:
            # Not a known (allowed) scheme. Not safe.
            return ''
        if netloc == '' and scheme not in locless_schemes:
            # This should not happen. Treat as suspect.
            return ''
        for part in url[2:]:
            if ":" in part:
                # A colon in "path", "parameters", "query" or "fragment" is suspect.
                return ''
        # Url passes all tests. Return url as-is.
        return urlunparse(url)
class ImagePattern(LinkPattern):
    """ Return a img element from the given match. """
    def handleMatch(self, m):
        el = util.etree.Element("img")
        # group(9) holds "src [title words...]": the first whitespace-split
        # token is the src, the remainder (if any) becomes the title.
        src_parts = m.group(9).split()
        if src_parts:
            src = src_parts[0]
            if src[0] == "<" and src[-1] == ">":
                # <url> form: strip the angle brackets.
                src = src[1:-1]
            el.set('src', self.sanitize_url(self.unescape(src)))
        else:
            el.set('src', "")
        if len(src_parts) > 1:
            el.set('title', dequote(self.unescape(" ".join(src_parts[1:]))))
        if self.markdown.enable_attributes:
            # {@key=value} definitions in the alt text become attributes.
            truealt = handleAttributes(m.group(2), el)
        else:
            truealt = m.group(2)
        el.set('alt', self.unescape(truealt))
        return el
class ReferencePattern(LinkPattern):
    """ Match to a stored reference and return link element. """
    NEWLINE_CLEANUP_RE = re.compile(r'[ ]?\n', re.MULTILINE)
    def handleMatch(self, m):
        """Resolve the reference id against the stored references and build
        an ``<a>`` element; return None for an undefined reference."""
        try:
            id = m.group(9).lower()
        except IndexError:
            id = None
        if not id:
            # if we got something like "[Google][]" or "[Goggle]"
            # we'll use "google" as the id
            id = m.group(2).lower()
        # Clean up linebreaks in id
        id = self.NEWLINE_CLEANUP_RE.sub(' ', id)
        # Membership test now uses the idiomatic ``not in`` (was ``not id in``).
        if id not in self.markdown.references: # ignore undefined refs
            return None
        href, title = self.markdown.references[id]
        text = m.group(2)
        return self.makeTag(href, title, text)
    def makeTag(self, href, title, text):
        """Build the ``<a>`` element for a resolved reference."""
        el = util.etree.Element('a')
        el.set('href', self.sanitize_url(href))
        if title:
            el.set('title', title)
        el.text = text
        return el
class ImageReferencePattern(ReferencePattern):
    """Resolve a stored reference into an <img> element."""
    def makeTag(self, href, title, text):
        img = util.etree.Element("img")
        img.set("src", self.sanitize_url(href))
        if title:
            img.set("title", title)
        if self.markdown.enable_attributes:
            # {@key=value} definitions in the alt text become attributes.
            text = handleAttributes(text, img)
        img.set("alt", self.unescape(text))
        return img
class AutolinkPattern(Pattern):
    """Turn an autolink (`<http://example/com>`) into an <a> element."""
    def handleMatch(self, m):
        url = m.group(2)
        anchor = util.etree.Element("a")
        anchor.set('href', self.unescape(url))
        # AtomicString: presumably keeps the link text from being
        # re-processed by later inline patterns — see util.AtomicString.
        anchor.text = util.AtomicString(url)
        return anchor
class AutomailPattern(Pattern):
    """
    Return a mailto link Element given an automail link (`<foo@example.com>`).
    """
    def handleMatch(self, m):
        el = util.etree.Element('a')
        email = self.unescape(m.group(2))
        if email.startswith("mailto:"):
            email = email[len("mailto:"):]
        def codepoint2name(code):
            """Return entity definition by code, or the code if not defined."""
            entity = entities.codepoint2name.get(code)
            if entity:
                return "%s%s;" % (util.AMP_SUBSTITUTE, entity)
            else:
                return "%s#%d;" % (util.AMP_SUBSTITUTE, code)
        # Encode the visible address as HTML entities, presumably to make
        # it harder for address harvesters to read.
        letters = [codepoint2name(ord(letter)) for letter in email]
        el.text = util.AtomicString(''.join(letters))
        mailto = "mailto:" + email
        # The href is numeric-entity encoded character by character,
        # including the "mailto:" scheme itself.
        mailto = "".join([util.AMP_SUBSTITUTE + '#%d;' %
ord(letter) for letter in mailto])
        el.set('href', mailto)
        return el
| bsd-3-clause |
bponsler/pysiriproxy | pysiriproxy/config/plugins/resetPlugin.py | 1 | 1672 | # Copyright 2012 Brett Ponsler
# This file is part of pysiriproxy.
#
# pysiriproxy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pysiriproxy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pysiriproxy. If not, see <http://www.gnu.org/licenses/>.
'''Contains the Reset-Plugin.'''
from pysiriproxy.plugins.plugin import BasePlugin
from pysiriproxy.plugins.objectClasses import ClearContext, \
StartSpeechRequest, CancelRequest, CancelSpeech, CommandFailed
from pyamp.logging import Colors
class Plugin(BasePlugin):
    '''Handles resetting the context.'''
    # Define the name and log color for this plugin
    name = "Reset-Plugin"
    logColor = Colors.Foreground.Green
    ##### Define all of the filters for this plugin. #####
    # The stacked decorators presumably register this one filter for each
    # of the listed object classes -- confirm against pysiriproxy.plugins.
    @CancelRequest
    @CancelSpeech
    @ClearContext
    @CommandFailed
    def resetFilter(self, obj, direction):
        '''Reset the context when a request is completed, or the context is
        cleared.
        * obj -- The received object
        * direction -- The direction of the received data
        '''
        self.log.debug("Resetting object manager: %s" % obj.get("class", None),
level=0)
        self.resetContext()
        # Return the object unmodified so it continues through the chain.
        return obj
| gpl-3.0 |
wxgeo/geophar | wxgeometrie/sympy/functions/special/tests/test_mathieu.py | 85 | 1080 | from sympy import (sqrt, sin, cos, diff, conjugate,
mathieus, mathieuc, mathieusprime, mathieucprime)
from sympy.abc import a, q, z
def test_mathieus():
    """Sanity checks for mathieus: type, q=0 reduction to sin, conjugate
    distribution, and z-derivative."""
    assert isinstance(mathieus(a, q, z), mathieus)
    assert mathieus(a, 0, z) == sin(sqrt(a)*z)
    assert conjugate(mathieus(a, q, z)) == mathieus(conjugate(a), conjugate(q), conjugate(z))
    assert diff(mathieus(a, q, z), z) == mathieusprime(a, q, z)
def test_mathieuc():
    """Sanity checks for mathieuc: type, q=0 reduction to cos, z-derivative."""
    assert isinstance(mathieuc(a, q, z), mathieuc)
    assert mathieuc(a, 0, z) == cos(sqrt(a)*z)
    assert diff(mathieuc(a, q, z), z) == mathieucprime(a, q, z)
def test_mathieusprime():
    """Sanity checks for mathieusprime: type, q=0 form, Mathieu ODE relation."""
    assert isinstance(mathieusprime(a, q, z), mathieusprime)
    assert mathieusprime(a, 0, z) == sqrt(a)*cos(sqrt(a)*z)
    assert diff(mathieusprime(a, q, z), z) == (-a + 2*q*cos(2*z))*mathieus(a, q, z)
def test_mathieucprime():
    """Sanity checks for mathieucprime: type, q=0 form, Mathieu ODE relation."""
    assert isinstance(mathieucprime(a, q, z), mathieucprime)
    assert mathieucprime(a, 0, z) == -sqrt(a)*sin(sqrt(a)*z)
    assert diff(mathieucprime(a, q, z), z) == (-a + 2*q*cos(2*z))*mathieuc(a, q, z)
nawawi/poedit | deps/boost/tools/build/test/collect_debug_info.py | 51 | 8397 | #!/usr/bin/python
# Copyright 2012 Jurko Gospodnetic
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Temporarily enabled dummy test that always fails and is used to collect
# extra debugging information from Boost Build test runner sites.
import BoostBuild
import os
import re
import sys
###############################################################################
#
# Public interface.
#
###############################################################################
def collectDebugInfo():
    """Gather interpreter/platform/Boost.Build diagnostics as annotations,
    then deliberately fail the dummy test so the runner publishes them."""
    t = _init()
    global tag
    tag = "Python version"
    try:
        _info(sys.version)
    except:
        # Bare except on purpose: a failed probe must not stop collection.
        _info_exc()
    tag = "Python platform"
    try:
        _info(sys.platform)
    except:
        _info_exc()
    tag = "Boost Jam/Build version"
    try:
        _infoX(_getJamVersionInfo(t))
    except:
        _info_exc()
    #_collectDebugInfo_environ()
    # Report prepared annotations.
    t.fail_test(1, dump_difference=False, dump_stdio=False, dump_stack=False)
###############################################################################
#
# Private interface.
#
###############################################################################
varSeparator = "###$^%~~~"  # improbable token used to join/split variable values echoed by jam
def _collect(results, prefix, name, t):
    """Record the value of env var *name* as seen three different ways:
    os.getenv, os.environ.get, and externally via the build system."""
    via_getenv = os.getenv(name)
    via_environ = os.environ.get(name)
    results.append("%s - %s - os.getenv(): %r" % (prefix, name, via_getenv))
    results.append("%s - %s - os.environ.get(): %r" % (prefix, name,
        via_environ))
    external_values = _getExternalValues(t, name)
    results.append("%s - %s - external: %r" % (prefix, name,
        external_values[name]))
def _collectDebugInfo_environ(t):
    """Probe how os.getenv/os.environ/os.putenv/os.unsetenv interact on
    this platform, annotating each variable's value before and after every
    mutation.  Every probe is individually wrapped so that an unexpected
    failure is reported (via _info_exc) instead of aborting the rest.

    Refactored from thirteen copy-pasted try/except blocks into two small
    helpers; probe order, tags and reported strings are unchanged.
    """
    dummyVars = ["WOOF_WOOFIE_%d" % x for x in xrange(4)]
    global tag

    def probe_map(probe_tag, f):
        # Report f(name) for every dummy variable under the given tag.
        global tag
        tag = probe_tag
        try:
            _infoX(f(x) for x in dummyVars)
        except:
            # Bare except on purpose: collection must continue.
            _info_exc()

    def probe_change(probe_tag, name, change, setup=None):
        # Report a variable's value before and after running `change`;
        # `setup` optionally seeds the variable first (inside the try,
        # matching the original ordering).
        global tag
        tag = probe_tag
        try:
            results = []
            if setup is not None:
                setup()
            _collect(results, "before", name, t)
            change()
            _collect(results, "after", name, t)
            _infoX(results)
        except:
            _info_exc()

    # Read-only probes over all dummy variables.
    probe_map("XXX in os.environ",
        lambda name: "%s: %s" % (name, name in os.environ))

    def f(name):
        try:
            result = os.environ[name]
        except:
            result = _str_exc()
        return "%s: %r" % (name, result)
    probe_map("os.environ[XXX]", f)

    probe_map("os.environ.get(XXX)",
        lambda name: "%s: %r" % (name, os.environ.get(name)))
    probe_map("os.getenv(XXX)",
        lambda name: "%s: %r" % (name, os.getenv(name)))

    # Mutation probes.
    name = dummyVars[0]
    probe_change("os.putenv(%s) to %r" % (name, "foo"), name,
        lambda: os.putenv(name, "foo"))

    name = dummyVars[1]
    probe_change("os.environ[%s] to %r" % (name, "bar"), name,
        lambda: os.environ.__setitem__(name, "bar"))
    probe_change("os.putenv(%s) to %r" % (name, "baz"), name,
        lambda: os.putenv(name, "baz"))
    probe_change("os.putenv(%s) to %r" % (name, ""), name,
        lambda: os.putenv(name, ""))

    name = dummyVars[2]
    probe_change("os.unsetenv(%s) from %r" % (name, "foo"), name,
        lambda: os.unsetenv(name),
        setup=lambda: os.environ.__setitem__(name, "foo"))
    probe_change("del os.environ[%s] from %r" % (name, "foo"), name,
        lambda: os.environ.__delitem__(name),
        setup=lambda: os.environ.__setitem__(name, "foo"))
    probe_change("os.environ.pop(%s) from %r" % (name, "foo"), name,
        lambda: os.environ.pop(name),
        setup=lambda: os.environ.__setitem__(name, "foo"))
    probe_change("os.environ[%s] to %r from %r" % (name, "", "foo"), name,
        lambda: os.environ.__setitem__(name, ""),
        setup=lambda: os.environ.__setitem__(name, "foo"))

    name = dummyVars[3]
    probe_change("os.environ[%s] to %r" % (name, '""'), name,
        lambda: os.environ.__setitem__(name, '""'))
def _getExternalValues(t, *args):
    """Ask the build system to echo each named env var and parse the echoes.

    Returns a dict mapping each name to its echoed value, or None when the
    expected ***ENV*** marker line is absent from the output.
    """
    t.run_build_system(["---var-name=%s" % x for x in args])
    values = {}
    for name in args:
        pattern = r"^\*\*\*ENV\*\*\* %s: '(.*)' \*\*\*$" % name
        match = re.search(pattern, t.stdout(), re.MULTILINE)
        values[name] = match.group(1) if match else None
    return values
def _getJamVersionInfo(t):
    """Collect jam version variables plus raw 'bjam -v' / 'bjam --version'
    output, returned as a list of annotation lines."""
    result = []
    # JAM version variables.
    t.run_build_system(["---version"])
    for m in re.finditer(r"^\*\*\*VAR\*\*\* ([^:]*): (.*)\*\*\*$", t.stdout(),
re.MULTILINE):
        name = m.group(1)
        value = m.group(2)
        if not value:
            value = []
        elif value[-1] == ' ':
            # Values are joined with varSeparator and end with a space.
            value = value[:-1].split(varSeparator)
        else:
            # Missing the trailing space the jamroot always emits.
            value = "!!!INVALID!!! - '%s'" % value
        result.append("%s = %s" % (name, value))
    result.append("")
    # bjam -v output.
    t.run_build_system(["-v"])
    result.append("--- output for 'bjam -v' ---")
    result.append(t.stdout())
    # bjam --version output.
    t.run_build_system(["--version"], status=1)
    result.append("--- output for 'bjam --version' ---")
    result.append(t.stdout())
    return result
def _init():
    """Create a Tester with a dummy toolset plus a jamroot that echoes
    environment variables and jam version data back to us."""
    toolsetName = "__myDummyToolset__"
    t = BoostBuild.Tester(["toolset=%s" % toolsetName], pass_toolset=False,
use_test_config=False)
    # Prepare a dummy toolset so we do not get errors in case the default one
    # is not found.
    t.write(toolsetName + ".jam", """\
import feature ;
feature.extend toolset : %s ;
rule init ( ) { }
""" % toolsetName )
    # Python version of the same dummy toolset.
    t.write(toolsetName + ".py", """\
from b2.build import feature
feature.extend('toolset', ['%s'])
def init(): pass
""" % toolsetName )
    # The jamroot answers our ---var-name=X and ---version probes with
    # ***ENV***/***VAR*** marker lines that the callers parse.
    t.write("jamroot.jam", """\
import os ;
.argv = [ modules.peek : ARGV ] ;
local names = [ MATCH ^---var-name=(.*) : $(.argv) ] ;
for x in $(names)
{
value = [ os.environ $(x) ] ;
ECHO ***ENV*** $(x): '$(value)' *** ;
}
if ---version in $(.argv)
{
for x in JAMVERSION JAM_VERSION JAMUNAME JAM_TIMESTAMP_RESOLUTION OS
{
v = [ modules.peek : $(x) ] ;
ECHO ***VAR*** $(x): "$(v:J=%s)" *** ;
}
}
""" % varSeparator)
    return t
def _info(*values):
    """Annotate the given values (one per line) under the current tag."""
    lines = [str(x) for x in values] + [""]
    BoostBuild.annotation(tag, "\n".join(lines))
def _infoX(values):
    """Annotate an iterable of values (variadic splat onto _info)."""
    _info(*values)
def _info_exc():
    """Annotate the currently handled exception."""
    _info(_str_exc())
def _str_exc():
exc_type, exc_value = sys.exc_info()[0:2]
if exc_type is None:
exc_type_name = "None"
else:
exc_type_name = exc_type.__name__
return "*** EXCEPTION *** %s - %s ***" % (exc_type_name, exc_value)
###############################################################################
#
# main()
# ------
#
###############################################################################
collectDebugInfo()
| mit |
seem-sky/kbengine | kbe/res/scripts/common/Lib/site-packages/setuptools/command/develop.py | 477 | 6447 | from setuptools.command.easy_install import easy_install
from distutils.util import convert_path, subst_vars
from pkg_resources import Distribution, PathMetadata, normalize_path
from distutils import log
from distutils.errors import DistutilsError, DistutilsOptionError
import os, sys, setuptools, glob
class develop(easy_install):
    """Set up package for development"""
    description = "install package in 'development mode'"
    user_options = easy_install.user_options + [
        ("uninstall", "u", "Uninstall this source package"),
        ("egg-path=", None, "Set the path to be used in the .egg-link file"),
    ]
    boolean_options = easy_install.boolean_options + ['uninstall']
    command_consumes_arguments = False # override base
    def run(self):
        # Dispatch: either remove the egg-link or install in dev mode.
        if self.uninstall:
            self.multi_version = True
            self.uninstall_link()
        else:
            self.install_for_development()
        self.warn_deprecated_options()
    def initialize_options(self):
        self.uninstall = None
        self.egg_path = None
        easy_install.initialize_options(self)
        self.setup_path = None
        self.always_copy_from = '.' # always copy eggs installed in curdir
    def finalize_options(self):
        """Resolve egg paths and build the source Distribution object."""
        ei = self.get_finalized_command("egg_info")
        if ei.broken_egg_info:
            raise DistutilsError(
            "Please rename %r to %r before using 'develop'"
            % (ei.egg_info, ei.broken_egg_info)
            )
        self.args = [ei.egg_name]
        easy_install.finalize_options(self)
        self.expand_basedirs()
        self.expand_dirs()
        # pick up setup-dir .egg files only: no .egg-info
        self.package_index.scan(glob.glob('*.egg'))
        self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link')
        self.egg_base = ei.egg_base
        if self.egg_path is None:
            self.egg_path = os.path.abspath(ei.egg_base)
        target = normalize_path(self.egg_base)
        if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target:
            raise DistutilsOptionError(
            "--egg-path must be a relative path from the install"
            " directory to "+target
            )
        # Make a distribution for the package's source
        self.dist = Distribution(
        target,
        PathMetadata(target, os.path.abspath(ei.egg_info)),
        project_name = ei.egg_name
        )
        # Compute the relative path from the .egg-link back to setup.py.
        p = self.egg_base.replace(os.sep,'/')
        if p!= os.curdir:
            p = '../' * (p.count('/')+1)
        self.setup_path = p
        p = normalize_path(os.path.join(self.install_dir, self.egg_path, p))
        if p != normalize_path(os.curdir):
            raise DistutilsOptionError(
            "Can't get a consistent path to setup script from"
            " installation directory", p, normalize_path(os.curdir))
    def install_for_development(self):
        if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False):
            # If we run 2to3 we can not do this inplace:
            # Ensure metadata is up-to-date
            self.reinitialize_command('build_py', inplace=0)
            self.run_command('build_py')
            bpy_cmd = self.get_finalized_command("build_py")
            build_path = normalize_path(bpy_cmd.build_lib)
            # Build extensions
            self.reinitialize_command('egg_info', egg_base=build_path)
            self.run_command('egg_info')
            self.reinitialize_command('build_ext', inplace=0)
            self.run_command('build_ext')
            # Fixup egg-link and easy-install.pth
            ei_cmd = self.get_finalized_command("egg_info")
            self.egg_path = build_path
            self.dist.location = build_path
            self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info) # XXX
        else:
            # Without 2to3 inplace works fine:
            self.run_command('egg_info')
            # Build extensions in-place
            self.reinitialize_command('build_ext', inplace=1)
            self.run_command('build_ext')
        self.install_site_py() # ensure that target dir is site-safe
        if setuptools.bootstrap_install_from:
            self.easy_install(setuptools.bootstrap_install_from)
            setuptools.bootstrap_install_from = None
        # create an .egg-link in the installation dir, pointing to our egg
        log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
        if not self.dry_run:
            f = open(self.egg_link,"w")
            f.write(self.egg_path + "\n" + self.setup_path)
            f.close()
        # postprocess the installed distro, fixing up .pth, installing scripts,
        # and handling requirements
        self.process_distribution(None, self.dist, not self.no_deps)
    def uninstall_link(self):
        """Remove the .egg-link and the .pth entry pointing at this source."""
        if os.path.exists(self.egg_link):
            log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
            egg_link_file = open(self.egg_link)
            contents = [line.rstrip() for line in egg_link_file]
            egg_link_file.close()
            if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
                # Link was not written by us; refuse to delete it.
                log.warn("Link points to %s: uninstall aborted", contents)
                return
            if not self.dry_run:
                os.unlink(self.egg_link)
        if not self.dry_run:
            self.update_pth(self.dist) # remove any .pth link to us
        if self.distribution.scripts:
            # XXX should also check for entry point scripts!
            log.warn("Note: you must uninstall or replace scripts manually!")
    def install_egg_scripts(self, dist):
        if dist is not self.dist:
            # Installing a dependency, so fall back to normal behavior
            return easy_install.install_egg_scripts(self,dist)
        # create wrapper scripts in the script dir, pointing to dist.scripts
        # new-style...
        self.install_wrapper_scripts(dist)
        # ...and old-style
        for script_name in self.distribution.scripts or []:
            script_path = os.path.abspath(convert_path(script_name))
            script_name = os.path.basename(script_path)
            f = open(script_path,'rU')
            script_text = f.read()
            f.close()
            self.install_script(dist, script_name, script_text, script_path)
| lgpl-3.0 |
arximboldi/jagsat | lib/tf/gfx/widget/basic.py | 1 | 14927 | #
# Copyright (C) 2009 TribleFlame Oy
#
# This file is part of TF.
#
# TF is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# TF is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
from tf.gfx import ui
from PySFML import sf
import math
import sets
from tf.gfx import uirotation
from tf.gfx import uiactions
import re
import os
import random
from PySFML import sf
from tf import signalslot
class PlayerPositionMarker(ui.Image):
    """This is a small marker symbol at the edge of the screen
    from which the user can control the direction in which the user
    interface is displayed."""
    def __init__(self,
parent,
uirotationnotifier,
realplayerinfo,
marker_img_filename,
markershadow_img_filename):
        ui.Image.__init__(self, parent, marker_img_filename)
        self.uirotationnotifier = uirotationnotifier
        self.set_enable_hitting(True)
        # Anchor near the right edge, vertically centered, of the image.
        self.set_center(self._get_width() - 5,
self._get_height() / 2.0)
        # Ghost marker shown while the user drags the real one.
        self.shadow = ui.Image(parent, markershadow_img_filename)
        self.shadow.set_center(self._get_width() - 5,
self._get_height() / 2.0)
        self.shadow.set_visible(False)
        self.sticky_angles = sets.Set()
    def _calc_sticky_angles(self, window):
        # Snap targets: the four axes plus the angles toward the corners.
        self.sticky_angles = sets.Set([0, 90, 180, 270, 360])
        ang = math.degrees(math.atan2(window.window.GetHeight(),
window.window.GetWidth()))
        self.sticky_angles.add(90 - ang)
        self.sticky_angles.add(90 + ang)
        self.sticky_angles.add(270 + ang)
        self.sticky_angles.add(270 - ang)
    def set_focus(self, realplayerinfo, window):
        """Attach to a player and place the marker at their UI rotation."""
        self.realplayerinfo = realplayerinfo
        self.window = window
        self._calc_sticky_angles(self.window)
        if self.realplayerinfo.rotate_ui:
            x, y = self.rotate(self.realplayerinfo.rotation)
            self.set_position(x, y)
            self.SetRotation(-self.realplayerinfo.rotation - 90)
            self.uirotationnotifier.notify_all_ui(self.realplayerinfo.rotation)
    def rotate(self, rotation):
        """Map an angle (degrees) to the (x, y) point on the screen border
        where the marker should sit for that rotation."""
        width = self.window.window.GetWidth()
        height = self.window.window.GetHeight()
        hw = width / 2
        hh = height / 2
        rot = rotation
        rot += 90 # XXX ???
        rot = rot % 360
        r = math.radians(rot)
        t = math.tan(r)
        # I don't understand this, but it seems to work
        if rot > 270 or rot <= 90:
            y = hh + t * hw
            x = width - 1
            if y <= 0:
                y = 0
                x = hw - hh / t
            elif y >= height:
                y = height - 1
                x = hw + hh / t
        else:
            y = height - (hh + t * hw)
            x = 0
            if y <= 0:
                y = 0
                x = hw - hh / t
            elif y >= height:
                y = height - 1
                x = hw + hh / t
        return x, y
    def start_pan(self, event):
        # Show the shadow at the marker's current position/rotation.
        print "start player marker panning"
        self.shadow.set_visible(True)
        x, y = self.GetPosition()
        self.shadow.set_position(x, y)
        self.shadow.SetRotation(self.GetRotation())
    def do_pan(self, event):
        # Track the drag: move the shadow along the screen border.
        x = event[0]
        y = event[1]
        angle = self._calc(x, y)
        x, y = self.rotate(angle)
        self.shadow.set_position(x, y)
        self.shadow.SetRotation(-angle - 90)
    def _calc(self, x, y):
        """Convert a screen point to an angle from the screen center,
        snapping to nearby sticky angles (within 3 degrees)."""
        width = self.window.window.GetWidth()
        height = self.window.window.GetHeight()
        hw = width / 2
        hh = height / 2
        dx = x - hw
        dy = - (y - hh)
        angle = -math.degrees(math.atan2(dy, dx))
        angle += 270 #??
        angle = angle % 360
        assert angle >= 0
        assert angle < 360
        STICKY_ANGLE = 3
        for i in self.sticky_angles:
            da = abs(angle - i)
            if da < STICKY_ANGLE:
                angle = i
                break
        angle = angle % 360
        return angle
    def end_pan(self, event):
        # Commit the new rotation (if rotation is enabled) and hide the shadow.
        if self.realplayerinfo.rotate_ui:
            angle = self._calc(event[0], event[1])
            self.realplayerinfo.rotation = angle
            self.set_focus(self.realplayerinfo, self.window)
        self.shadow.set_visible(False)
def create_player_position_marker(parent,
                                  uirotationnotifier,
                                  realplayerinfo,
                                  img_name,
                                  imgshadow_name):
    """Factory wrapper: build and return a PlayerPositionMarker."""
    return PlayerPositionMarker(parent,
                                uirotationnotifier,
                                realplayerinfo,
                                img_name,
                                imgshadow_name)
class _Key(ui.Image):
    # One key of the on-screen keyboard; forwards clicks to its parent
    # keyboard, which knows the layout and shift state.
    def __init__(self, parent, fname, keyinfo):
        ui.Image.__init__(self, parent, fname)
        #self.set_center_rel(ui.CENTER)
        self.set_enable_hitting(True)
        # keyinfo: layout dict with "key" and optional "shift"/"extra"/"pos".
        self.keyinfo = keyinfo
        self.signal_click.add(lambda e: self.parent._hit_key(self))
    # def user_hit(self, gameloop, x, y):
    # self.parent._hit_key(self)
def get_dir_of_python_file(file):
    """Strip the trailing path component from *file*.

    Regex-based on purpose: an input without any os.sep is returned
    unchanged (unlike os.path.dirname, which would return '').
    """
    sep = re.escape(os.sep)
    return re.sub(sep + "[^" + sep + "]+$", "", file)
class _KeyboardConfig:
    # Builds the on-screen keyboard layout: one _Key sprite per entry in
    # the table below, laid out on a 50x50 grid starting at (30, 90).
    def __init__(self, parent, theme = None):
        self.keys = set()
        self.is_shifted = False
        dir = theme.directory if theme else get_dir_of_python_file(__file__)
        start_x = 30
        start_y = 90
        x = start_x
        y = start_y
        key_unit_width = 50
        key_unit_height = 50
        # Each row: "key" (unshifted label), optional "shift" (shifted
        # label), optional "extra" (special key id), optional "pos"
        # (explicit grid position; otherwise keys flow left-to-right).
        # NOTE(review): the "4" key is absent from the layout -- intentional?
        for i in [
            #{ "key": "!", "pos": (0.0, 0.0) },
            #{ "key": "?", },
            #{ "key": ".", },
            #{ "key": ",", },
            #{ "key": ":", },
            #{ "key": ";", },
            #{ "key": "+", },
            #{ "key": "-", },
            #{ "key": "_", },
            #{ "key": "\"", },
            #{ "key": "#", },
            #{ "key": "$", },
            #{ "key": "%", },
            #{ "key": "&", },
            #{ "key": "/", },
            #{ "key": "(", },
            #{ "key": ")", },
            #{ "key": "=", },
            #{ "key": "<", "pos": (0.0, 0.0) },
            #{ "key": ">", },
            #{ "key": "{", },
            #{ "key": "}", },
            #{ "key": "[", },
            #{ "key": "]", },
            #{ "key": "", },
            #{ "key": "", },
            #{ "key": "", },
            #{ "key": "", },
            # + accented chars
            # + Other european chars
            # + others?
            {"key": "1", "shift": "!", "pos": (1.0, 0.0)},
            {"key": "2", "shift": "\""},
            {"key": "3", "shift": "#"},
            {"key": "5", "shift": "%"},
            {"key": "6", "shift": "&"},
            {"key": "7", "shift": "/"},
            {"key": "8", "shift": "("},
            {"key": "9", "shift": ")"},
            {"key": "0", "shift": "="},
            {"key": "<-", "extra": "DEL", "pos": (13, 0)},
            {"key": "q", "shift": "Q", "pos": (1.5, 1.0)},
            {"key": "w", "shift": "W"},
            {"key": "e", "shift": "E"},
            {"key": "r", "shift": "R"},
            {"key": "t", "shift": "T"},
            {"key": "y", "shift": "Y"},
            {"key": "u", "shift": "U"},
            {"key": "i", "shift": "I"},
            {"key": "o", "shift": "O"},
            {"key": "p", "shift": "P"},
            {"key": "a", "shift": "A", "pos": (2, 2.0)},
            {"key": "s", "shift": "S"},
            {"key": "d", "shift": "D"},
            {"key": "f", "shift": "F"},
            {"key": "g", "shift": "G"},
            {"key": "h", "shift": "H"},
            {"key": "j", "shift": "J"},
            {"key": "k", "shift": "K"},
            {"key": "l", "shift": "L"},
            {"key": "<-/", "extra": "ENTER", "pos": (13, 1)},
            {"key": "=^=", "extra": "SHIFTLEFT", "pos": (0.5, 3.0)},
            {"key": "<", "shift": ">"},
            {"key": "z", "shift": "Z"},
            {"key": "x", "shift": "X"},
            {"key": "c", "shift": "C"},
            {"key": "v", "shift": "V"},
            {"key": "b", "shift": "B"},
            {"key": "n", "shift": "N"},
            {"key": "m", "shift": "M"},
            {"key": "=^=", "extra": "SHIFTRIGHT"},
        ]:
            i["key"] = unicode(i["key"], "UTF-8")
            if "shift" in i:
                i["shift"] = unicode(i["shift"], "UTF-8")
            fname = None
            # Special keys get dedicated images; normal keys share one.
            if "extra" in i:
                e = i["extra"]
                if False:
                    pass
                elif e == "DEL":
                    fname = dir + os.sep + "key_del.png"
                elif e == "ENTER":
                    fname = dir + os.sep + "key_return.png"
                elif e == "SHIFTLEFT":
                    fname = dir + os.sep + "key_shift_left.png"
                elif e == "SHIFTRIGHT":
                    fname = dir + os.sep + "key_shift_right.png"
                else:
                    # Key not supported
                    print "KEY", e
                    assert 0
            else:
                fname = dir + os.sep + "one_key.png"
            u = _Key(None, fname, i)
            if "pos" in i:
                # Explicit position restarts the flow from this grid cell.
                x = i["pos"][0] * key_unit_width + start_x
                y = i["pos"][1] * key_unit_height + start_y
            u.SetPosition(x, y)
            # XXX render this to the blackbox instead...
            parent.add_back_child(u)
            self.keys.add(u)
            if "extra" not in i:
                # Center the label string on the key sprite.
                k = ui.String(None, i["key"])
                yfuzz = -4
                xfuzz = 0
                k.SetPosition(u.GetPosition()[0] + u._get_width()/2.0 - \
k._get_width()/2.0 + xfuzz,
u.GetPosition()[1] + u._get_height()/2.0 - \
k._get_height()/2.0 + yfuzz)
                parent.add_child(k)
                u.keysprite = k
            # Flow to the next cell, wrapping at the right edge.
            x += key_unit_width
            if x > parent.width - key_unit_width:
                x = start_x
                y += key_unit_height
class Keyboard(ui.FreeformContainer):
    # On-screen keyboard widget: a black background panel, a white text
    # entry box, and a _KeyboardConfig key layout.  Text is delivered to
    # observers via the signal_text_entered signal when ENTER is hit.
    def __init__(self, parent, theme = None):
        ui.FreeformContainer.__init__(self, parent)
        self.width = 750
        self.height = 300
        self.set_position_rel(ui.CENTER)
        self.set_center_rel(ui.CENTER)
        # In letters -- not anymore, now in pixels :D
        self.maxlength = 260
        blackbox = ui.RoundedRectangle(None,
0, 0,
self.width, self.height,
theme.blackbox.radius,
theme.blackbox.color,
theme.blackbox.border,
theme.blackbox.thickness)
        self.set_enable_hitting(True)
        self.add_back_child(blackbox)
        whitestr = ui.String(None,
u"")
        whitebox = ui.RoundedRectangle(None,
170, 30,
300, whitestr._get_height() + 45,
theme.whitebox.radius,
theme.whitebox.color,
theme.whitebox.border,
theme.whitebox.thickness)
        whitestr.SetPosition(15, 7)
        whitestr.set_size (25)
        self.add_child(whitebox)
        whitebox.add_child(whitestr)
        self.whitestr = whitestr
        self.kconfig = _KeyboardConfig(self, theme)
        self.signal_text_entered = signalslot.Signal("text_entered")
        self.signal_text_failed = signalslot.Signal("text_failed")
        # Start hidden off-screen; reinitialize() slides it back in.
        uiactions.move_out(self, 200)
    def reinitialize(self):
        # Slide the keyboard in and drop any observers from a previous use.
        uiactions.move_in(self, 50)
        self.signal_text_entered.remove_all()
        self.signal_text_failed.remove_all()
    def set_max_length(self, mx):
        # In letters
        self.maxlength = mx
    def get_max_length(self):
        return self.maxlength
    def _hit_key(self, key):
        """Handle a key press coming from a _Key child."""
        keyinfo = key.keyinfo
        s = self.whitestr.get_string()
        if "extra" in keyinfo:
            e = keyinfo["extra"]
            if 0:
                pass
            elif e == "DEL":
                s = s[:-1]
            elif e == "ENTER":
                # Accepts the string and sends it to observers
                e = signalslot.Event("text_entered",
text = self.whitestr.get_string())
                self.signal_text_entered.call(e)
                uiactions.move_out(self, 50)
            elif e == "SHIFTLEFT" or e == "SHIFTRIGHT":
                # Toggle shift: relabel every printable key and tint the
                # shift keys while the shifted state is active.
                self.kconfig.is_shifted ^= True
                value = "key"
                if self.kconfig.is_shifted:
                    value = "shift"
                for key in self.kconfig.keys:
                    if "extra" in key.keyinfo:
                        e = key.keyinfo["extra"]
                        if e == "SHIFTLEFT" or e == "SHIFTRIGHT":
                            if self.kconfig.is_shifted:
                                c = sf.Color(200, 200, 200, 255)
                            else:
                                c = sf.Color(255, 255, 255, 255)
                            key._sprite.SetColor(c)
                        continue
                    if value in key.keyinfo:
                        # BUG this should be automatic
                        u = key
                        k = u.keysprite
                        yfuzz = -4
                        xfuzz = 0
                        k.set_string(key.keyinfo[value])
                        k.SetPosition(u.GetPosition()[0] + \
u._get_width()/2.0 - \
k._get_width()/2.0 + xfuzz,
u.GetPosition()[1] + \
u._get_height()/2.0 - \
k._get_height()/2.0 + yfuzz)
                    else:
                        assert 0
        else:
            # Ordinary key: append its label, unless the entry is full.
            if self.whitestr._get_width () >= self.maxlength:
                return
            s += key.keysprite.get_string()
        self.whitestr.set_string(s)
    def show_keyboard_and_inject_answer(self,
originstr,
targetcallback_on_success,
targetcallback_on_failure):
        # Show the keyboard pre-filled with originstr; deliver the result
        # to the given callbacks.
        self.reinitialize()
        self.whitestr.set_string(originstr)
        self.set_visible(True)
        self.signal_text_entered.add(targetcallback_on_success)
        self.signal_text_failed.add(targetcallback_on_failure)
    def show_keyboard_and_inject_answer_string(self,
string):
        # Show the keyboard pre-filled from a ui.String; write the edited
        # text back into that same string object on ENTER.
        self.reinitialize()
        self.whitestr.set_string(string.get_string())
        self.set_visible(True)
        self.signal_text_entered.add(lambda e: string.set_string(e.text))
| gpl-3.0 |
ToonTownInfiniteRepo/ToontownInfinite | toontown/toon/InventoryNew.py | 1 | 56688 | from direct.gui.DirectGui import *
from pandac.PandaModules import *
from toontown.toonbase.ToontownBattleGlobals import *
import InventoryBase
from toontown.toonbase import TTLocalizer
from toontown.quest import BlinkingArrows
from direct.interval.IntervalGlobal import *
from direct.directnotify import DirectNotifyGlobal
from toontown.toonbase import ToontownGlobals
from otp.otpbase import OTPGlobals
class InventoryNew(InventoryBase.InventoryBase, DirectFrame):
    """Gag-inventory GUI used in battle, the shticker book and gag shops.

    Python 2 / Panda3D DirectGUI code.  One row per gag track, one button
    per gag level; setActivateMode() re-skins the same widgets for each
    screen (book, battle, purchase, delete variants, ...).
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('InventoryNew')
    # Button color states for the various pressable/unpressable modes.
    PressableTextColor = Vec4(1, 1, 1, 1)
    PressableGeomColor = Vec4(1, 1, 1, 1)
    PressableImageColor = Vec4(0, 0.6, 1, 1)
    PropBonusPressableImageColor = Vec4(1.0, 0.6, 0.0, 1)
    NoncreditPressableImageColor = Vec4(0.3, 0.6, 0.6, 1)
    PropBonusNoncreditPressableImageColor = Vec4(0.6, 0.6, 0.3, 1)
    DeletePressableImageColor = Vec4(0.7, 0.1, 0.1, 1)
    UnpressableTextColor = Vec4(1, 1, 1, 0.3)
    UnpressableGeomColor = Vec4(1, 1, 1, 0.3)
    UnpressableImageColor = Vec4(0.3, 0.3, 0.3, 0.8)
    BookUnpressableTextColor = Vec4(1, 1, 1, 1)
    BookUnpressableGeomColor = Vec4(1, 1, 1, 1)
    BookUnpressableImage0Color = Vec4(0, 0.6, 1, 1)
    BookUnpressableImage2Color = Vec4(0.1, 0.7, 1, 1)
    ShadowColor = Vec4(0, 0, 0, 0)
    ShadowBuffedColor = Vec4(1, 1, 1, 1)
    UnpressableShadowBuffedColor = Vec4(1, 1, 1, 0.3)
    # Layout constants: per-track row Y placement, per-level button X placement.
    TrackYOffset = 0.0
    TrackYSpacing = -0.12
    ButtonXOffset = -0.31
    ButtonXSpacing = 0.18
    def __init__(self, toon, invStr = None, ShowSuperGags = 1):
        """Build the inventory GUI for *toon*.

        invStr: optional serialized inventory passed through to InventoryBase.
        ShowSuperGags: whether level-7 (super) gags are displayed at all.
        """
        InventoryBase.InventoryBase.__init__(self, toon, invStr)
        DirectFrame.__init__(self, relief=None)
        self.initialiseoptions(InventoryNew)
        self.battleCreditLevel = None
        self.detailCredit = None
        # Multipliers applied to the skill credit shown in the detail panel.
        self.__battleCreditMultiplier = 1
        self.__invasionCreditMultiplier = 1
        self.__respectInvasions = 1
        # Track index receiving an interactive-prop bonus; -1 means none.
        self._interactivePropTrackBonus = -1
        self.tutorialFlag = 0
        self.gagTutMode = 0
        self.showSuperGags = ShowSuperGags
        self.clickSuperGags = 1
        # Config switch: do organic and prop damage bonuses stack?
        self.propAndOrganicBonusStack = base.config.GetBool('prop-and-organic-bonus-stack', 0)
        self.propBonusIval = Parallel()
        self.activateMode = 'book'
        self.load()
        self.hide()
        return
    def setBattleCreditMultiplier(self, mult):
        # Multiplier applied to battle skill credit (e.g. double-XP events).
        self.__battleCreditMultiplier = mult

    def getBattleCreditMultiplier(self):
        return self.__battleCreditMultiplier

    def setInteractivePropTrackBonus(self, trackBonus):
        # Track index currently boosted by an interactive prop (-1 = none).
        self._interactivePropTrackBonus = trackBonus

    def getInteractivePropTrackBonus(self):
        return self._interactivePropTrackBonus

    def setInvasionCreditMultiplier(self, mult):
        # Extra credit multiplier active during cog invasions.
        self.__invasionCreditMultiplier = mult

    def getInvasionCreditMultiplier(self):
        return self.__invasionCreditMultiplier

    def setRespectInvasions(self, flag):
        # When true, the invasion multiplier is factored into displayed credit.
        self.__respectInvasions = flag

    def getRespectInvasions(self):
        return self.__respectInvasions
    def show(self):
        """Show the inventory; in the tutorial also blink arrows at the
        throw/squirt buttons the player still has gags for."""
        if self.tutorialFlag:
            self.tutArrows.arrowsOn(-0.43, -0.12, 180, -0.43, -0.24, 180, onTime=1.0, offTime=0.2)
            # Hide each arrow if its gag is used up (count is zero).
            if self.numItem(THROW_TRACK, 0) == 0:
                self.tutArrows.arrow1.reparentTo(hidden)
            else:
                self.tutArrows.arrow1.reparentTo(self.battleFrame, 1)
            if self.numItem(SQUIRT_TRACK, 0) == 0:
                self.tutArrows.arrow2.reparentTo(hidden)
            else:
                self.tutArrows.arrow2.reparentTo(self.battleFrame, 1)
            self.tutText.show()
            self.tutText.reparentTo(self.battleFrame, 1)
        DirectFrame.show(self)
def uberGagToggle(self, showSuperGags = 1):
self.showSuperGags = showSuperGags
for itemList in self.invModels:
for itemIndex in xrange(MAX_LEVEL_INDEX + 1):
if itemIndex <= LAST_REGULAR_GAG_LEVEL + 1 or self.showSuperGags:
itemList[itemIndex].show()
else:
itemList[itemIndex].hide()
for buttonList in self.buttons:
for buttonIndex in xrange(MAX_LEVEL_INDEX + 1):
if buttonIndex <= LAST_REGULAR_GAG_LEVEL or self.showSuperGags:
buttonList[buttonIndex].show()
else:
buttonList[buttonIndex].hide()
def enableUberGags(self, enableSG = -1):
if enableSG != -1:
self.clickSuperGags = enableSG
for buttonList in self.buttons:
for buttonIndex in xrange(LAST_REGULAR_GAG_LEVEL + 1, MAX_LEVEL_INDEX + 1):
if self.clickSuperGags:
pass
else:
self.makeUnpressable(buttonList[buttonIndex], self.buttons.index(buttonList), buttonIndex)
    def hide(self):
        """Hide the inventory plus any tutorial arrows/text."""
        if self.tutorialFlag:
            self.tutArrows.arrowsOff()
            self.tutText.hide()
        DirectFrame.hide(self)
def updateTotalPropsText(self):
textTotal = TTLocalizer.InventoryTotalGags % (self.totalProps, self.toon.getMaxCarry())
if localAvatar.getPinkSlips() > 1:
textTotal = textTotal + '\n\n' + TTLocalizer.InventroyPinkSlips % localAvatar.getPinkSlips()
elif localAvatar.getPinkSlips() == 1:
textTotal = textTotal + '\n\n' + TTLocalizer.InventroyPinkSlip
self.totalLabel['text'] = textTotal
    def unload(self):
        """Tear down every GUI node and release model references.

        Order matters: intervals are stopped first, then scene-graph nodes
        are destroyed before their Python references are dropped.
        """
        self.notify.debug('Unloading Inventory for %d' % self.toon.doId)
        self.stopAndClearPropBonusIval()
        self.propBonusIval.finish()
        self.propBonusIval = None
        del self.invModels
        self.buttonModels.removeNode()
        del self.buttonModels
        del self.upButton
        del self.downButton
        del self.rolloverButton
        del self.flatButton
        del self.invFrame
        del self.battleFrame
        del self.purchaseFrame
        del self.storePurchaseFrame
        self.deleteEnterButton.destroy()
        del self.deleteEnterButton
        self.deleteExitButton.destroy()
        del self.deleteExitButton
        del self.detailFrame
        del self.detailNameLabel
        del self.detailAmountLabel
        del self.detailDataLabel
        del self.totalLabel
        for row in self.trackRows:
            row.destroy()
        del self.trackRows
        del self.trackNameLabels
        del self.trackBars
        for buttonList in self.buttons:
            for buttonIndex in xrange(MAX_LEVEL_INDEX + 1):
                buttonList[buttonIndex].destroy()
        del self.buttons
        InventoryBase.InventoryBase.unload(self)
        DirectFrame.destroy(self)
        return
    def load(self):
        """Build every GUI element: gag icon models, shared button art,
        the delete/trash buttons, the detail panel, and one row per track
        with an XP bar plus one button per gag level."""
        self.notify.debug('Loading Inventory for %d' % self.toon.doId)
        # Gag icon geometry, indexed as self.invModels[track][level].
        invModel = loader.loadModel('phase_3.5/models/gui/inventory_icons')
        self.invModels = []
        for track in xrange(len(AvPropsNew)):
            itemList = []
            for item in xrange(len(AvPropsNew[track])):
                itemList.append(invModel.find('**/' + AvPropsNew[track][item]))
            self.invModels.append(itemList)
        invModel.removeNode()
        del invModel
        # Shared button art for all gag buttons.
        self.buttonModels = loader.loadModel('phase_3.5/models/gui/inventory_gui')
        self.rowModel = self.buttonModels.find('**/InventoryRow')
        self.upButton = self.buttonModels.find('**/InventoryButtonUp')
        self.downButton = self.buttonModels.find('**/InventoryButtonDown')
        self.rolloverButton = self.buttonModels.find('**/InventoryButtonRollover')
        self.flatButton = self.buttonModels.find('**/InventoryButtonFlat')
        self.invFrame = DirectFrame(relief=None, parent=self)
        # Mode-specific frames are created lazily by the activate routines.
        self.battleFrame = None
        self.purchaseFrame = None
        self.storePurchaseFrame = None
        # Trash-can buttons for entering/leaving delete mode.
        trashcanGui = loader.loadModel('phase_3/models/gui/trashcan_gui')
        self.deleteEnterButton = DirectButton(parent=self.invFrame, image=(trashcanGui.find('**/TrashCan_CLSD'), trashcanGui.find('**/TrashCan_OPEN'), trashcanGui.find('**/TrashCan_RLVR')), text=('', TTLocalizer.InventoryDelete, TTLocalizer.InventoryDelete), text_fg=(1, 1, 1, 1), text_shadow=(0, 0, 0, 1), text_scale=0.1, text_pos=(0, -0.1), text_font=getInterfaceFont(), textMayChange=0, relief=None, pos=(-1, 0, -0.35), scale=1.0)
        self.deleteExitButton = DirectButton(parent=self.invFrame, image=(trashcanGui.find('**/TrashCan_OPEN'), trashcanGui.find('**/TrashCan_CLSD'), trashcanGui.find('**/TrashCan_RLVR')), text=('', TTLocalizer.InventoryDone, TTLocalizer.InventoryDone), text_fg=(1, 1, 1, 1), text_shadow=(0, 0, 0, 1), text_scale=0.1, text_pos=(0, -0.1), text_font=getInterfaceFont(), textMayChange=0, relief=None, pos=(-1, 0, -0.35), scale=1.0)
        trashcanGui.removeNode()
        self.deleteHelpText = DirectLabel(parent=self.invFrame, relief=None, pos=(0.272, 0.3, -0.907), text=TTLocalizer.InventoryDeleteHelp, text_fg=(0, 0, 0, 1), text_scale=0.08, textMayChange=0)
        self.deleteHelpText.hide()
        # Detail panel: gag name/icon, count, stats and skill-credit lines.
        self.detailFrame = DirectFrame(parent=self.invFrame, relief=None, pos=(1.05, 0, -0.08))
        self.detailNameLabel = DirectLabel(parent=self.detailFrame, text='', text_scale=TTLocalizer.INdetailNameLabel, text_fg=(0.05, 0.14, 0.4, 1), scale=0.045, pos=(0, 0, 0), text_font=getInterfaceFont(), relief=None, image=self.invModels[0][0])
        self.detailAmountLabel = DirectLabel(parent=self.detailFrame, text='', text_fg=(0.05, 0.14, 0.4, 1), scale=0.04, pos=(0.16, 0, -0.175), text_font=getInterfaceFont(), text_align=TextNode.ARight, relief=None)
        self.detailDataLabel = DirectLabel(parent=self.detailFrame, text='', text_fg=(0.05, 0.14, 0.4, 1), scale=0.04, pos=(-0.22, 0, -0.24), text_font=getInterfaceFont(), text_align=TextNode.ALeft, relief=None)
        self.detailCreditLabel = DirectLabel(parent=self.detailFrame, text=TTLocalizer.InventorySkillCreditNone, text_fg=(0.05, 0.14, 0.4, 1), scale=0.04, pos=(-0.22, 0, -0.365), text_font=getInterfaceFont(), text_align=TextNode.ALeft, relief=None)
        self.detailCreditLabel.hide()
        self.totalLabel = DirectLabel(text='', parent=self.detailFrame, pos=(0, 0, -0.095), scale=0.05, text_fg=(0.05, 0.14, 0.4, 1), text_font=getInterfaceFont(), relief=None)
        self.updateTotalPropsText()
        # One row per gag track: colored frame, name label, XP bar, buttons.
        self.trackRows = []
        self.trackNameLabels = []
        self.trackBars = []
        self.buttons = []
        for track in xrange(0, len(Tracks)):
            trackFrame = DirectFrame(parent=self.invFrame, image=self.rowModel, scale=(1.0, 1.0, 1.1), pos=(0, 0.3, self.TrackYOffset + track * self.TrackYSpacing), image_color=(TrackColors[track][0],
             TrackColors[track][1],
             TrackColors[track][2],
             1), state=DGG.NORMAL, relief=None)
            # Rollover detection drives the "buffed" row highlight elsewhere.
            trackFrame.bind(DGG.WITHIN, self.enterTrackFrame, extraArgs=[track])
            trackFrame.bind(DGG.WITHOUT, self.exitTrackFrame, extraArgs=[track])
            self.trackRows.append(trackFrame)
            adjustLeft = -0.065
            self.trackNameLabels.append(DirectLabel(text=TextEncoder.upper(Tracks[track]), parent=self.trackRows[track], pos=(-0.72 + adjustLeft, -0.1, 0.01), scale=TTLocalizer.INtrackNameLabels, relief=None, text_fg=(0.2, 0.2, 0.2, 1), text_font=getInterfaceFont(), text_align=TextNode.ALeft, textMayChange=0))
            self.trackBars.append(DirectWaitBar(parent=self.trackRows[track], pos=(-0.58 + adjustLeft, -0.1, -0.025), relief=DGG.SUNKEN, frameSize=(-0.6,
             0.6,
             -0.1,
             0.1), borderWidth=(0.02, 0.02), scale=0.25, frameColor=(TrackColors[track][0] * 0.6,
             TrackColors[track][1] * 0.6,
             TrackColors[track][2] * 0.6,
             1), barColor=(TrackColors[track][0] * 0.9,
             TrackColors[track][1] * 0.9,
             TrackColors[track][2] * 0.9,
             1), text='0 / 0', text_scale=0.16, text_fg=(0, 0, 0, 0.8), text_align=TextNode.ACenter, text_pos=(0, -0.05)))
            self.buttons.append([])
            for item in xrange(0, len(Levels[track])):
                button = DirectButton(parent=self.trackRows[track], image=(self.upButton,
                 self.downButton,
                 self.rolloverButton,
                 self.flatButton), geom=self.invModels[track][item], text='50', text_scale=0.04, text_align=TextNode.ARight, geom_scale=0.7, geom_pos=(-0.01, -0.1, 0), text_fg=Vec4(1, 1, 1, 1), text_pos=(0.07, -0.04), textMayChange=1, relief=None, image_color=(0, 0.6, 1, 1), pos=(self.ButtonXOffset + item * self.ButtonXSpacing + adjustLeft, -0.1, 0), command=self.__handleSelection, extraArgs=[track, item])
                # Hovering a button fills the detail panel for that gag.
                button.bind(DGG.ENTER, self.showDetail, extraArgs=[track, item])
                button.bind(DGG.EXIT, self.hideDetail)
                self.buttons[track].append(button)
        return
def __handleSelection(self, track, level):
if self.activateMode == 'purchaseDelete' or self.activateMode == 'bookDelete' or self.activateMode == 'storePurchaseDelete':
if self.numItem(track, level):
self.useItem(track, level)
self.updateGUI(track, level)
messenger.send('inventory-deletion', [track, level])
self.showDetail(track, level)
elif self.activateMode == 'purchase' or self.activateMode == 'storePurchase':
messenger.send('inventory-selection', [track, level])
self.showDetail(track, level)
elif self.gagTutMode:
pass
else:
messenger.send('inventory-selection', [track, level])
    def __handleRun(self):
        # Battle "Run" button.
        messenger.send('inventory-run')

    def __handleFire(self):
        # Battle "Fire" (pink-slip) button.
        messenger.send('inventory-fire')

    def __handleSOS(self):
        # Battle "SOS" button.
        messenger.send('inventory-sos')

    def __handlePass(self):
        # Battle "Pass" button.
        messenger.send('inventory-pass')

    def __handleBackToPlayground(self):
        messenger.send('inventory-back-to-playground')
    def showDetail(self, track, level, event = None):
        """Fill the detail panel for the hovered gag: name/icon, count,
        accuracy, damage (with organic/prop bonus) and skill credit."""
        self.totalLabel.hide()
        self.detailNameLabel.show()
        self.detailNameLabel.configure(text=AvPropStrings[track][level], image_image=self.invModels[track][level])
        self.detailNameLabel.configure(image_scale=20, image_pos=(-0.2, 0, -2.2))
        self.detailAmountLabel.show()
        self.detailAmountLabel.configure(text=TTLocalizer.InventoryDetailAmount % {'numItems': self.numItem(track, level),
         'maxItems': self.getMax(track, level)})
        self.detailDataLabel.show()
        damage = getAvPropDamage(track, level, self.toon.experience.getExp(track))
        organicBonus = self.toon.checkGagBonus(track, level)
        propBonus = self.checkPropBonus(track)
        damageBonusStr = ''
        damageBonus = 0
        if self.propAndOrganicBonusStack:
            # Stacking config: each bonus source adds its own increment.
            if propBonus:
                damageBonus += getDamageBonus(damage)
            if organicBonus:
                damageBonus += getDamageBonus(damage)
            if damageBonus:
                damageBonusStr = TTLocalizer.InventoryDamageBonus % damageBonus
        else:
            # Non-stacking: at most one bonus increment regardless of source.
            if propBonus or organicBonus:
                damageBonus += getDamageBonus(damage)
            if damageBonus:
                damageBonusStr = TTLocalizer.InventoryDamageBonus % damageBonus
        accString = AvTrackAccStrings[track]
        if (organicBonus or propBonus) and track == LURE_TRACK:
            # Lure bonuses improve accuracy instead of damage.
            accString = TTLocalizer.BattleGlobalLureAccMedium
        self.detailDataLabel.configure(text=TTLocalizer.InventoryDetailData % {'accuracy': accString,
         'damageString': self.getToonupDmgStr(track, level),
         'damage': damage,
         'bonus': damageBonusStr,
         'singleOrGroup': self.getSingleGroupStr(track, level)})
        if self.itemIsCredit(track, level):
            # Base credit is level+1, scaled by battle/invasion multipliers.
            mult = self.__battleCreditMultiplier
            if self.__respectInvasions:
                mult *= self.__invasionCreditMultiplier
            self.setDetailCredit(track, (level + 1) * mult)
        else:
            self.setDetailCredit(track, None)
        self.detailCreditLabel.show()
        return
    def setDetailCredit(self, track, credit):
        """Update the skill-credit line of the detail panel.

        credit: raw credit value, or None for "no credit".  It is capped by
        the remaining experience headroom for the track, then rounded to
        one decimal (shown as an int when whole -- note the deliberate
        Python 2 int vs float division below).
        """
        if credit != None:
            if self.toon.earnedExperience:
                maxCredit = ExperienceCap - self.toon.earnedExperience[track]
                credit = min(credit, maxCredit)
            # Round to one decimal place in fixed-point.
            credit = int(credit * 10 + 0.5)
            if credit % 10 == 0:
                credit /= 10
            else:
                credit /= 10.0
        if self.detailCredit == credit:
            # No visual change needed.
            return
        if credit != None:
            self.detailCreditLabel['text'] = TTLocalizer.InventorySkillCredit % credit
            if self.detailCredit == None:
                # Coming from the red "no credit" state: restore normal color.
                self.detailCreditLabel['text_fg'] = (0.05, 0.14, 0.4, 1)
        else:
            self.detailCreditLabel['text'] = TTLocalizer.InventorySkillCreditNone
            self.detailCreditLabel['text_fg'] = (0.5, 0.0, 0.0, 1.0)
        self.detailCredit = credit
        return
def hideDetail(self, event = None):
self.totalLabel.show()
self.detailNameLabel.hide()
self.detailAmountLabel.hide()
self.detailDataLabel.hide()
self.detailCreditLabel.hide()
def noDetail(self):
self.totalLabel.hide()
self.detailNameLabel.hide()
self.detailAmountLabel.hide()
self.detailDataLabel.hide()
self.detailCreditLabel.hide()
    def setActivateMode(self, mode, heal = 1, trap = 1, lure = 1, bldg = 0, creditLevel = None, tutorialFlag = 0, gagTutMode = 0):
        """Switch the inventory GUI into *mode* ('book', 'battle',
        'purchase', delete variants, ...).

        Deactivates the previous mode's buttons, stores the battle flags
        (heal/trap/lure usability, building battle, credit level, tutorial
        state) and activates the buttons for the new mode.
        """
        self.notify.debug('setActivateMode() mode:%s heal:%s trap:%s lure:%s bldg:%s' % (mode,
         heal,
         trap,
         lure,
         bldg))
        self.previousActivateMode = self.activateMode
        self.activateMode = mode
        self.deactivateButtons()
        self.heal = heal
        self.trap = trap
        self.lure = lure
        self.bldg = bldg
        self.battleCreditLevel = creditLevel
        self.tutorialFlag = tutorialFlag
        self.gagTutMode = gagTutMode
        self.__activateButtons()
        self.enableUberGags()
        return None
def setActivateModeBroke(self):
if self.activateMode == 'storePurchase':
self.setActivateMode('storePurchaseBroke')
elif self.activateMode == 'purchase':
self.setActivateMode('purchaseBroke', gagTutMode=self.gagTutMode)
else:
self.notify.error('Unexpected mode in setActivateModeBroke(): %s' % self.activateMode)
self.enableUberGags()
def deactivateButtons(self):
if self.previousActivateMode == 'book':
self.bookDeactivateButtons()
elif self.previousActivateMode == 'bookDelete':
self.bookDeleteDeactivateButtons()
elif self.previousActivateMode == 'purchaseDelete':
self.purchaseDeleteDeactivateButtons()
elif self.previousActivateMode == 'purchase':
self.purchaseDeactivateButtons()
elif self.previousActivateMode == 'purchaseBroke':
self.purchaseBrokeDeactivateButtons()
elif self.previousActivateMode == 'gagTutDisabled':
self.gagTutDisabledDeactivateButtons()
elif self.previousActivateMode == 'battle':
self.battleDeactivateButtons()
elif self.previousActivateMode == 'storePurchaseDelete':
self.storePurchaseDeleteDeactivateButtons()
elif self.previousActivateMode == 'storePurchase':
self.storePurchaseDeactivateButtons()
elif self.previousActivateMode == 'storePurchaseBroke':
self.storePurchaseBrokeDeactivateButtons()
elif self.previousActivateMode == 'plantTree':
self.plantTreeDeactivateButtons()
else:
self.notify.error('No such mode as %s' % self.previousActivateMode)
return None
def __activateButtons(self):
if hasattr(self, 'activateMode'):
if self.activateMode == 'book':
self.bookActivateButtons()
elif self.activateMode == 'bookDelete':
self.bookDeleteActivateButtons()
elif self.activateMode == 'purchaseDelete':
self.purchaseDeleteActivateButtons()
elif self.activateMode == 'purchase':
self.purchaseActivateButtons()
elif self.activateMode == 'purchaseBroke':
self.purchaseBrokeActivateButtons()
elif self.activateMode == 'gagTutDisabled':
self.gagTutDisabledActivateButtons()
elif self.activateMode == 'battle':
self.battleActivateButtons()
elif self.activateMode == 'storePurchaseDelete':
self.storePurchaseDeleteActivateButtons()
elif self.activateMode == 'storePurchase':
self.storePurchaseActivateButtons()
elif self.activateMode == 'storePurchaseBroke':
self.storePurchaseBrokeActivateButtons()
elif self.activateMode == 'plantTree':
self.plantTreeActivateButtons()
else:
self.notify.error('No such mode as %s' % self.activateMode)
return None
    def bookActivateButtons(self):
        """Lay out the inventory as a read-only page of the shticker book."""
        self.setPos(0, 0, 0.52)
        self.setScale(1.0)
        self.detailFrame.setPos(0.1, 0, -0.855)
        self.detailFrame.setScale(0.75)
        self.deleteEnterButton.hide()
        self.deleteEnterButton.setPos(1.029, 0, -0.639)
        self.deleteEnterButton.setScale(0.75)
        self.deleteExitButton.hide()
        self.deleteExitButton.setPos(1.029, 0, -0.639)
        self.deleteExitButton.setScale(0.75)
        self.invFrame.reparentTo(self)
        self.invFrame.setPos(0, 0, 0)
        self.invFrame.setScale(1)
        # Trash-can click switches into book delete mode.
        self.deleteEnterButton['command'] = self.setActivateMode
        self.deleteEnterButton['extraArgs'] = ['bookDelete']
        for track in xrange(len(Tracks)):
            if self.toon.hasTrackAccess(track):
                self.showTrack(track)
                for level in xrange(len(Levels[track])):
                    button = self.buttons[track][level]
                    if self.itemIsUsable(track, level):
                        button.show()
                        # The book page is informational: buttons visible
                        # but not pressable.
                        self.makeBookUnpressable(button, track, level)
                    else:
                        button.hide()
            else:
                self.hideTrack(track)
        return None
    def bookDeactivateButtons(self):
        # Unhook the trash-can callback so a stale click can't fire.
        self.deleteEnterButton['command'] = None
        return
    def bookDeleteActivateButtons(self):
        """Book page in delete mode: owned gags become delete-pressable."""
        messenger.send('enterBookDelete')
        self.setPos(-0.2, 0, 0.4)
        self.setScale(0.8)
        self.deleteEnterButton.hide()
        self.deleteEnterButton.setPos(1.029, 0, -0.639)
        self.deleteEnterButton.setScale(0.75)
        self.deleteExitButton.show()
        self.deleteExitButton.setPos(1.029, 0, -0.639)
        self.deleteExitButton.setScale(0.75)
        self.deleteHelpText.show()
        self.invFrame.reparentTo(self)
        self.invFrame.setPos(0, 0, 0)
        self.invFrame.setScale(1)
        # Exit button restores the mode we came from.
        self.deleteExitButton['command'] = self.setActivateMode
        self.deleteExitButton['extraArgs'] = [self.previousActivateMode]
        for track in xrange(len(Tracks)):
            if self.toon.hasTrackAccess(track):
                self.showTrack(track)
                for level in xrange(len(Levels[track])):
                    button = self.buttons[track][level]
                    if self.itemIsUsable(track, level):
                        button.show()
                        if self.numItem(track, level) <= 0:
                            self.makeUnpressable(button, track, level)
                        else:
                            self.makeDeletePressable(button, track, level)
                    else:
                        button.hide()
            else:
                self.hideTrack(track)
    def bookDeleteDeactivateButtons(self):
        """Leave book delete mode and hide its help text."""
        messenger.send('exitBookDelete')
        self.deleteHelpText.hide()
        self.deleteDeactivateButtons()
    def purchaseDeleteActivateButtons(self):
        """Purchase screen in delete mode: owned regular gags are delete-pressable."""
        self.reparentTo(aspect2d)
        self.setPos(0.2, 0, -0.04)
        self.setScale(1)
        if self.purchaseFrame == None:
            # Frame is created lazily on first use.
            self.loadPurchaseFrame()
        self.purchaseFrame.show()
        self.invFrame.reparentTo(self.purchaseFrame)
        self.invFrame.setPos(-0.235, 0, 0.52)
        self.invFrame.setScale(0.81)
        self.detailFrame.setPos(1.17, 0, -0.02)
        self.detailFrame.setScale(1.25)
        self.deleteEnterButton.hide()
        self.deleteEnterButton.setPos(-0.441, 0, -0.917)
        self.deleteEnterButton.setScale(0.75)
        self.deleteExitButton.show()
        self.deleteExitButton.setPos(-0.441, 0, -0.917)
        self.deleteExitButton.setScale(0.75)
        self.deleteExitButton['command'] = self.setActivateMode
        self.deleteExitButton['extraArgs'] = [self.previousActivateMode]
        for track in xrange(len(Tracks)):
            if self.toon.hasTrackAccess(track):
                self.showTrack(track)
                for level in xrange(len(Levels[track])):
                    button = self.buttons[track][level]
                    if self.itemIsUsable(track, level):
                        button.show()
                        # Super gags (>= UBER_GAG_LEVEL_INDEX) can never be
                        # deleted here.
                        if self.numItem(track, level) <= 0 or level >= UBER_GAG_LEVEL_INDEX:
                            self.makeUnpressable(button, track, level)
                        else:
                            self.makeDeletePressable(button, track, level)
                    else:
                        button.hide()
            else:
                self.hideTrack(track)
        return
    def purchaseDeleteDeactivateButtons(self):
        """Leave purchase-delete mode.

        NOTE(review): the button re-marking loop below duplicates the
        activate routine's logic even though the next mode immediately
        re-marks everything -- confirm it is intentional and not a
        copy-paste leftover.
        """
        self.invFrame.reparentTo(self)
        self.purchaseFrame.hide()
        self.deleteDeactivateButtons()
        for track in xrange(len(Tracks)):
            if self.toon.hasTrackAccess(track):
                self.showTrack(track)
                for level in xrange(len(Levels[track])):
                    button = self.buttons[track][level]
                    if self.itemIsUsable(track, level):
                        button.show()
                        if self.numItem(track, level) <= 0 or level >= UBER_GAG_LEVEL_INDEX:
                            self.makeUnpressable(button, track, level)
                        else:
                            self.makeDeletePressable(button, track, level)
                    else:
                        button.hide()
            else:
                self.hideTrack(track)
    def storePurchaseDeleteActivateButtons(self):
        """Goofy's gag shop screen in delete mode (store layout offsets)."""
        self.reparentTo(aspect2d)
        self.setPos(0.2, 0, -0.04)
        self.setScale(1)
        if self.storePurchaseFrame == None:
            # Frame is created lazily on first use.
            self.loadStorePurchaseFrame()
        self.storePurchaseFrame.show()
        self.invFrame.reparentTo(self.storePurchaseFrame)
        self.invFrame.setPos(-0.23, 0, 0.505)
        self.invFrame.setScale(0.81)
        self.detailFrame.setPos(1.175, 0, 0)
        self.detailFrame.setScale(1.25)
        self.deleteEnterButton.hide()
        self.deleteEnterButton.setPos(-0.55, 0, -0.91)
        self.deleteEnterButton.setScale(0.75)
        self.deleteExitButton.show()
        self.deleteExitButton.setPos(-0.55, 0, -0.91)
        self.deleteExitButton.setScale(0.75)
        self.deleteExitButton['command'] = self.setActivateMode
        self.deleteExitButton['extraArgs'] = [self.previousActivateMode]
        for track in xrange(len(Tracks)):
            if self.toon.hasTrackAccess(track):
                self.showTrack(track)
                for level in xrange(len(Levels[track])):
                    button = self.buttons[track][level]
                    if self.itemIsUsable(track, level):
                        button.show()
                        # Super gags can never be deleted here.
                        if self.numItem(track, level) <= 0 or level >= UBER_GAG_LEVEL_INDEX:
                            self.makeUnpressable(button, track, level)
                        else:
                            self.makeDeletePressable(button, track, level)
                    else:
                        button.hide()
            else:
                self.hideTrack(track)
        return
    def storePurchaseDeleteDeactivateButtons(self):
        """Leave store-purchase delete mode."""
        self.invFrame.reparentTo(self)
        self.storePurchaseFrame.hide()
        self.deleteDeactivateButtons()
    def storePurchaseBrokeActivateButtons(self):
        """Gag shop screen when the toon can't afford anything: all buttons
        visible but unpressable."""
        self.reparentTo(aspect2d)
        self.setPos(0.2, 0, -0.04)
        self.setScale(1)
        if self.storePurchaseFrame == None:
            self.loadStorePurchaseFrame()
        self.storePurchaseFrame.show()
        self.invFrame.reparentTo(self.storePurchaseFrame)
        self.invFrame.setPos(-0.23, 0, 0.505)
        self.invFrame.setScale(0.81)
        self.detailFrame.setPos(1.175, 0, 0)
        self.detailFrame.setScale(1.25)
        self.deleteEnterButton.show()
        self.deleteEnterButton.setPos(-0.55, 0, -0.91)
        self.deleteEnterButton.setScale(0.75)
        self.deleteExitButton.hide()
        self.deleteExitButton.setPos(-0.551, 0, -0.91)
        self.deleteExitButton.setScale(0.75)
        for track in xrange(len(Tracks)):
            if self.toon.hasTrackAccess(track):
                self.showTrack(track)
                for level in xrange(len(Levels[track])):
                    button = self.buttons[track][level]
                    if self.itemIsUsable(track, level):
                        button.show()
                        self.makeUnpressable(button, track, level)
                    else:
                        button.hide()
            else:
                self.hideTrack(track)
        return
    def storePurchaseBrokeDeactivateButtons(self):
        # Reclaim the inventory frame and hide the store UI.
        self.invFrame.reparentTo(self)
        self.storePurchaseFrame.hide()
    def deleteActivateButtons(self):
        """Generic (full-screen) delete mode: owned gags become pressable."""
        self.reparentTo(aspect2d)
        self.setPos(0, 0, 0)
        self.setScale(1)
        self.deleteEnterButton.hide()
        self.deleteExitButton.show()
        # Exit button restores the mode we came from.
        self.deleteExitButton['command'] = self.setActivateMode
        self.deleteExitButton['extraArgs'] = [self.previousActivateMode]
        for track in xrange(len(Tracks)):
            if self.toon.hasTrackAccess(track):
                self.showTrack(track)
                for level in xrange(len(Levels[track])):
                    button = self.buttons[track][level]
                    if self.itemIsUsable(track, level):
                        button.show()
                        if self.numItem(track, level) <= 0:
                            self.makeUnpressable(button, track, level)
                        else:
                            self.makePressable(button, track, level)
                    else:
                        button.hide()
            else:
                self.hideTrack(track)
        return None
    def deleteDeactivateButtons(self):
        # Unhook the exit-delete callback so a stale click can't fire.
        self.deleteExitButton['command'] = None
        return
    def purchaseActivateButtons(self):
        """Trolley purchase screen: a gag is pressable when it is buyable
        (not maxed, room to carry, access paid for); velvet-roped gags show
        as disabled-pressable to free players."""
        self.reparentTo(aspect2d)
        self.setPos(0.2, 0, -0.04)
        self.setScale(1)
        if self.purchaseFrame == None:
            self.loadPurchaseFrame()
        self.purchaseFrame.show()
        self.invFrame.reparentTo(self.purchaseFrame)
        self.invFrame.setPos(-0.235, 0, 0.52)
        self.invFrame.setScale(0.81)
        self.detailFrame.setPos(1.17, 0, -0.02)
        self.detailFrame.setScale(1.25)
        totalProps = self.totalProps
        maxProps = self.toon.getMaxCarry()
        self.deleteEnterButton.show()
        self.deleteEnterButton.setPos(-0.441, 0, -0.917)
        self.deleteEnterButton.setScale(0.75)
        self.deleteExitButton.hide()
        self.deleteExitButton.setPos(-0.441, 0, -0.917)
        self.deleteExitButton.setScale(0.75)
        if self.gagTutMode:
            # No deleting during the gag tutorial.
            self.deleteEnterButton.hide()
        self.deleteEnterButton['command'] = self.setActivateMode
        self.deleteEnterButton['extraArgs'] = ['purchaseDelete']
        for track in xrange(len(Tracks)):
            if self.toon.hasTrackAccess(track):
                self.showTrack(track)
                for level in xrange(len(Levels[track])):
                    button = self.buttons[track][level]
                    if self.itemIsUsable(track, level):
                        button.show()
                        unpaid = not base.cr.isPaid()
                        # Not buyable: maxed out, bag full, paid-only gag
                        # for an unpaid player, or a super gag.
                        if self.numItem(track, level) >= self.getMax(track, level) or totalProps == maxProps or unpaid and gagIsPaidOnly(track, level) or level > LAST_REGULAR_GAG_LEVEL:
                            if gagIsPaidOnly(track, level):
                                self.makeDisabledPressable(button, track, level)
                            elif unpaid and gagIsVelvetRoped(track, level):
                                self.makeDisabledPressable(button, track, level)
                            else:
                                self.makeUnpressable(button, track, level)
                        elif unpaid and gagIsVelvetRoped(track, level):
                            self.makeDisabledPressable(button, track, level)
                        else:
                            self.makePressable(button, track, level)
                    else:
                        button.hide()
            else:
                self.hideTrack(track)
        return
    def purchaseDeactivateButtons(self):
        # Reclaim the inventory frame and hide the purchase UI.
        self.invFrame.reparentTo(self)
        self.purchaseFrame.hide()
    def storePurchaseActivateButtons(self):
        """Goofy's gag shop purchase screen; same pressability rules as the
        trolley purchase screen but with store layout offsets."""
        self.reparentTo(aspect2d)
        self.setPos(0.2, 0, -0.04)
        self.setScale(1)
        if self.storePurchaseFrame == None:
            self.loadStorePurchaseFrame()
        self.storePurchaseFrame.show()
        self.invFrame.reparentTo(self.storePurchaseFrame)
        self.invFrame.setPos(-0.23, 0, 0.505)
        self.invFrame.setScale(0.81)
        self.detailFrame.setPos(1.175, 0, 0)
        self.detailFrame.setScale(1.25)
        totalProps = self.totalProps
        maxProps = self.toon.getMaxCarry()
        self.deleteEnterButton.show()
        self.deleteEnterButton.setPos(-0.55, 0, -0.91)
        self.deleteEnterButton.setScale(0.75)
        self.deleteExitButton.hide()
        self.deleteExitButton.setPos(-0.55, 0, -0.91)
        self.deleteExitButton.setScale(0.75)
        self.deleteEnterButton['command'] = self.setActivateMode
        self.deleteEnterButton['extraArgs'] = ['storePurchaseDelete']
        for track in xrange(len(Tracks)):
            if self.toon.hasTrackAccess(track):
                self.showTrack(track)
                for level in xrange(len(Levels[track])):
                    button = self.buttons[track][level]
                    if self.itemIsUsable(track, level):
                        button.show()
                        unpaid = not base.cr.isPaid()
                        # Not buyable: maxed out, bag full, paid-only gag
                        # for an unpaid player, or a super gag.
                        if self.numItem(track, level) >= self.getMax(track, level) or totalProps == maxProps or unpaid and gagIsPaidOnly(track, level) or level > LAST_REGULAR_GAG_LEVEL:
                            if gagIsPaidOnly(track, level):
                                self.makeDisabledPressable(button, track, level)
                            elif unpaid and gagIsVelvetRoped(track, level):
                                self.makeDisabledPressable(button, track, level)
                            else:
                                self.makeUnpressable(button, track, level)
                        elif unpaid and gagIsVelvetRoped(track, level):
                            self.makeDisabledPressable(button, track, level)
                        else:
                            self.makePressable(button, track, level)
                    else:
                        button.hide()
            else:
                self.hideTrack(track)
        return
    def storePurchaseDeactivateButtons(self):
        # Reclaim the inventory frame and hide the store UI.
        self.invFrame.reparentTo(self)
        self.storePurchaseFrame.hide()
    def purchaseBrokeActivateButtons(self):
        """Trolley purchase screen when the toon can't afford anything:
        buttons visible but locked (except during the gag tutorial)."""
        self.reparentTo(aspect2d)
        self.setPos(0.2, 0, -0.04)
        self.setScale(1)
        if self.purchaseFrame == None:
            self.loadPurchaseFrame()
        self.purchaseFrame.show()
        self.invFrame.reparentTo(self.purchaseFrame)
        self.invFrame.setPos(-0.235, 0, 0.52)
        self.invFrame.setScale(0.81)
        self.detailFrame.setPos(1.17, 0, -0.02)
        self.detailFrame.setScale(1.25)
        self.deleteEnterButton.show()
        self.deleteEnterButton.setPos(-0.441, 0, -0.917)
        self.deleteEnterButton.setScale(0.75)
        self.deleteExitButton.hide()
        self.deleteExitButton.setPos(-0.441, 0, -0.917)
        self.deleteExitButton.setScale(0.75)
        if self.gagTutMode:
            self.deleteEnterButton.hide()
        for track in xrange(len(Tracks)):
            if self.toon.hasTrackAccess(track):
                self.showTrack(track)
                for level in xrange(len(Levels[track])):
                    button = self.buttons[track][level]
                    if self.itemIsUsable(track, level):
                        button.show()
                        if not self.gagTutMode:
                            self.makeUnpressable(button, track, level)
                    else:
                        button.hide()
            else:
                self.hideTrack(track)
        return
    def purchaseBrokeDeactivateButtons(self):
        # Reclaim the inventory frame and hide the purchase UI.
        self.invFrame.reparentTo(self)
        self.purchaseFrame.hide()
    def gagTutDisabledActivateButtons(self):
        """Gag-tutorial variant of the purchase screen with everything locked
        and the delete button hidden."""
        self.reparentTo(aspect2d)
        self.setPos(0.2, 0, -0.04)
        self.setScale(1)
        if self.purchaseFrame == None:
            self.loadPurchaseFrame()
        self.purchaseFrame.show()
        self.invFrame.reparentTo(self.purchaseFrame)
        self.invFrame.setPos(-0.235, 0, 0.52)
        self.invFrame.setScale(0.81)
        self.detailFrame.setPos(1.17, 0, -0.02)
        self.detailFrame.setScale(1.25)
        self.deleteEnterButton.show()
        self.deleteEnterButton.setPos(-0.441, 0, -0.917)
        self.deleteEnterButton.setScale(0.75)
        self.deleteExitButton.hide()
        self.deleteExitButton.setPos(-0.441, 0, -0.917)
        self.deleteExitButton.setScale(0.75)
        # Hidden again immediately: delete is never available in this mode.
        self.deleteEnterButton.hide()
        for track in xrange(len(Tracks)):
            if self.toon.hasTrackAccess(track):
                self.showTrack(track)
                for level in xrange(len(Levels[track])):
                    button = self.buttons[track][level]
                    if self.itemIsUsable(track, level):
                        button.show()
                        self.makeUnpressable(button, track, level)
                    else:
                        button.hide()
            else:
                self.hideTrack(track)
        return
    def gagTutDisabledDeactivateButtons(self):
        # Reclaim the inventory frame and hide the purchase UI.
        self.invFrame.reparentTo(self)
        self.purchaseFrame.hide()
def battleActivateButtons(self):
    """Arrange the inventory for in-battle gag selection.

    Chooses which side buttons (Run/SOS/Pass/Fire) apply to the current
    context, then marks every gag button pressable, noncredit, velvet-roped
    or unpressable according to availability and credit rules.
    """
    self.stopAndClearPropBonusIval()
    self.reparentTo(aspect2d)
    self.setPos(0, 0, 0.1)
    self.setScale(1)
    # The battle frame is built lazily on first use.
    if self.battleFrame == None:
        self.loadBattleFrame()
    self.battleFrame.show()
    self.battleFrame.setScale(0.9)
    self.invFrame.reparentTo(self.battleFrame)
    self.invFrame.setPos(-0.26, 0, 0.35)
    self.invFrame.setScale(1)
    self.detailFrame.setPos(1.125, 0, -0.08)
    self.detailFrame.setScale(1)
    self.deleteEnterButton.hide()
    self.deleteExitButton.hide()
    if self.bldg == 1:
        # Building battles: no fleeing.
        self.runButton.hide()
        self.sosButton.show()
        self.passButton.show()
    elif self.tutorialFlag == 1:
        # Tutorial battle: no side buttons at all.
        self.runButton.hide()
        self.sosButton.hide()
        self.passButton.hide()
        self.fireButton.hide()
    else:
        self.runButton.show()
        self.sosButton.show()
        self.passButton.show()
        self.fireButton.show()
    # Fire is only usable while the player holds pink slips.
    if localAvatar.getPinkSlips() > 0:
        self.fireButton['state'] = DGG.NORMAL
        self.fireButton['image_color'] = Vec4(0, 0.6, 1, 1)
    else:
        self.fireButton['state'] = DGG.DISABLED
        self.fireButton['image_color'] = Vec4(0.4, 0.4, 0.4, 1)
    for track in xrange(len(Tracks)):
        if self.toon.hasTrackAccess(track):
            self.showTrack(track)
            for level in xrange(len(Levels[track])):
                button = self.buttons[track][level]
                if self.itemIsUsable(track, level):
                    unpaid = not base.cr.isPaid()
                    button.show()
                    # Out of stock, or the battle forbids this track type.
                    if self.numItem(track, level) <= 0 or track == HEAL_TRACK and not self.heal or track == TRAP_TRACK and not self.trap or track == LURE_TRACK and not self.lure:
                        self.makeUnpressable(button, track, level)
                    elif unpaid and gagIsVelvetRoped(track, level):
                        # Free-player restriction: greyed but still clickable.
                        self.makeDisabledPressable(button, track, level)
                    elif self.itemIsCredit(track, level):
                        self.makePressable(button, track, level)
                    else:
                        self.makeNoncreditPressable(button, track, level)
                else:
                    button.hide()
        else:
            self.hideTrack(track)
    # Start flashing any prop-bonus-boosted buttons collected above.
    self.propBonusIval.loop()
    return
def battleDeactivateButtons(self):
    # Undo battleActivateButtons: reclaim the inventory frame, hide the
    # battle chrome, and stop the prop-bonus flashing.
    self.invFrame.reparentTo(self)
    self.battleFrame.hide()
    self.stopAndClearPropBonusIval()
def plantTreeActivateButtons(self):
    """Arrange the inventory for picking a gag to plant as a gag tree.

    Reuses the battle frame, repurposes the Pass button as Cancel, and makes
    only plantable gags pressable (in stock, prerequisite trees present, not
    already planted).
    """
    self.reparentTo(aspect2d)
    self.setPos(0, 0, 0.1)
    self.setScale(1)
    # The battle frame is built lazily on first use.
    if self.battleFrame == None:
        self.loadBattleFrame()
    self.battleFrame.show()
    self.battleFrame.setScale(0.9)
    self.invFrame.reparentTo(self.battleFrame)
    self.invFrame.setPos(-0.25, 0, 0.35)
    self.invFrame.setScale(1)
    self.detailFrame.setPos(1.125, 0, -0.08)
    self.detailFrame.setScale(1)
    self.deleteEnterButton.hide()
    self.deleteExitButton.hide()
    self.runButton.hide()
    self.sosButton.hide()
    # Pass doubles as the Cancel action in this mode.
    self.passButton['text'] = TTLocalizer.lCancel
    self.passButton.show()
    for track in xrange(len(Tracks)):
        if self.toon.hasTrackAccess(track):
            self.showTrack(track)
            for level in xrange(len(Levels[track])):
                button = self.buttons[track][level]
                # Level 0 needs no prerequisite trees; higher levels do.
                if self.itemIsUsable(track, level) and (level == 0 or self.toon.doIHaveRequiredTrees(track, level)):
                    button.show()
                    self.makeUnpressable(button, track, level)
                    if self.numItem(track, level) > 0:
                        if not self.toon.isTreePlanted(track, level):
                            self.makePressable(button, track, level)
                else:
                    button.hide()
        else:
            self.hideTrack(track)
    return
def plantTreeDeactivateButtons(self):
    # Restore the Pass button label, then leave the tree-planting layout.
    self.passButton['text'] = TTLocalizer.InventoryPass
    self.invFrame.reparentTo(self)
    self.battleFrame.hide()
def itemIsUsable(self, track, level):
    """Whether the toon has unlocked this gag (track, level).

    In the gag tutorial the toon's track-access list is authoritative;
    otherwise the toon must have earned the experience threshold for the
    level. Returns a bool in tutorial mode, 0/1 otherwise (as callers
    expect truthiness only).
    """
    if self.gagTutMode:
        return self.toon.getTrackAccess()[track] >= level + 1
    return 0 if self.toon.experience.getExp(track) < Levels[track][level] else 1
def itemIsCredit(self, track, level):
    """Whether using this gag would earn skill credit in the current battle.

    Returns 0 once the per-battle experience cap for the track is reached,
    1 when no credit ceiling is set, otherwise whether the gag level is
    under the ceiling. Returns None when no earned-experience data exists.
    """
    earned = self.toon.earnedExperience
    if not earned:
        return None
    if earned[track] >= ExperienceCap:
        return 0
    if self.battleCreditLevel == None:
        return 1
    return level < self.battleCreditLevel
def getMax(self, track, level):
    """Carrying capacity for a gag; the gag tutorial clamps everything to 1
    except the starter throw/squirt gags (tracks 4 and 5, level 0)."""
    if not self.gagTutMode:
        return InventoryBase.InventoryBase.getMax(self, track, level)
    if track in (4, 5) and level <= 0:
        return InventoryBase.InventoryBase.getMax(self, track, level)
    return 1
def getCurAndNextExpValues(self, track):
    """Return (current exp, exp needed for the next gag level) for a track.

    The 'next' value is the first level threshold the toon has not yet
    reached, or MaxSkill once all thresholds are passed.
    """
    curSkill = self.toon.experience.getExp(track)
    for threshold in Levels[track]:
        if curSkill < threshold:
            return (curSkill, threshold)
    return (curSkill, MaxSkill)
def makePressable(self, button, track, level):
    """Enable a gag button for normal, credit-earning use.

    A buffed shadow color indicates an organic or interactive-prop bonus;
    when the prop bonus applies to this track the button is also enrolled
    in the flashing prop-bonus interval.
    """
    organicBonus = self.toon.checkGagBonus(track, level)
    propBonus = self.checkPropBonus(track)
    bonus = organicBonus or propBonus
    if bonus:
        shadowColor = self.ShadowBuffedColor
    else:
        shadowColor = self.ShadowColor
    # Restore raised/rollover artwork and left-click handling.
    button.configure(image0_image=self.upButton, image2_image=self.rolloverButton, text_shadow=shadowColor, geom_color=self.PressableGeomColor, commandButtons=(DGG.LMB,))
    if self._interactivePropTrackBonus == track:
        button.configure(image_color=self.PropBonusPressableImageColor)
        self.addToPropBonusIval(button)
    else:
        button.configure(image_color=self.PressableImageColor)
def makeDisabledPressable(self, button, track, level):
    """Render a gag button flat/greyed but still clickable (e.g. gags a
    free player cannot use yet)."""
    buffed = self.toon.checkGagBonus(track, level) or self.checkPropBonus(track)
    shadow = self.UnpressableShadowBuffedColor if buffed else self.ShadowColor
    button.configure(text_shadow=shadow,
                     geom_color=self.UnpressableGeomColor,
                     image_image=self.flatButton,
                     commandButtons=(DGG.LMB,))
    button.configure(image_color=self.UnpressableImageColor)
def makeNoncreditPressable(self, button, track, level):
    """Enable a gag button that can be used but earns no skill credit.

    Same visual treatment as makePressable, with the noncredit color scheme.
    """
    organicBonus = self.toon.checkGagBonus(track, level)
    propBonus = self.checkPropBonus(track)
    bonus = organicBonus or propBonus
    if bonus:
        shadowColor = self.ShadowBuffedColor
    else:
        shadowColor = self.ShadowColor
    button.configure(image0_image=self.upButton, image2_image=self.rolloverButton, text_shadow=shadowColor, geom_color=self.PressableGeomColor, commandButtons=(DGG.LMB,))
    if self._interactivePropTrackBonus == track:
        button.configure(image_color=self.PropBonusNoncreditPressableImageColor)
        # Flash alongside the other prop-boosted buttons.
        self.addToPropBonusIval(button)
    else:
        button.configure(image_color=self.NoncreditPressableImageColor)
def makeDeletePressable(self, button, track, level):
    """Enable a gag button for delete mode (clicking discards the gag)."""
    buffed = self.toon.checkGagBonus(track, level) or self.checkPropBonus(track)
    shadow = self.ShadowBuffedColor if buffed else self.ShadowColor
    button.configure(image0_image=self.upButton,
                     image2_image=self.rolloverButton,
                     text_shadow=shadow,
                     geom_color=self.PressableGeomColor,
                     commandButtons=(DGG.LMB,))
    button.configure(image_color=self.DeletePressableImageColor)
def makeUnpressable(self, button, track, level):
    """Flatten a gag button and remove its click handler entirely."""
    buffed = self.toon.checkGagBonus(track, level) or self.checkPropBonus(track)
    shadow = self.UnpressableShadowBuffedColor if buffed else self.ShadowColor
    button.configure(text_shadow=shadow,
                     geom_color=self.UnpressableGeomColor,
                     image_image=self.flatButton,
                     commandButtons=())
    button.configure(image_color=self.UnpressableImageColor)
def makeBookUnpressable(self, button, track, level):
    """Flatten a gag button for display in the shtickerbook (distinct
    color scheme from the in-battle unpressable state)."""
    organicBonus = self.toon.checkGagBonus(track, level)
    propBonus = self.checkPropBonus(track)
    bonus = organicBonus or propBonus
    if bonus:
        shadowColor = self.ShadowBuffedColor
    else:
        shadowColor = self.ShadowColor
    button.configure(text_shadow=shadowColor, geom_color=self.BookUnpressableGeomColor, image_image=self.flatButton, commandButtons=())
    button.configure(image0_color=self.BookUnpressableImage0Color, image2_color=self.BookUnpressableImage2Color)
def hideTrack(self, trackIndex):
    """Collapse a track row the toon has no access to.

    NOTE(review): the name label is shown, not hidden -- it looks like the
    track name intentionally stays visible for locked tracks while the
    meter and gag buttons are hidden; confirm this is the intended design.
    """
    self.trackNameLabels[trackIndex].show()
    self.trackBars[trackIndex].hide()
    for levelIndex in xrange(0, len(Levels[trackIndex])):
        self.buttons[trackIndex][levelIndex].hide()
def showTrack(self, trackIndex):
    """Reveal a track row and set its experience meter range/caption.

    Three caption regimes: guest-capped (free players past the unpaid
    limit), uber (past the regular max, counting down to MaxSkill), and
    the normal current/next display.
    """
    self.trackNameLabels[trackIndex].show()
    self.trackBars[trackIndex].show()
    for levelIndex in xrange(0, len(Levels[trackIndex])):
        self.buttons[trackIndex][levelIndex].show()
    curExp, nextExp = self.getCurAndNextExpValues(trackIndex)
    if curExp >= UnpaidMaxSkills[trackIndex] and self.toon.getGameAccess() != OTPGlobals.AccessFull:
        self.trackBars[trackIndex]['range'] = nextExp
        self.trackBars[trackIndex]['text'] = TTLocalizer.InventoryGuestExp
    elif curExp >= regMaxSkill:
        self.trackBars[trackIndex]['range'] = UberSkill
        self.trackBars[trackIndex]['text'] = TTLocalizer.InventoryUberTrackExp % {'nextExp': MaxSkill - curExp}
    else:
        self.trackBars[trackIndex]['range'] = nextExp
        self.trackBars[trackIndex]['text'] = TTLocalizer.InventoryTrackExp % {'curExp': curExp,
         'nextExp': nextExp}
def updateInvString(self, invString):
    """Apply a new serialized inventory and refresh the on-screen buttons."""
    InventoryBase.InventoryBase.updateInvString(self, invString)
    self.updateGUI()
def updateButton(self, track, level):
    """Refresh one gag button's count text; enlarge the text when an
    organic or prop bonus applies."""
    button = self.buttons[track][level]
    button['text'] = str(self.numItem(track, level))
    buffed = self.toon.checkGagBonus(track, level) or self.checkPropBonus(track)
    button.configure(text_scale=0.05 if buffed else 0.04)
def buttonBoing(self, track, level):
    # Quick 0.2s "pop" animation: scale the button up by a third, then back.
    button = self.buttons[track][level]
    oldScale = button.getScale()
    s = Sequence(button.scaleInterval(0.1, oldScale * 1.333, blendType='easeOut'), button.scaleInterval(0.1, oldScale, blendType='easeIn'), name='inventoryButtonBoing-' + str(self.this))
    s.start()
def updateGUI(self, track = None, level = None):
    """Refresh the inventory display, then re-apply the current mode.

    With no arguments, refreshes every track meter and button; with both
    track and level, refreshes just that one button. Passing exactly one
    of the two is an error.
    """
    self.updateTotalPropsText()
    if track == None and level == None:
        # Full refresh: every meter and every button.
        for track in xrange(len(Tracks)):
            curExp, nextExp = self.getCurAndNextExpValues(track)
            if curExp >= UnpaidMaxSkills[track] and self.toon.getGameAccess() != OTPGlobals.AccessFull:
                self.trackBars[track]['range'] = nextExp
                self.trackBars[track]['text'] = TTLocalizer.InventoryGuestExp
            elif curExp >= regMaxSkill:
                # Past regular max: bar shows progress through uber skill.
                self.trackBars[track]['text'] = TTLocalizer.InventoryUberTrackExp % {'nextExp': MaxSkill - curExp}
                self.trackBars[track]['value'] = curExp - regMaxSkill
            else:
                self.trackBars[track]['text'] = TTLocalizer.InventoryTrackExp % {'curExp': curExp,
                 'nextExp': nextExp}
                self.trackBars[track]['value'] = curExp
            for level in xrange(0, len(Levels[track])):
                self.updateButton(track, level)
    elif track != None and level != None:
        self.updateButton(track, level)
    else:
        self.notify.error('Invalid use of updateGUI')
    # Re-run the active mode's button setup so pressability stays correct.
    self.__activateButtons()
    return
def getSingleGroupStr(self, track, level):
    """Localized 'affects one/all' caption for the gag detail panel.

    Toon-up wording targets toons; every other track targets cogs.
    """
    hitsEveryone = isGroup(track, level)
    if track == HEAL_TRACK:
        return (TTLocalizer.InventoryAffectsAllToons
                if hitsEveryone else TTLocalizer.InventoryAffectsOneToon)
    return (TTLocalizer.InventoryAffectsAllCogs
            if hitsEveryone else TTLocalizer.InventoryAffectsOneCog)
def getToonupDmgStr(self, track, level):
    """Column label for the gag's number: 'heal' for toon-up, 'damage'
    for every other track. (level is unused but kept for interface parity.)"""
    isHeal = track == HEAL_TRACK
    return TTLocalizer.InventoryHealString if isHeal else TTLocalizer.InventoryDamageString
def deleteItem(self, track, level):
    """Discard one gag of the given kind, if any are held, and refresh."""
    if self.numItem(track, level) <= 0:
        return
    self.useItem(track, level)
    self.updateGUI(track, level)
def loadBattleFrame(self):
    """Lazily build the in-battle chrome: the backdrop, the Run/SOS/Pass/
    Fire buttons, and the (initially hidden) tutorial prompt and arrows.
    Called once, on first entry to a battle mode.
    """
    battleModels = loader.loadModel('phase_3.5/models/gui/battle_gui')
    self.battleFrame = DirectFrame(relief=None, image=battleModels.find('**/BATTLE_Menu'), image_scale=0.8, parent=self)
    self.runButton = DirectButton(parent=self.battleFrame, relief=None, pos=(0.73, 0, -0.398), text=TTLocalizer.InventoryRun, text_scale=TTLocalizer.INrunButton, text_pos=(0, -0.02), text_fg=Vec4(1, 1, 1, 1), textMayChange=0, image=(self.upButton, self.downButton, self.rolloverButton), image_scale=1.05, image_color=(0, 0.6, 1, 1), command=self.__handleRun)
    self.sosButton = DirectButton(parent=self.battleFrame, relief=None, pos=(0.96, 0, -0.398), text=TTLocalizer.InventorySOS, text_scale=0.05, text_pos=(0, -0.02), text_fg=Vec4(1, 1, 1, 1), textMayChange=0, image=(self.upButton, self.downButton, self.rolloverButton), image_scale=1.05, image_color=(0, 0.6, 1, 1), command=self.__handleSOS)
    # Pass is textMayChange=1: tree-planting mode relabels it to Cancel.
    self.passButton = DirectButton(parent=self.battleFrame, relief=None, pos=(0.96, 0, -0.242), text=TTLocalizer.InventoryPass, text_scale=TTLocalizer.INpassButton, text_pos=(0, -0.02), text_fg=Vec4(1, 1, 1, 1), textMayChange=1, image=(self.upButton, self.downButton, self.rolloverButton), image_scale=1.05, image_color=(0, 0.6, 1, 1), command=self.__handlePass)
    self.fireButton = DirectButton(parent=self.battleFrame, relief=None, pos=(0.73, 0, -0.242), text=TTLocalizer.InventoryFire, text_scale=TTLocalizer.INfireButton, text_pos=(0, -0.02), text_fg=Vec4(1, 1, 1, 1), textMayChange=0, image=(self.upButton, self.downButton, self.rolloverButton), image_scale=1.05, image_color=(0, 0.6, 1, 1), command=self.__handleFire)
    self.tutText = DirectFrame(parent=self.battleFrame, relief=None, pos=(0.05, 0, -0.1133), scale=0.143, image=DGG.getDefaultDialogGeom(), image_scale=5.125, image_pos=(0, 0, -0.65), image_color=ToontownGlobals.GlobalDialogColor, text_scale=TTLocalizer.INclickToAttack, text=TTLocalizer.InventoryClickToAttack, textMayChange=0)
    self.tutText.hide()
    self.tutArrows = BlinkingArrows.BlinkingArrows(parent=self.battleFrame)
    # The source model node is no longer needed once the GUI is built.
    battleModels.removeNode()
    self.battleFrame.hide()
    return
def loadPurchaseFrame(self):
    # Lazily build the gag-purchase backdrop; shown by the purchase modes.
    purchaseModels = loader.loadModel('phase_4/models/gui/purchase_gui')
    self.purchaseFrame = DirectFrame(relief=None, image=purchaseModels.find('**/PurchasePanel'), image_pos=(-0.21, 0, 0.08), parent=self)
    self.purchaseFrame.setX(-.06)
    self.purchaseFrame.hide()
    purchaseModels.removeNode()
    return
def loadStorePurchaseFrame(self):
    # Lazily build the gag-shop backdrop used by the store purchase mode.
    storePurchaseModels = loader.loadModel('phase_4/models/gui/gag_shop_purchase_gui')
    self.storePurchaseFrame = DirectFrame(relief=None, image=storePurchaseModels.find('**/gagShopPanel'), image_pos=(-0.21, 0, 0.18), parent=self)
    self.storePurchaseFrame.hide()
    storePurchaseModels.removeNode()
    return
def buttonLookup(self, track, level):
    # Icon geometry for the given gag, from the preloaded inventory models.
    return self.invModels[track][level]
def enterTrackFrame(self, track, guiItem):
    # Mouse entered a track row; broadcast so listeners (e.g. the detail
    # panel) can react. guiItem is supplied by DirectGUI and unused here.
    messenger.send('enterTrackFrame', [track])
def exitTrackFrame(self, track, guiItem):
    # Mouse left a track row; mirror event of enterTrackFrame.
    messenger.send('exitTrackFrame', [track])
def checkPropBonus(self, track):
    """Return True iff the battle's interactive prop bonus applies to
    this gag track."""
    return track == self._interactivePropTrackBonus
def stopAndClearPropBonusIval(self):
    # Finish any running flash interval, then swap in an empty Parallel so
    # addToPropBonusIval() always has a container to append to.
    if self.propBonusIval and self.propBonusIval.isPlaying():
        self.propBonusIval.finish()
    self.propBonusIval = Parallel(name='dummyPropBonusIval')
def addToPropBonusIval(self, button):
    """Enroll *button* in the looping dark/bright flash that marks
    prop-bonus-boosted gags."""
    flashObject = button
    try:
        # Prefer flashing just the base image component when it exists;
        # otherwise color-scale the whole button.
        flashObject = button.component('image0')
    except:
        pass
    goDark = LerpColorScaleInterval(flashObject, 0.5, Point4(0.1, 0.1, 0.1, 1.0), Point4(1, 1, 1, 1), blendType='easeIn')
    goBright = LerpColorScaleInterval(flashObject, 0.5, Point4(1, 1, 1, 1), Point4(0.1, 0.1, 0.1, 1.0), blendType='easeOut')
    newSeq = Sequence(goDark, goBright, Wait(0.2))
    self.propBonusIval.append(newSeq)
| mit |
TheWardoctor/Wardoctors-repo | script.module.schism.common/lib/requests/packages/chardet/mbcssm.py | 1783 | 19590 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart, eError, eItsMe
# BIG5
BIG5_cls = (
1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as legal value
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
4,4,4,4,4,4,4,4, # 80 - 87
4,4,4,4,4,4,4,4, # 88 - 8f
4,4,4,4,4,4,4,4, # 90 - 97
4,4,4,4,4,4,4,4, # 98 - 9f
4,3,3,3,3,3,3,3, # a0 - a7
3,3,3,3,3,3,3,3, # a8 - af
3,3,3,3,3,3,3,3, # b0 - b7
3,3,3,3,3,3,3,3, # b8 - bf
3,3,3,3,3,3,3,3, # c0 - c7
3,3,3,3,3,3,3,3, # c8 - cf
3,3,3,3,3,3,3,3, # d0 - d7
3,3,3,3,3,3,3,3, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,3,3,3, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,3,3,0 # f8 - ff
)
BIG5_st = (
eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,#08-0f
eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart#10-17
)
# Bytes per sequence for each byte class; 0 marks classes that cannot
# start a legal Big5 sequence.
Big5CharLenTable = (0, 1, 1, 2, 0)
# State-machine bundle consumed by CodingStateMachine for Big5 detection.
Big5SMModel = {'classTable': BIG5_cls,
               'classFactor': 5,
               'stateTable': BIG5_st,
               'charLenTable': Big5CharLenTable,
               'name': 'Big5'}
# CP949
CP949_cls = (
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f
1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f
1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f
1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f
4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f
1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f
5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f
0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f
6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f
6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af
7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf
7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef
2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff
)
CP949_st = (
#cls= 0 1 2 3 4 5 6 7 8 9 # previous state =
eError,eStart, 3,eError,eStart,eStart, 4, 5,eError, 6, # eStart
eError,eError,eError,eError,eError,eError,eError,eError,eError,eError, # eError
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe, # eItsMe
eError,eError,eStart,eStart,eError,eError,eError,eStart,eStart,eStart, # 3
eError,eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 4
eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 5
eError,eStart,eStart,eStart,eStart,eError,eError,eStart,eStart,eStart, # 6
)
# Bytes per sequence for each byte class; state-machine bundle for CP949.
CP949CharLenTable = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)
CP949SMModel = {'classTable': CP949_cls,
                'classFactor': 10,
                'stateTable': CP949_st,
                'charLenTable': CP949CharLenTable,
                'name': 'CP949'}
# EUC-JP
EUCJP_cls = (
4,4,4,4,4,4,4,4, # 00 - 07
4,4,4,4,4,4,5,5, # 08 - 0f
4,4,4,4,4,4,4,4, # 10 - 17
4,4,4,5,4,4,4,4, # 18 - 1f
4,4,4,4,4,4,4,4, # 20 - 27
4,4,4,4,4,4,4,4, # 28 - 2f
4,4,4,4,4,4,4,4, # 30 - 37
4,4,4,4,4,4,4,4, # 38 - 3f
4,4,4,4,4,4,4,4, # 40 - 47
4,4,4,4,4,4,4,4, # 48 - 4f
4,4,4,4,4,4,4,4, # 50 - 57
4,4,4,4,4,4,4,4, # 58 - 5f
4,4,4,4,4,4,4,4, # 60 - 67
4,4,4,4,4,4,4,4, # 68 - 6f
4,4,4,4,4,4,4,4, # 70 - 77
4,4,4,4,4,4,4,4, # 78 - 7f
5,5,5,5,5,5,5,5, # 80 - 87
5,5,5,5,5,5,1,3, # 88 - 8f
5,5,5,5,5,5,5,5, # 90 - 97
5,5,5,5,5,5,5,5, # 98 - 9f
5,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,0,5 # f8 - ff
)
EUCJP_st = (
3, 4, 3, 5,eStart,eError,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eStart,eError,eStart,eError,eError,eError,#10-17
eError,eError,eStart,eError,eError,eError, 3,eError,#18-1f
3,eError,eError,eError,eStart,eStart,eStart,eStart#20-27
)
# Bytes per sequence for each byte class; state-machine bundle for EUC-JP.
EUCJPCharLenTable = (2, 2, 2, 3, 1, 0)
EUCJPSMModel = {'classTable': EUCJP_cls,
                'classFactor': 6,
                'stateTable': EUCJP_st,
                'charLenTable': EUCJPCharLenTable,
                'name': 'EUC-JP'}
# EUC-KR
EUCKR_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
1,1,1,1,1,1,1,1, # 40 - 47
1,1,1,1,1,1,1,1, # 48 - 4f
1,1,1,1,1,1,1,1, # 50 - 57
1,1,1,1,1,1,1,1, # 58 - 5f
1,1,1,1,1,1,1,1, # 60 - 67
1,1,1,1,1,1,1,1, # 68 - 6f
1,1,1,1,1,1,1,1, # 70 - 77
1,1,1,1,1,1,1,1, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,3,3,3, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,3,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,0 # f8 - ff
)
EUCKR_st = (
eError,eStart, 3,eError,eError,eError,eError,eError,#00-07
eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,eStart #08-0f
)
# Bytes per sequence for each byte class; state-machine bundle for EUC-KR.
EUCKRCharLenTable = (0, 1, 2, 0)
EUCKRSMModel = {'classTable': EUCKR_cls,
                'classFactor': 4,
                'stateTable': EUCKR_st,
                'charLenTable': EUCKRCharLenTable,
                'name': 'EUC-KR'}
# EUC-TW
EUCTW_cls = (
2,2,2,2,2,2,2,2, # 00 - 07
2,2,2,2,2,2,0,0, # 08 - 0f
2,2,2,2,2,2,2,2, # 10 - 17
2,2,2,0,2,2,2,2, # 18 - 1f
2,2,2,2,2,2,2,2, # 20 - 27
2,2,2,2,2,2,2,2, # 28 - 2f
2,2,2,2,2,2,2,2, # 30 - 37
2,2,2,2,2,2,2,2, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,2, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,6,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,3,4,4,4,4,4,4, # a0 - a7
5,5,1,1,1,1,1,1, # a8 - af
1,1,1,1,1,1,1,1, # b0 - b7
1,1,1,1,1,1,1,1, # b8 - bf
1,1,3,1,3,3,3,3, # c0 - c7
3,3,3,3,3,3,3,3, # c8 - cf
3,3,3,3,3,3,3,3, # d0 - d7
3,3,3,3,3,3,3,3, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,3,3,3, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,3,3,0 # f8 - ff
)
EUCTW_st = (
eError,eError,eStart, 3, 3, 3, 4,eError,#00-07
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eStart,eError,#10-17
eStart,eStart,eStart,eError,eError,eError,eError,eError,#18-1f
5,eError,eError,eError,eStart,eError,eStart,eStart,#20-27
eStart,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
)
# Bytes per sequence for each byte class; state-machine bundle for EUC-TW.
EUCTWCharLenTable = (0, 0, 1, 2, 2, 2, 3)
EUCTWSMModel = {'classTable': EUCTW_cls,
                'classFactor': 7,
                'stateTable': EUCTW_st,
                'charLenTable': EUCTWCharLenTable,
                'name': 'x-euc-tw'}
# GB2312
GB2312_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
3,3,3,3,3,3,3,3, # 30 - 37
3,3,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,4, # 78 - 7f
5,6,6,6,6,6,6,6, # 80 - 87
6,6,6,6,6,6,6,6, # 88 - 8f
6,6,6,6,6,6,6,6, # 90 - 97
6,6,6,6,6,6,6,6, # 98 - 9f
6,6,6,6,6,6,6,6, # a0 - a7
6,6,6,6,6,6,6,6, # a8 - af
6,6,6,6,6,6,6,6, # b0 - b7
6,6,6,6,6,6,6,6, # b8 - bf
6,6,6,6,6,6,6,6, # c0 - c7
6,6,6,6,6,6,6,6, # c8 - cf
6,6,6,6,6,6,6,6, # d0 - d7
6,6,6,6,6,6,6,6, # d8 - df
6,6,6,6,6,6,6,6, # e0 - e7
6,6,6,6,6,6,6,6, # e8 - ef
6,6,6,6,6,6,6,6, # f0 - f7
6,6,6,6,6,6,6,0 # f8 - ff
)
GB2312_st = (
eError,eStart,eStart,eStart,eStart,eStart, 3,eError,#00-07
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,eStart,#10-17
4,eError,eStart,eStart,eError,eError,eError,eError,#18-1f
eError,eError, 5,eError,eError,eError,eItsMe,eError,#20-27
eError,eError,eStart,eStart,eStart,eStart,eStart,eStart #28-2f
)
# To be accurate, the length of class 6 can be either 2 or 4.
# But it is not necessary to discriminate between the two since
# it is used for frequency analysis only, and we are validating
# each code range there as well. So it is safe to set it to be
# 2 here.
# Bytes per sequence for each byte class (class 6 deliberately reported
# as 2 -- see the note above); state-machine bundle for GB2312.
GB2312CharLenTable = (0, 1, 1, 1, 1, 1, 2)
GB2312SMModel = {'classTable': GB2312_cls,
                 'classFactor': 7,
                 'stateTable': GB2312_st,
                 'charLenTable': GB2312CharLenTable,
                 'name': 'GB2312'}
# Shift_JIS
SJIS_cls = (
1,1,1,1,1,1,1,1, # 00 - 07
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
2,2,2,2,2,2,2,2, # 40 - 47
2,2,2,2,2,2,2,2, # 48 - 4f
2,2,2,2,2,2,2,2, # 50 - 57
2,2,2,2,2,2,2,2, # 58 - 5f
2,2,2,2,2,2,2,2, # 60 - 67
2,2,2,2,2,2,2,2, # 68 - 6f
2,2,2,2,2,2,2,2, # 70 - 77
2,2,2,2,2,2,2,1, # 78 - 7f
3,3,3,3,3,2,2,3, # 80 - 87
3,3,3,3,3,3,3,3, # 88 - 8f
3,3,3,3,3,3,3,3, # 90 - 97
3,3,3,3,3,3,3,3, # 98 - 9f
#0xa0 is illegal in sjis encoding, but some pages do
#contain such bytes. We need to be more error-forgiving.
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
3,3,3,3,3,3,3,3, # e0 - e7
3,3,3,3,3,4,4,4, # e8 - ef
3,3,3,3,3,3,3,3, # f0 - f7
3,3,3,3,3,0,0,0) # f8 - ff
SJIS_st = (
eError,eStart,eStart, 3,eError,eError,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe,eError,eError,eStart,eStart,eStart,eStart #10-17
)
# Bytes per sequence for each byte class; state-machine bundle for Shift_JIS.
SJISCharLenTable = (0, 1, 1, 2, 0, 0)
SJISSMModel = {'classTable': SJIS_cls,
               'classFactor': 6,
               'stateTable': SJIS_st,
               'charLenTable': SJISCharLenTable,
               'name': 'Shift_JIS'}
# UCS2-BE
UCS2BE_cls = (
0,0,0,0,0,0,0,0, # 00 - 07
0,0,1,0,0,2,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,3,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,3,3,3,3,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,0,0,0,0,0,0,0, # a0 - a7
0,0,0,0,0,0,0,0, # a8 - af
0,0,0,0,0,0,0,0, # b0 - b7
0,0,0,0,0,0,0,0, # b8 - bf
0,0,0,0,0,0,0,0, # c0 - c7
0,0,0,0,0,0,0,0, # c8 - cf
0,0,0,0,0,0,0,0, # d0 - d7
0,0,0,0,0,0,0,0, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,4,5 # f8 - ff
)
UCS2BE_st = (
5, 7, 7,eError, 4, 3,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe, 6, 6, 6, 6,eError,eError,#10-17
6, 6, 6, 6, 6,eItsMe, 6, 6,#18-1f
6, 6, 6, 6, 5, 7, 7,eError,#20-27
5, 8, 6, 6,eError, 6, 6, 6,#28-2f
6, 6, 6, 6,eError,eError,eStart,eStart #30-37
)
# Bytes per sequence for each byte class; state-machine bundle for UTF-16BE.
UCS2BECharLenTable = (2, 2, 2, 0, 2, 2)
UCS2BESMModel = {'classTable': UCS2BE_cls,
                 'classFactor': 6,
                 'stateTable': UCS2BE_st,
                 'charLenTable': UCS2BECharLenTable,
                 'name': 'UTF-16BE'}
# UCS2-LE
UCS2LE_cls = (
0,0,0,0,0,0,0,0, # 00 - 07
0,0,1,0,0,2,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,3,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,3,3,3,3,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
0,0,0,0,0,0,0,0, # 80 - 87
0,0,0,0,0,0,0,0, # 88 - 8f
0,0,0,0,0,0,0,0, # 90 - 97
0,0,0,0,0,0,0,0, # 98 - 9f
0,0,0,0,0,0,0,0, # a0 - a7
0,0,0,0,0,0,0,0, # a8 - af
0,0,0,0,0,0,0,0, # b0 - b7
0,0,0,0,0,0,0,0, # b8 - bf
0,0,0,0,0,0,0,0, # c0 - c7
0,0,0,0,0,0,0,0, # c8 - cf
0,0,0,0,0,0,0,0, # d0 - d7
0,0,0,0,0,0,0,0, # d8 - df
0,0,0,0,0,0,0,0, # e0 - e7
0,0,0,0,0,0,0,0, # e8 - ef
0,0,0,0,0,0,0,0, # f0 - f7
0,0,0,0,0,0,4,5 # f8 - ff
)
UCS2LE_st = (
6, 6, 7, 6, 4, 3,eError,eError,#00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,#08-0f
eItsMe,eItsMe, 5, 5, 5,eError,eItsMe,eError,#10-17
5, 5, 5,eError, 5,eError, 6, 6,#18-1f
7, 6, 8, 8, 5, 5, 5,eError,#20-27
5, 5, 5,eError,eError,eError, 5, 5,#28-2f
5, 5, 5,eError, 5,eError,eStart,eStart #30-37
)
# Bytes per sequence for each byte class; state-machine bundle for UTF-16LE.
UCS2LECharLenTable = (2, 2, 2, 2, 2, 2)
UCS2LESMModel = {'classTable': UCS2LE_cls,
                 'classFactor': 6,
                 'stateTable': UCS2LE_st,
                 'charLenTable': UCS2LECharLenTable,
                 'name': 'UTF-16LE'}
# UTF-8
UTF8_cls = (
1,1,1,1,1,1,1,1, # 00 - 07 #allow 0x00 as a legal value
1,1,1,1,1,1,0,0, # 08 - 0f
1,1,1,1,1,1,1,1, # 10 - 17
1,1,1,0,1,1,1,1, # 18 - 1f
1,1,1,1,1,1,1,1, # 20 - 27
1,1,1,1,1,1,1,1, # 28 - 2f
1,1,1,1,1,1,1,1, # 30 - 37
1,1,1,1,1,1,1,1, # 38 - 3f
1,1,1,1,1,1,1,1, # 40 - 47
1,1,1,1,1,1,1,1, # 48 - 4f
1,1,1,1,1,1,1,1, # 50 - 57
1,1,1,1,1,1,1,1, # 58 - 5f
1,1,1,1,1,1,1,1, # 60 - 67
1,1,1,1,1,1,1,1, # 68 - 6f
1,1,1,1,1,1,1,1, # 70 - 77
1,1,1,1,1,1,1,1, # 78 - 7f
2,2,2,2,3,3,3,3, # 80 - 87
4,4,4,4,4,4,4,4, # 88 - 8f
4,4,4,4,4,4,4,4, # 90 - 97
4,4,4,4,4,4,4,4, # 98 - 9f
5,5,5,5,5,5,5,5, # a0 - a7
5,5,5,5,5,5,5,5, # a8 - af
5,5,5,5,5,5,5,5, # b0 - b7
5,5,5,5,5,5,5,5, # b8 - bf
0,0,6,6,6,6,6,6, # c0 - c7
6,6,6,6,6,6,6,6, # c8 - cf
6,6,6,6,6,6,6,6, # d0 - d7
6,6,6,6,6,6,6,6, # d8 - df
7,8,8,8,8,8,8,8, # e0 - e7
8,8,8,8,8,9,8,8, # e8 - ef
10,11,11,11,11,11,11,11, # f0 - f7
12,13,13,13,14,15,0,0 # f8 - ff
)
UTF8_st = (
eError,eStart,eError,eError,eError,eError, 12, 10,#00-07
9, 11, 8, 7, 6, 5, 4, 3,#08-0f
eError,eError,eError,eError,eError,eError,eError,eError,#10-17
eError,eError,eError,eError,eError,eError,eError,eError,#18-1f
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#20-27
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,#28-2f
eError,eError, 5, 5, 5, 5,eError,eError,#30-37
eError,eError,eError,eError,eError,eError,eError,eError,#38-3f
eError,eError,eError, 5, 5, 5,eError,eError,#40-47
eError,eError,eError,eError,eError,eError,eError,eError,#48-4f
eError,eError, 7, 7, 7, 7,eError,eError,#50-57
eError,eError,eError,eError,eError,eError,eError,eError,#58-5f
eError,eError,eError,eError, 7, 7,eError,eError,#60-67
eError,eError,eError,eError,eError,eError,eError,eError,#68-6f
eError,eError, 9, 9, 9, 9,eError,eError,#70-77
eError,eError,eError,eError,eError,eError,eError,eError,#78-7f
eError,eError,eError,eError,eError, 9,eError,eError,#80-87
eError,eError,eError,eError,eError,eError,eError,eError,#88-8f
eError,eError, 12, 12, 12, 12,eError,eError,#90-97
eError,eError,eError,eError,eError,eError,eError,eError,#98-9f
eError,eError,eError,eError,eError, 12,eError,eError,#a0-a7
eError,eError,eError,eError,eError,eError,eError,eError,#a8-af
eError,eError, 12, 12, 12,eError,eError,eError,#b0-b7
eError,eError,eError,eError,eError,eError,eError,eError,#b8-bf
eError,eError,eStart,eStart,eStart,eStart,eError,eError,#c0-c7
eError,eError,eError,eError,eError,eError,eError,eError #c8-cf
)
# Bytes per sequence for each byte class; state-machine bundle for UTF-8.
UTF8CharLenTable = (0, 1, 0, 0, 0, 0, 2, 3, 3, 3, 4, 4, 5, 5, 6, 6)
UTF8SMModel = {'classTable': UTF8_cls,
               'classFactor': 16,
               'stateTable': UTF8_st,
               'charLenTable': UTF8CharLenTable,
               'name': 'UTF-8'}
| apache-2.0 |
athkishore/vgr | tests/test_user_model.py | 16 | 5437 | import unittest
import time
from datetime import datetime
from app import create_app, db
from app.models import User, AnonymousUser, Role, Permission
class UserModelTestCase(unittest.TestCase):
def setUp(self):
    # Build an isolated testing app + context and a fresh schema (with the
    # default roles) before every test.
    self.app = create_app('testing')
    self.app_context = self.app.app_context()
    self.app_context.push()
    db.create_all()
    Role.insert_roles()
def tearDown(self):
    # Drop everything created in setUp so tests stay independent.
    db.session.remove()
    db.drop_all()
    self.app_context.pop()
def test_password_setter(self):
    """Assigning a password must store a derived hash."""
    u = User(password='cat')
    # assertIsNotNone gives a clearer failure than assertTrue(x is not None).
    self.assertIsNotNone(u.password_hash)
def test_no_password_getter(self):
    # The password property is write-only; reading it must raise.
    u = User(password='cat')
    with self.assertRaises(AttributeError):
        u.password
def test_password_verification(self):
    # verify_password checks a candidate against the stored hash.
    u = User(password='cat')
    self.assertTrue(u.verify_password('cat'))
    self.assertFalse(u.verify_password('dog'))
def test_password_salts_are_random(self):
    """Identical passwords must hash differently thanks to per-user salts."""
    u = User(password='cat')
    u2 = User(password='cat')
    # assertNotEqual shows both hashes on failure, unlike assertTrue(a != b).
    self.assertNotEqual(u.password_hash, u2.password_hash)
def test_valid_confirmation_token(self):
    # A token generated by a user must confirm that same user.
    u = User(password='cat')
    db.session.add(u)
    db.session.commit()
    token = u.generate_confirmation_token()
    self.assertTrue(u.confirm(token))
def test_invalid_confirmation_token(self):
u1 = User(password='cat')
u2 = User(password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_confirmation_token()
self.assertFalse(u2.confirm(token))
def test_expired_confirmation_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_confirmation_token(1)
time.sleep(2)
self.assertFalse(u.confirm(token))
def test_valid_reset_token(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_reset_token()
self.assertTrue(u.reset_password(token, 'dog'))
self.assertTrue(u.verify_password('dog'))
def test_invalid_reset_token(self):
u1 = User(password='cat')
u2 = User(password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_reset_token()
self.assertFalse(u2.reset_password(token, 'horse'))
self.assertTrue(u2.verify_password('dog'))
def test_valid_email_change_token(self):
u = User(email='john@example.com', password='cat')
db.session.add(u)
db.session.commit()
token = u.generate_email_change_token('susan@example.org')
self.assertTrue(u.change_email(token))
self.assertTrue(u.email == 'susan@example.org')
def test_invalid_email_change_token(self):
u1 = User(email='john@example.com', password='cat')
u2 = User(email='susan@example.org', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u1.generate_email_change_token('david@example.net')
self.assertFalse(u2.change_email(token))
self.assertTrue(u2.email == 'susan@example.org')
def test_duplicate_email_change_token(self):
u1 = User(email='john@example.com', password='cat')
u2 = User(email='susan@example.org', password='dog')
db.session.add(u1)
db.session.add(u2)
db.session.commit()
token = u2.generate_email_change_token('john@example.com')
self.assertFalse(u2.change_email(token))
self.assertTrue(u2.email == 'susan@example.org')
def test_roles_and_permissions(self):
u = User(email='john@example.com', password='cat')
self.assertTrue(u.can(Permission.WRITE_ARTICLES))
self.assertFalse(u.can(Permission.MODERATE_COMMENTS))
def test_anonymous_user(self):
u = AnonymousUser()
self.assertFalse(u.can(Permission.FOLLOW))
def test_timestamps(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
self.assertTrue(
(datetime.utcnow() - u.member_since).total_seconds() < 3)
self.assertTrue(
(datetime.utcnow() - u.last_seen).total_seconds() < 3)
def test_ping(self):
u = User(password='cat')
db.session.add(u)
db.session.commit()
time.sleep(2)
last_seen_before = u.last_seen
u.ping()
self.assertTrue(u.last_seen > last_seen_before)
def test_gravatar(self):
u = User(email='john@example.com', password='cat')
with self.app.test_request_context('/'):
gravatar = u.gravatar()
gravatar_256 = u.gravatar(size=256)
gravatar_pg = u.gravatar(rating='pg')
gravatar_retro = u.gravatar(default='retro')
with self.app.test_request_context('/', base_url='https://example.com'):
gravatar_ssl = u.gravatar()
self.assertTrue('http://www.gravatar.com/avatar/' +
'd4c74594d841139328695756648b6bd6'in gravatar)
self.assertTrue('s=256' in gravatar_256)
self.assertTrue('r=pg' in gravatar_pg)
self.assertTrue('d=retro' in gravatar_retro)
self.assertTrue('https://secure.gravatar.com/avatar/' +
'd4c74594d841139328695756648b6bd6' in gravatar_ssl)
| mit |
yasirkhan380/Tutorials | notebooks/fig_code/svm_gui.py | 47 | 11549 | """
==========
Libsvm GUI
==========
A simple graphical frontend for Libsvm mainly intended for didactic
purposes. You can create data points by point and click and visualize
the decision region induced by different kernels and parameter settings.
To create positive examples click the left mouse button; to create
negative examples click the right button.
If all examples are from the same class, it uses a one-class SVM.
"""
from __future__ import division, print_function
print(__doc__)
# Author: Peter Prettenhoer <peter.prettenhofer@gmail.com>
#
# License: BSD 3 clause
import matplotlib
matplotlib.use('TkAgg')
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.backends.backend_tkagg import NavigationToolbar2TkAgg
from matplotlib.figure import Figure
from matplotlib.contour import ContourSet
import Tkinter as Tk
import sys
import numpy as np
from sklearn import svm
from sklearn.datasets import dump_svmlight_file
from sklearn.externals.six.moves import xrange
# Plot-area bounds (data coordinates) shared by the canvas and the
# decision-surface grid.
y_min, y_max = -50, 50
x_min, x_max = -50, 50
class Model(object):
    """Holds the clicked training points and the fitted decision surface.

    Implements the observable half of the observer pattern: every
    registered observer is notified through ``observer.update(event, model)``
    whenever the model changes.
    """

    def __init__(self):
        self.observers = []
        self.surface = None
        self.data = []
        self.cls = None
        self.surface_type = 0

    def changed(self, event):
        """Broadcast *event* to every registered observer."""
        for obs in self.observers:
            obs.update(event, self)

    def add_observer(self, observer):
        """Register *observer* for subsequent change notifications."""
        self.observers.append(observer)

    def set_surface(self, surface):
        """Store the (X1, X2, Z) decision-surface grid."""
        self.surface = surface

    def dump_svmlight_file(self, file):
        """Write the collected (x, y, label) points to *file* in svmlight format."""
        points = np.array(self.data)
        features, labels = points[:, 0:2], points[:, 2]
        dump_svmlight_file(features, labels, file)
class Controller(object):
    """Mediates between the Tk widgets and the Model: fits an SVM on the
    clicked points and publishes the resulting decision surface."""

    def __init__(self, model):
        self.model = model
        self.kernel = Tk.IntVar()
        self.surface_type = Tk.IntVar()
        # Whether or not a model has been fitted
        self.fitted = False

    def fit(self):
        """Fit an SVM on the current data and notify observers.

        NOTE(review): self.complexity / gamma / degree / coef0 are Tk
        variables attached to this controller by ControllBar.__init__;
        fit() must not be called before the control bar is built.
        """
        print("fit the model")
        train = np.array(self.model.data)
        X = train[:, 0:2]
        y = train[:, 2]

        C = float(self.complexity.get())
        gamma = float(self.gamma.get())
        coef0 = float(self.coef0.get())
        degree = int(self.degree.get())
        kernel_map = {0: "linear", 1: "rbf", 2: "poly"}
        # With only one class present, fall back to one-class SVM
        # (novelty detection); otherwise a standard SVC.
        if len(np.unique(y)) == 1:
            clf = svm.OneClassSVM(kernel=kernel_map[self.kernel.get()],
                                  gamma=gamma, coef0=coef0, degree=degree)
            clf.fit(X)
        else:
            clf = svm.SVC(kernel=kernel_map[self.kernel.get()], C=C,
                          gamma=gamma, coef0=coef0, degree=degree)
            clf.fit(X, y)
        if hasattr(clf, 'score'):
            print("Accuracy:", clf.score(X, y) * 100)
        X1, X2, Z = self.decision_surface(clf)
        self.model.clf = clf
        self.model.set_surface((X1, X2, Z))
        self.model.surface_type = self.surface_type.get()
        self.fitted = True
        self.model.changed("surface")

    def decision_surface(self, cls):
        """Evaluate cls.decision_function on a regular grid over the plot area."""
        delta = 1
        x = np.arange(x_min, x_max + delta, delta)
        y = np.arange(y_min, y_max + delta, delta)
        X1, X2 = np.meshgrid(x, y)
        Z = cls.decision_function(np.c_[X1.ravel(), X2.ravel()])
        Z = Z.reshape(X1.shape)
        return X1, X2, Z

    def clear_data(self):
        """Drop all clicked points and notify observers."""
        self.model.data = []
        self.fitted = False
        self.model.changed("clear")

    def add_example(self, x, y, label):
        """Append one (x, y, label) point and notify observers."""
        self.model.data.append((x, y, label))
        self.model.changed("example_added")

        # update decision surface if already fitted.
        self.refit()

    def refit(self):
        """Refit the model if already fitted. """
        if self.fitted:
            self.fit()
class View(object):
    """Matplotlib/Tk view: renders the points and decision surface and
    translates mouse/keyboard events into controller calls.

    Left click (button 1) adds a positive example; right click (button 3)
    or shift+click adds a negative one.
    """

    def __init__(self, root, controller):
        f = Figure()
        ax = f.add_subplot(111)
        ax.set_xticks([])
        ax.set_yticks([])
        ax.set_xlim((x_min, x_max))
        ax.set_ylim((y_min, y_max))
        canvas = FigureCanvasTkAgg(f, master=root)
        canvas.show()
        canvas.get_tk_widget().pack(side=Tk.TOP, fill=Tk.BOTH, expand=1)
        canvas._tkcanvas.pack(side=Tk.TOP, fill=Tk.BOTH, expand=1)
        # Route matplotlib events to the handlers below.
        canvas.mpl_connect('key_press_event', self.onkeypress)
        canvas.mpl_connect('key_release_event', self.onkeyrelease)
        canvas.mpl_connect('button_press_event', self.onclick)
        toolbar = NavigationToolbar2TkAgg(canvas, root)
        toolbar.update()
        self.shift_down = False
        self.controllbar = ControllBar(root, controller)
        self.f = f
        self.ax = ax
        self.canvas = canvas
        self.controller = controller
        self.contours = []
        self.c_labels = None
        self.plot_kernels()

    def plot_kernels(self):
        # Kernel formulas rendered below the plot area.
        self.ax.text(-50, -60, "Linear: $u^T v$")
        self.ax.text(-20, -60, "RBF: $\exp (-\gamma \| u-v \|^2)$")
        self.ax.text(10, -60, "Poly: $(\gamma \, u^T v + r)^d$")

    def onkeypress(self, event):
        if event.key == "shift":
            self.shift_down = True

    def onkeyrelease(self, event):
        if event.key == "shift":
            self.shift_down = False

    def onclick(self, event):
        # Clicks outside the axes have xdata/ydata == None and are ignored.
        if event.xdata and event.ydata:
            if self.shift_down or event.button == 3:
                self.controller.add_example(event.xdata, event.ydata, -1)
            elif event.button == 1:
                self.controller.add_example(event.xdata, event.ydata, 1)

    def update_example(self, model, idx):
        # Draw a single data point: white for +1, black for -1.
        x, y, l = model.data[idx]
        if l == 1:
            color = 'w'
        elif l == -1:
            color = 'k'
        self.ax.plot([x], [y], "%so" % color, scalex=0.0, scaley=0.0)

    def update(self, event, model):
        """Observer callback: redraw according to the model change event."""
        if event == "examples_loaded":
            for i in xrange(len(model.data)):
                self.update_example(model, i)

        if event == "example_added":
            self.update_example(model, -1)

        if event == "clear":
            self.ax.clear()
            self.ax.set_xticks([])
            self.ax.set_yticks([])
            self.contours = []
            self.c_labels = None
            self.plot_kernels()

        if event == "surface":
            self.remove_surface()
            self.plot_support_vectors(model.clf.support_vectors_)
            self.plot_decision_surface(model.surface, model.surface_type)

        self.canvas.draw()

    def remove_surface(self):
        """Remove old decision surface."""
        if len(self.contours) > 0:
            for contour in self.contours:
                if isinstance(contour, ContourSet):
                    for lineset in contour.collections:
                        lineset.remove()
                else:
                    contour.remove()
            self.contours = []

    def plot_support_vectors(self, support_vectors):
        """Plot the support vectors by placing circles over the
        corresponding data points and adds the circle collection
        to the contours list."""
        cs = self.ax.scatter(support_vectors[:, 0], support_vectors[:, 1],
                             s=80, edgecolors="k", facecolors="none")
        self.contours.append(cs)

    def plot_decision_surface(self, surface, type):
        # type 0: decision boundary + margins as contour lines;
        # type 1: filled decision-function surface.
        X1, X2, Z = surface
        if type == 0:
            levels = [-1.0, 0.0, 1.0]
            linestyles = ['dashed', 'solid', 'dashed']
            colors = 'k'
            self.contours.append(self.ax.contour(X1, X2, Z, levels,
                                                 colors=colors,
                                                 linestyles=linestyles))
        elif type == 1:
            self.contours.append(self.ax.contourf(X1, X2, Z, 10,
                                                  cmap=matplotlib.cm.bone,
                                                  origin='lower', alpha=0.85))
            self.contours.append(self.ax.contour(X1, X2, Z, [0.0], colors='k',
                                                 linestyles=['solid']))
        else:
            raise ValueError("surface type unknown")
class ControllBar(object):
    """Bottom control bar: kernel selection, hyper-parameter entries,
    surface-type selection and the Fit/Clear buttons.

    NOTE(review): attaches the Tk variables complexity/gamma/degree/coef0
    to the *controller*, which Controller.fit() later reads.
    """

    def __init__(self, root, controller):
        fm = Tk.Frame(root)
        # Kernel radio buttons (values match Controller's kernel_map).
        kernel_group = Tk.Frame(fm)
        Tk.Radiobutton(kernel_group, text="Linear", variable=controller.kernel,
                       value=0, command=controller.refit).pack(anchor=Tk.W)
        Tk.Radiobutton(kernel_group, text="RBF", variable=controller.kernel,
                       value=1, command=controller.refit).pack(anchor=Tk.W)
        Tk.Radiobutton(kernel_group, text="Poly", variable=controller.kernel,
                       value=2, command=controller.refit).pack(anchor=Tk.W)
        kernel_group.pack(side=Tk.LEFT)

        # Hyper-parameter entry fields.
        valbox = Tk.Frame(fm)
        controller.complexity = Tk.StringVar()
        controller.complexity.set("1.0")
        c = Tk.Frame(valbox)
        Tk.Label(c, text="C:", anchor="e", width=7).pack(side=Tk.LEFT)
        Tk.Entry(c, width=6, textvariable=controller.complexity).pack(
            side=Tk.LEFT)
        c.pack()

        controller.gamma = Tk.StringVar()
        controller.gamma.set("0.01")
        g = Tk.Frame(valbox)
        Tk.Label(g, text="gamma:", anchor="e", width=7).pack(side=Tk.LEFT)
        Tk.Entry(g, width=6, textvariable=controller.gamma).pack(side=Tk.LEFT)
        g.pack()

        controller.degree = Tk.StringVar()
        controller.degree.set("3")
        d = Tk.Frame(valbox)
        Tk.Label(d, text="degree:", anchor="e", width=7).pack(side=Tk.LEFT)
        Tk.Entry(d, width=6, textvariable=controller.degree).pack(side=Tk.LEFT)
        d.pack()

        controller.coef0 = Tk.StringVar()
        controller.coef0.set("0")
        r = Tk.Frame(valbox)
        Tk.Label(r, text="coef0:", anchor="e", width=7).pack(side=Tk.LEFT)
        Tk.Entry(r, width=6, textvariable=controller.coef0).pack(side=Tk.LEFT)
        r.pack()
        valbox.pack(side=Tk.LEFT)

        # Surface rendering mode (see View.plot_decision_surface).
        cmap_group = Tk.Frame(fm)
        Tk.Radiobutton(cmap_group, text="Hyperplanes",
                       variable=controller.surface_type, value=0,
                       command=controller.refit).pack(anchor=Tk.W)
        Tk.Radiobutton(cmap_group, text="Surface",
                       variable=controller.surface_type, value=1,
                       command=controller.refit).pack(anchor=Tk.W)

        cmap_group.pack(side=Tk.LEFT)

        train_button = Tk.Button(fm, text='Fit', width=5,
                                 command=controller.fit)
        train_button.pack()
        fm.pack(side=Tk.LEFT)
        Tk.Button(fm, text='Clear', width=5,
                  command=controller.clear_data).pack(side=Tk.LEFT)
def get_parser():
    """Build the command-line option parser for the GUI script."""
    from optparse import OptionParser

    parser = OptionParser()
    parser.add_option(
        "--output",
        action="store",
        type="str",
        dest="output",
        help="Path where to dump data.",
    )
    return parser
def main(argv):
    """Wire up the MVC triad and run the Tk event loop.

    After the window is closed, the collected points are optionally
    dumped in svmlight format to the path given via --output.
    """
    op = get_parser()
    opts, args = op.parse_args(argv[1:])
    root = Tk.Tk()
    model = Model()
    controller = Controller(model)
    root.wm_title("Scikit-learn Libsvm GUI")
    view = View(root, controller)
    model.add_observer(view)
    Tk.mainloop()

    # Reached only after the GUI has been closed.
    if opts.output:
        model.dump_svmlight_file(opts.output)

if __name__ == "__main__":
    main(sys.argv)
| bsd-3-clause |
jlelli/sched-deadline | tools/perf/scripts/python/syscall-counts.py | 11181 | 1522 | # system call counts
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts.py [comm]\n";

# Optional [comm] argument restricts counting to one process name.
for_comm = None

if len(sys.argv) > 2:
    sys.exit(usage)

if len(sys.argv) > 1:
    for_comm = sys.argv[1]

# syscall id -> invocation count; autodict creates entries on demand.
syscalls = autodict()
def trace_begin():
    # Called once by perf before event processing starts (Python 2 script).
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called once by perf after the last event; emit the summary table.
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, args):
    # perf callback for every raw_syscalls:sys_enter tracepoint.
    # Skip events from other processes when a [comm] filter was given.
    if for_comm is not None:
        if common_comm != for_comm:
            return
    try:
        syscalls[id] += 1
    except TypeError:
        # First occurrence: the autodict entry is not yet a number.
        syscalls[id] = 1
def print_syscall_totals():
    # Print a table of syscall counts, most frequent first.
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events:\n\n",

    print "%-40s %10s\n" % ("event", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
                                 "-----------"),

    # Sort by (count, id) descending; Python 2 iteritems/tuple-unpacking lambda.
    for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
                                  reverse = True):
        print "%-40s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
GustavoHennig/ansible | lib/ansible/utils/module_docs_fragments/cloudstack.py | 175 | 2631 | # -*- coding: utf-8 -*-
# Copyright (c) 2015 René Moser <mail@renemoser.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
    """Shared CloudStack documentation fragment.

    Merged into CloudStack modules' docs via
    ``extends_documentation_fragment: cloudstack``; the string below is
    parsed as YAML, so its indentation is significant.
    """

    # Standard cloudstack documentation fragment
    DOCUMENTATION = '''
options:
  api_key:
    description:
      - API key of the CloudStack API.
    required: false
    default: null
  api_secret:
    description:
      - Secret key of the CloudStack API.
    required: false
    default: null
  api_url:
    description:
      - URL of the CloudStack API e.g. https://cloud.example.com/client/api.
    required: false
    default: null
  api_http_method:
    description:
      - HTTP method used.
    required: false
    default: 'get'
    choices: [ 'get', 'post' ]
  api_timeout:
    description:
      - HTTP timeout.
    required: false
    default: 10
  api_region:
    description:
      - Name of the ini section in the C(cloustack.ini) file.
    required: false
    default: cloudstack
requirements:
  - "python >= 2.6"
  - "cs >= 0.6.10"
notes:
  - Ansible uses the C(cs) library's configuration method if credentials are not
    provided by the arguments C(api_url), C(api_key), C(api_secret).
    Configuration is read from several locations, in the following order.
  - The C(CLOUDSTACK_ENDPOINT), C(CLOUDSTACK_KEY), C(CLOUDSTACK_SECRET) and
    C(CLOUDSTACK_METHOD). C(CLOUDSTACK_TIMEOUT) environment variables.
  - A C(CLOUDSTACK_CONFIG) environment variable pointing to an C(.ini) file,
  - A C(cloudstack.ini) file in the current working directory.
  - A C(.cloudstack.ini) file in the users home directory.
    Optionally multiple credentials and endpoints can be specified using ini sections in C(cloudstack.ini).
    Use the argument C(api_region) to select the section name, default section is C(cloudstack).
    See https://github.com/exoscale/cs for more information.
  - A detailed guide about cloudstack modules can be found on http://docs.ansible.com/ansible/guide_cloudstack.html
  - This module supports check mode.
'''
| gpl-3.0 |
thiagopio/vendas_healthcheck | vendas/healthcheck/models.py | 1 | 3084 | # -*- coding: utf-8 -*-
from django.db import models
import requests
import re
class Project(models.Model):
    """A monitored project/service deployed in a given environment."""

    name = models.CharField(max_length=50)
    environment = models.CharField(max_length=10, choices=(('DEV', 'DEV'), ('QA', 'QA'), ('PROD', 'PROD')))
    # Projects this one depends on; exposed to the UI via dependents_ids.
    related_project = models.ManyToManyField('Project', blank=True)

    @staticmethod
    def in_environment(env):
        # Case-insensitive environment filter, e.g. "prod" matches "PROD".
        return Project.objects.filter(environment__iexact=env)

    def __unicode__(self):
        return u"{0} ({1})".format(self.name, self.environment)

    def dependents_size(self):
        return len(self.related_project.all())

    def verify(self):
        """Run every configured health check; stop at the first failure.

        Returns (working, info) from the last check run, or (False, None)
        when the project has no StatusResponse rows configured.
        """
        working, info = (False, None)
        for expected_response in self.statusresponse_set.all():
            working, info = expected_response.check()
            if working is False:
                return working, info
        return working, info

    def to_json(self, with_verify=True):
        """Serialize for the dashboard; with_verify=False skips the HTTP
        checks and reports NOT_FOUND instead."""
        working, info = self.verify() if with_verify else (False, requests.codes.NOT_FOUND)
        return {
            'id': self.id,
            'name': self.name,
            'info': info,
            'working': working,
            'dependents_ids': [project.id for project in self.related_project.all()]
        }
class StatusResponse(models.Model):
    """One health-check definition attached to a Project.

    ``check()`` issues an HTTP request against ``url`` and compares the
    response with the expected ``status``/``content`` according to
    ``response_type`` (status code only, exact text, body of another URL,
    or a regex with one capture group).
    """

    OPTIONS = (('STATUS', 'Status'), ('TEXT', 'Text'), ('URL', 'Other Url'), ('REGEX', 'Regex'))
    project = models.ForeignKey(Project)
    name = models.CharField(max_length=50)
    url = models.URLField(max_length=100)
    response_type = models.CharField(max_length=10, choices=OPTIONS)
    method = models.CharField(max_length=10, choices=(('GET', 'GET'),))
    # Expected HTTP status code; any other code fails the check.
    status = models.PositiveSmallIntegerField(blank=False)
    # Meaning depends on response_type: expected text, reference URL or regex.
    content = models.CharField(max_length=200, blank=True)

    # NOTE(review): this shadows Django's classmethod Model.check() used by
    # the system-check framework; renaming would require updating
    # Project.verify(), so the name is kept as-is.
    def check(self):
        """Run the health check; return (working, info)."""
        working, info = False, None
        try:
            if self.method == 'GET':
                response = requests.get(self.url, timeout=2, verify=False)
                working, info = self.status_successful(response)
            else:
                raise Exception("Method '{}' not implemented".format(self.method))
        except Exception:
            # Network failure, timeout or unexpected error: report a 500.
            working, info = False, requests.codes.SERVER_ERROR
        return working, info

    def status_successful(self, response):
        """Compare *response* with the expected status/content.

        Returns (working, info) where info is diagnostic detail for the UI.
        """
        status_from_response = response.status_code
        if status_from_response == self.status:
            if self.response_type == 'STATUS':
                return True, status_from_response
            elif self.response_type == 'TEXT':
                same_content = self.content == response.text
                return same_content, response.text
            elif self.response_type == 'URL':
                # Compare the body against a reference URL's body.
                response_extra = requests.get(self.content, timeout=2, verify=False)
                same_content = response.text == response_extra.text
                return same_content, response_extra.text
            elif self.response_type == 'REGEX':
                # Bug fix: re.search() returns None when the pattern does not
                # match (and group(1) raises without a capture group); the old
                # code crashed here and check() mis-reported SERVER_ERROR even
                # though the HTTP request succeeded. Fail the check instead.
                match = re.search(self.content, response.text)
                if match is None or not match.groups():
                    return False, status_from_response
                return True, '+{}'.format(match.group(1))
        return False, status_from_response
| gpl-2.0 |
fufufanatic/pyNES | pynes/tests/guide_test.py | 28 | 5904 | # -*- coding: utf-8 -*-
import unittest
from pynes.compiler import lexical, syntax
'''
Those tests are based on examples from:
http://nesdev.parodius.com/6502guid.txt
'''
class GuideTest(unittest.TestCase):
    """Lexer/parser tests driven by 6502 assembly listings taken from
    http://nesdev.parodius.com/6502guid.txt (see module docstring).

    The assertions pin the exact number of tokens/AST nodes produced by
    pynes.compiler.lexical/syntax for each listing.
    """

    def test_example_16_bit_subtraction_routine(self):
        ex_2 = '''
SEC ;clear the carry
LDA $20 ;get the low byte of the first number
SBC $22 ;add to it the low byte of the second
STA $24 ;store in the low byte of the result
LDA $21 ;get the high byte of the first number
SBC $23 ;add to it the high byte of the second, plus carry
STA $25 ;store in high byte of the result
'''
        tokens = list(lexical(ex_2))
        self.assertEquals(21, len(tokens))
        ast = syntax(tokens)
        # One AST node per instruction line.
        self.assertEquals(7, len(ast))

    def test_example_4_2(self):
        example_4_2 = '''
; Example 4-2. Deleting an entry from an unordered list
;
; Delete the contents of $2F from a list whose starting
; address is in $30 and $31. The first byte of the list
; is its length.
;
deluel: LDY #$00 ; fetch element count
LDA ($30),Y
TAX ; transfer length to X
LDA $2F ; item to delete
nextel: INY ; index to next element
CMP ($30),Y ; do entry and element match?
BEQ delete ; yes. delete element
DEX ; no. decrement element count
BNE nextel ; any more elements to compare?
RTS ; no. element not in list. done
; delete an element by moving the ones below it up one location
delete: DEX ; decrement element count
BEQ deccnt ; end of list?
INY ; no. move next element up
LDA ($30),Y
DEY
STA ($30),Y
INY
JMP delete
deccnt: LDA ($30,X) ; update element count of list
SBC #$01
STA ($30,X)
RTS
'''
        tokens = list(lexical(example_4_2))
        self.assertEquals(96, len(tokens))

    def test_example_5_6(self):
        # NOTE(review): the body below is one big docstring, i.e. this test
        # is intentionally disabled (the lexer does not handle "ROR A" yet,
        # per the TODO inside the string).
        """
example_5_6 = '''
; Example 5-6. 16-bit by 16-bit unsigned multiply
;
; Multiply $22 (low) and $23 (high) by $20 (low) and
; $21 (high) producing a 32-bit result in $24 (low) to $27 (high)
;
mlt16: LDA #$00 ; clear p2 and p3 of product
STA $26
STA $27
LDX #$16 ; multiplier bit count = 16
nxtbt: LSR $21 ; shift two-byte multiplier right
ROR $20
BCC align ; multiplier = 1?
LDA $26 ; yes. fetch p2
CLC
ADC $22 ; and add m0 to it
STA $26 ; store new p2
LDA $27 ; fetch p3
ADC $23 ; and add m1 to it
align: ROR A ; rotate four-byte product right
STA $27 ; store new p3
ROR $26
ROR $25
ROR $24
DEX ; decrement bit count
BNE nxtbt ; loop until 16 bits are done
RTS
'''
# TODO ROR A?
# tokens = list(lexical(example_5_6))
        """

    def test_example_5_14(self):
        example_5_14 = '''
; Example 5-14. Simple 16-bit square root.
;
; Returns the 8-bit square root in $20 of the
; 16-bit number in $20 (low) and $21 (high). The
; remainder is in location $21.
sqrt16: LDY #$01 ; lsby of first odd number = 1
STY $22
DEY
STY $23 ; msby of first odd number (sqrt = 0)
again: SEC
LDA $20 ; save remainder in X register
TAX ; subtract odd lo from integer lo
SBC $22
STA $20
LDA $21 ; subtract odd hi from integer hi
SBC $23
STA $21 ; is subtract result negative?
BCC nomore ; no. increment square root
INY
LDA $22 ; calculate next odd number
ADC #$01
STA $22
BCC again
INC $23
JMP again
nomore: STY $20 ; all done, store square root
STX $21 ; and remainder
RTS
'''
        tokens = list(lexical(example_5_14))
        self.assertEquals(74, len(tokens))
        # Spot-check the start of the token stream: the leading blank/comment
        # lines lex as T_ENDLINE, then the first labelled instruction.
        self.assertEquals('T_ENDLINE', tokens[0]['type'])
        self.assertEquals('T_ENDLINE', tokens[1]['type'])
        self.assertEquals('T_ENDLINE', tokens[2]['type'])
        self.assertEquals('T_ENDLINE', tokens[3]['type'])
        self.assertEquals('T_ENDLINE', tokens[4]['type'])
        self.assertEquals('T_ENDLINE', tokens[5]['type'])
        self.assertEquals('T_ENDLINE', tokens[6]['type'])
        self.assertEquals('T_LABEL', tokens[7]['type'])
        self.assertEquals('T_INSTRUCTION', tokens[8]['type'])
        self.assertEquals('T_HEX_NUMBER', tokens[9]['type'])
        self.assertEquals('T_ENDLINE', tokens[10]['type'])
        self.assertEquals('T_INSTRUCTION', tokens[11]['type'])
        self.assertEquals('T_ADDRESS', tokens[12]['type'])
        self.assertEquals('T_ENDLINE', tokens[13]['type'])
        self.assertEquals('T_INSTRUCTION', tokens[14]['type'])
        self.assertEquals('T_ENDLINE', tokens[15]['type'])
        self.assertEquals('T_INSTRUCTION', tokens[16]['type'])
        self.assertEquals('T_ADDRESS', tokens[17]['type'])
        self.assertEquals('T_ENDLINE', tokens[18]['type'])
| bsd-3-clause |
maxlikely/scikit-learn | sklearn/ensemble/tests/test_partial_dependence.py | 44 | 7031 | """
Testing for the partial dependence module.
"""
import numpy as np
from numpy.testing import assert_array_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import if_matplotlib
from sklearn.ensemble.partial_dependence import partial_dependence
from sklearn.ensemble.partial_dependence import plot_partial_dependence
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import GradientBoostingRegressor
from sklearn import datasets
# toy sample: two linearly separable clusters in 2-D
X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]
y = [-1, -1, -1, 1, 1, 1]
T = [[-1, -1], [2, 2], [3, 2]]
true_result = [-1, 1, 1]

# also load the boston dataset (regression fixture)
boston = datasets.load_boston()

# also load the iris dataset (multi-class fixture)
iris = datasets.load_iris()
def test_partial_dependence_classifier():
    """Test partial dependence for classifier """
    clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
    clf.fit(X, y)

    pdp, axes = partial_dependence(clf, [0], X=X, grid_resolution=5)

    # only 4 grid points instead of 5 because only 4 unique X[:,0] vals
    assert pdp.shape == (1, 4)
    assert axes[0].shape[0] == 4

    # now with our own grid
    X_ = np.asarray(X)
    grid = np.unique(X_[:, 0])
    pdp_2, axes = partial_dependence(clf, [0], grid=grid)

    # with an explicit grid no axes are returned, values must match
    assert axes is None
    assert_array_equal(pdp, pdp_2)
def test_partial_dependence_multiclass():
    """Test partial dependence for multi-class classifier """
    clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
    clf.fit(iris.data, iris.target)

    grid_resolution = 25
    n_classes = clf.n_classes_
    pdp, axes = partial_dependence(
        clf, [0], X=iris.data, grid_resolution=grid_resolution)

    # one partial-dependence curve per class
    assert pdp.shape == (n_classes, grid_resolution)
    assert len(axes) == 1
    assert axes[0].shape[0] == grid_resolution
def test_partial_dependence_regressor():
    """Test partial dependence for regressor """
    clf = GradientBoostingRegressor(n_estimators=10, random_state=1)
    clf.fit(boston.data, boston.target)

    grid_resolution = 25
    pdp, axes = partial_dependence(
        clf, [0], X=boston.data, grid_resolution=grid_resolution)

    # a regressor yields a single output curve
    assert pdp.shape == (1, grid_resolution)
    assert axes[0].shape[0] == grid_resolution
def test_partial_dependecy_input():
    """Test input validation of partial dependence. """
    # NOTE(review): "dependecy" is a typo for "dependency"; the name is
    # kept unchanged to preserve the test's identity.
    clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
    clf.fit(X, y)

    # grid and X may not both be missing, nor both be given
    assert_raises(ValueError, partial_dependence,
                  clf, [0], grid=None, X=None)

    assert_raises(ValueError, partial_dependence,
                  clf, [0], grid=[0, 1], X=X)

    # first argument must be an instance of BaseGradientBoosting
    assert_raises(ValueError, partial_dependence,
                  {}, [0], X=X)

    # Gradient boosting estimator must be fit
    assert_raises(ValueError, partial_dependence,
                  GradientBoostingClassifier(), [0], X=X)

    # feature indices must be in range
    assert_raises(ValueError, partial_dependence, clf, [-1], X=X)

    assert_raises(ValueError, partial_dependence, clf, [100], X=X)

    # wrong ndim for grid
    grid = np.random.rand(10, 2, 1)
    assert_raises(ValueError, partial_dependence, clf, [0], grid=grid)
@if_matplotlib
def test_plot_partial_dependence():
    """Test partial dependence plot function. """
    clf = GradientBoostingRegressor(n_estimators=10, random_state=1)
    clf.fit(boston.data, boston.target)

    grid_resolution = 25
    # features may be given as indices; a tuple requests a 2-way plot
    fig, axs = plot_partial_dependence(clf, boston.data, [0, 1, (0, 1)],
                                       grid_resolution=grid_resolution,
                                       feature_names=boston.feature_names)
    assert len(axs) == 3
    assert all(ax.has_data for ax in axs)

    # check with str features and array feature names
    fig, axs = plot_partial_dependence(clf, boston.data, ['CRIM', 'ZN',
                                                          ('CRIM', 'ZN')],
                                       grid_resolution=grid_resolution,
                                       feature_names=boston.feature_names)

    assert len(axs) == 3
    assert all(ax.has_data for ax in axs)

    # check with list feature_names
    feature_names = boston.feature_names.tolist()
    fig, axs = plot_partial_dependence(clf, boston.data, ['CRIM', 'ZN',
                                                          ('CRIM', 'ZN')],
                                       grid_resolution=grid_resolution,
                                       feature_names=feature_names)
    assert len(axs) == 3
    assert all(ax.has_data for ax in axs)
@if_matplotlib
def test_plot_partial_dependence_input():
    """Test partial dependence plot function input checks. """
    clf = GradientBoostingClassifier(n_estimators=10, random_state=1)

    # not fitted yet
    assert_raises(ValueError, plot_partial_dependence,
                  clf, X, [0])
    clf.fit(X, y)

    # X with zero features is rejected
    assert_raises(ValueError, plot_partial_dependence,
                  clf, np.array(X)[:, :0], [0])

    # first argument must be an instance of BaseGradientBoosting
    assert_raises(ValueError, plot_partial_dependence,
                  {}, X, [0])

    # must be larger than -1
    assert_raises(ValueError, plot_partial_dependence,
                  clf, X, [-1])

    # too large feature value
    assert_raises(ValueError, plot_partial_dependence,
                  clf, X, [100])

    # str feature but no feature_names
    assert_raises(ValueError, plot_partial_dependence,
                  clf, X, ['foobar'])

    # not valid features value
    assert_raises(ValueError, plot_partial_dependence,
                  clf, X, [{'foo': 'bar'}])
@if_matplotlib
def test_plot_partial_dependence_multiclass():
    """Test partial dependence plot function on multi-class input. """
    clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
    clf.fit(iris.data, iris.target)

    # multi-class models require a label to select the output curve
    grid_resolution = 25
    fig, axs = plot_partial_dependence(clf, iris.data, [0, 1],
                                       label=0,
                                       grid_resolution=grid_resolution)
    assert len(axs) == 2
    assert all(ax.has_data for ax in axs)

    # now with symbol labels
    target = iris.target_names[iris.target]
    clf = GradientBoostingClassifier(n_estimators=10, random_state=1)
    clf.fit(iris.data, target)

    grid_resolution = 25
    fig, axs = plot_partial_dependence(clf, iris.data, [0, 1],
                                       label='setosa',
                                       grid_resolution=grid_resolution)
    assert len(axs) == 2
    assert all(ax.has_data for ax in axs)

    # label not in gbrt.classes_
    assert_raises(ValueError, plot_partial_dependence,
                  clf, iris.data, [0, 1], label='foobar',
                  grid_resolution=grid_resolution)

    # label not provided
    assert_raises(ValueError, plot_partial_dependence,
                  clf, iris.data, [0, 1],
                  grid_resolution=grid_resolution)
| bsd-3-clause |
hrjn/scikit-learn | examples/feature_selection/plot_f_test_vs_mi.py | 75 | 1647 | """
===========================================
Comparison of F-test and mutual information
===========================================
This example illustrates the differences between univariate F-test statistics
and mutual information.
We consider 3 features x_1, x_2, x_3 distributed uniformly over [0, 1], the
target depends on them as follows:
y = x_1 + sin(6 * pi * x_2) + 0.1 * N(0, 1), that is the third features is completely irrelevant.
The code below plots the dependency of y against individual x_i and normalized
values of univariate F-tests statistics and mutual information.
As F-test captures only linear dependency, it rates x_1 as the most
discriminative feature. On the other hand, mutual information can capture any
kind of dependency between variables and it rates x_2 as the most
discriminative feature, which probably agrees better with our intuitive
perception for this example. Both methods correctly marks x_3 as irrelevant.
"""
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt
from sklearn.feature_selection import f_regression, mutual_info_regression

# Deterministic synthetic data: three uniform features; the target depends
# on x_1 linearly and on x_2 sinusoidally, x_3 is irrelevant noise.
np.random.seed(0)
X = np.random.rand(1000, 3)
y = X[:, 0] + np.sin(6 * np.pi * X[:, 1]) + 0.1 * np.random.randn(1000)

# Normalize both scores to [0, 1] so they are comparable in the titles.
f_test, _ = f_regression(X, y)
f_test /= np.max(f_test)

mi = mutual_info_regression(X, y)
mi /= np.max(mi)

# One scatter panel per feature, annotated with both scores.
plt.figure(figsize=(15, 5))
for i in range(3):
    plt.subplot(1, 3, i + 1)
    plt.scatter(X[:, i], y)
    plt.xlabel("$x_{}$".format(i + 1), fontsize=14)
    if i == 0:
        plt.ylabel("$y$", fontsize=14)
    plt.title("F-test={:.2f}, MI={:.2f}".format(f_test[i], mi[i]),
              fontsize=16)
plt.show()
| bsd-3-clause |
yufengg/tensorflow | tensorflow/python/framework/framework_lib.py | 15 | 4208 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=unused-import,g-bad-import-order
"""Classes and functions for building TensorFlow graphs.
## Core graph data structures
@@Graph
@@Operation
@@Tensor
## Tensor types
@@DType
@@as_dtype
## Utility functions
@@device
@@container
@@name_scope
@@control_dependencies
@@convert_to_tensor
@@convert_to_tensor_or_indexed_slices
@@convert_to_tensor_or_sparse_tensor
@@get_default_graph
@@reset_default_graph
@@import_graph_def
@@load_file_system_library
@@load_op_library
@@make_tensor_proto
@@make_ndarray
## Graph collections
@@add_to_collection
@@get_collection
@@get_collection_ref
@@GraphKeys
## Defining new operations
@@RegisterGradient
@@NotDifferentiable
@@NoGradient
@@TensorShape
@@Dimension
@@op_scope
@@get_seed
## For libraries building on TensorFlow
@@register_tensor_conversion_function
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Classes used when building a Graph.
from tensorflow.python.framework.device import DeviceSpec
from tensorflow.python.framework.ops import Graph
from tensorflow.python.framework.ops import Operation
from tensorflow.python.framework.ops import Tensor
from tensorflow.python.framework.ops import IndexedSlices
from tensorflow.python.framework.sparse_tensor import SparseTensor
from tensorflow.python.framework.sparse_tensor import SparseTensorValue
# Utilities used when building a Graph.
from tensorflow.python.framework.ops import device
from tensorflow.python.framework.ops import container
from tensorflow.python.framework.ops import name_scope
from tensorflow.python.framework.ops import op_scope
from tensorflow.python.framework.ops import control_dependencies
from tensorflow.python.framework.ops import get_default_graph
from tensorflow.python.framework.ops import reset_default_graph
from tensorflow.python.framework.ops import GraphKeys
from tensorflow.python.framework.ops import add_to_collection
from tensorflow.python.framework.ops import get_collection
from tensorflow.python.framework.ops import get_collection_ref
from tensorflow.python.framework.ops import convert_to_tensor
from tensorflow.python.framework.ops import convert_to_tensor_or_indexed_slices
from tensorflow.python.framework.random_seed import get_seed
from tensorflow.python.framework.random_seed import set_random_seed
from tensorflow.python.framework.sparse_tensor import convert_to_tensor_or_sparse_tensor
from tensorflow.python.framework.subscribe import subscribe
from tensorflow.python.framework.importer import import_graph_def
# Utilities for working with Tensors
from tensorflow.python.framework.tensor_util import make_tensor_proto
from tensorflow.python.framework.tensor_util import MakeNdarray as make_ndarray
# Needed when you defined a new Op in C++.
from tensorflow.python.framework.ops import RegisterGradient
from tensorflow.python.framework.ops import NotDifferentiable
from tensorflow.python.framework.ops import NoGradient
from tensorflow.python.framework.ops import RegisterShape
from tensorflow.python.framework.tensor_shape import Dimension
from tensorflow.python.framework.tensor_shape import TensorShape
# Needed when interfacing tensorflow to new array libraries
from tensorflow.python.framework.ops import register_tensor_conversion_function
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.framework.dtypes import *
# Load a TensorFlow plugin
from tensorflow.python.framework.load_library import *
# pylint: enable=wildcard-import
| apache-2.0 |
fbidu/namebench | nb_third_party/dns/e164.py | 248 | 3063 | # Copyright (C) 2006, 2007, 2009 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS E.164 helpers
@var public_enum_domain: The DNS public ENUM domain, e164.arpa.
@type public_enum_domain: dns.name.Name object
"""
import dns.exception
import dns.name
import dns.resolver
public_enum_domain = dns.name.from_text('e164.arpa.')
def from_e164(text, origin=public_enum_domain):
    """Build the ENUM domain name corresponding to an E.164 number.

    Each digit of *text* becomes one label, in reversed order, relative
    to *origin*; non-digit characters (spaces, dashes, '+') are ignored.

    @param text: an E.164 number in textual form.
    @type text: str
    @param origin: The domain in which the number should be constructed.
    The default is e164.arpa.
    @type: dns.name.Name object or None
    @rtype: dns.name.Name object
    """
    digits = [ch for ch in text if ch.isdigit()]
    labels = '.'.join(reversed(digits))
    return dns.name.from_text(labels, origin=origin)
def to_e164(name, origin=public_enum_domain, want_plus_prefix=True):
    """Convert an ENUM domain name into an E.164 number.

    @param name: the ENUM domain name.
    @type name: dns.name.Name object.
    @param origin: A domain containing the ENUM domain name.  The
    name is relativized to this domain before being converted to text.
    @type: dns.name.Name object or None
    @param want_plus_prefix: if True, add a '+' to the beginning of the
    returned number.
    @rtype: str
    @raises dns.exception.SyntaxError: if any label of the (relativized)
    name is not a single digit.
    """
    # PEP 8 idiom: "origin is not None" rather than "not origin is None".
    if origin is not None:
        name = name.relativize(origin)
    # Every label must be exactly one digit to be a valid ENUM encoding.
    dlabels = [d for d in name.labels if (d.isdigit() and len(d) == 1)]
    if len(dlabels) != len(name.labels):
        raise dns.exception.SyntaxError('non-digit labels in ENUM domain name')
    # ENUM stores digits in reversed order; undo the reversal.
    dlabels.reverse()
    text = ''.join(dlabels)
    if want_plus_prefix:
        text = '+' + text
    return text
def query(number, domains, resolver=None):
    """Look for NAPTR RRs for the specified number in the specified domains.

    e.g. lookup('16505551212', ['e164.dnspython.org.', 'e164.arpa.'])

    Domains are tried in order; the first NAPTR answer found is returned.
    @raises dns.resolver.NXDOMAIN: if no domain yields an answer.
    """
    if resolver is None:
        resolver = dns.resolver.get_default_resolver()
    for domain in domains:
        if isinstance(domain, (str, unicode)):
            domain = dns.name.from_text(domain)
        # Call the local helper directly instead of the fragile
        # self-reference "dns.e164.from_e164" (dns.e164 is never imported
        # here; that spelling only worked because this module *is* dns.e164).
        qname = from_e164(number, domain)
        try:
            return resolver.query(qname, 'NAPTR')
        except dns.resolver.NXDOMAIN:
            pass
    raise dns.resolver.NXDOMAIN
| apache-2.0 |
Sylrob434/CouchPotatoServer | couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/newgrounds.py | 19 | 1277 | from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
class NewgroundsIE(InfoExtractor):
    _VALID_URL = r'https?://(?:www\.)?newgrounds\.com/audio/listen/(?P<id>[0-9]+)'
    _TEST = {
        'url': 'http://www.newgrounds.com/audio/listen/549479',
        'md5': 'fe6033d297591288fa1c1f780386f07a',
        'info_dict': {
            'id': '549479',
            'ext': 'mp3',
            'title': 'B7 - BusMode',
            'uploader': 'Burn7',
        }
    }

    def _real_extract(self, url):
        # The numeric track id is the only capture group in _VALID_URL.
        track_id = re.match(self._VALID_URL, url).group('id')
        page = self._download_webpage(url, track_id)

        # Track metadata is embedded in an inline JSON blob on the page.
        track_title = self._html_search_regex(
            r',"name":"([^"]+)",', page, 'music title')
        track_uploader = self._html_search_regex(
            r',"artist":"([^"]+)",', page, 'music uploader')

        # Capture the opening of the JSON object that carries the stream
        # URL and close it by hand so it parses as a standalone document.
        url_blob = self._html_search_regex(
            r'({"url":"[^"]+"),', page, 'music url') + '}'
        stream_url = json.loads(url_blob)['url']

        return {
            'id': track_id,
            'title': track_title,
            'url': stream_url,
            'uploader': track_uploader,
        }
| gpl-3.0 |
etherkit/OpenBeacon2 | client/linux-arm/venv/lib/python3.6/site-packages/serial/tools/list_ports_linux.py | 12 | 4427 | #!/usr/bin/env python
#
# This is a module that gathers a list of serial ports including details on
# GNU/Linux systems.
#
# This file is part of pySerial. https://github.com/pyserial/pyserial
# (C) 2011-2015 Chris Liechti <cliechti@gmx.net>
#
# SPDX-License-Identifier: BSD-3-Clause
import glob
import os
from serial.tools import list_ports_common
class SysFS(list_ports_common.ListPortInfo):
    """Wrapper for easy sysfs access and device info.

    Given a /dev tty device path, probes /sys/class/tty/<name>/device to
    classify the port (usb, usb-serial, pnp, amba) and, for USB devices,
    reads VID/PID, serial number, manufacturer, product and interface
    strings out of sysfs.
    """
    def __init__(self, device):
        super(SysFS, self).__init__(device)
        # special handling for links: resolve them but remember the
        # original link path so it can be appended to the hwid below
        if device is not None and os.path.islink(device):
            device = os.path.realpath(device)
            is_link = True
        else:
            is_link = False
        self.name = os.path.basename(device)
        self.usb_device_path = None
        # Resolve the sysfs node for this tty, if the kernel exposes one.
        if os.path.exists('/sys/class/tty/{}/device'.format(self.name)):
            self.device_path = os.path.realpath('/sys/class/tty/{}/device'.format(self.name))
            self.subsystem = os.path.basename(os.path.realpath(os.path.join(self.device_path, 'subsystem')))
        else:
            self.device_path = None
            self.subsystem = None
        # check device type: locate the USB interface node, whose parent
        # directory is the USB device node holding the descriptors
        if self.subsystem == 'usb-serial':
            self.usb_interface_path = os.path.dirname(self.device_path)
        elif self.subsystem == 'usb':
            self.usb_interface_path = self.device_path
        else:
            self.usb_interface_path = None
        # fill-in info for USB devices
        if self.usb_interface_path is not None:
            self.usb_device_path = os.path.dirname(self.usb_interface_path)
            try:
                num_if = int(self.read_line(self.usb_device_path, 'bNumInterfaces'))
            except ValueError:
                num_if = 1
            # NOTE(review): read_line returns None on errors, and
            # int(None, 16) raises TypeError, which is not caught here —
            # confirm sysfs always provides idVendor/idProduct at this point.
            self.vid = int(self.read_line(self.usb_device_path, 'idVendor'), 16)
            self.pid = int(self.read_line(self.usb_device_path, 'idProduct'), 16)
            self.serial_number = self.read_line(self.usb_device_path, 'serial')
            if num_if > 1: # multi interface devices like FT4232
                self.location = os.path.basename(self.usb_interface_path)
            else:
                self.location = os.path.basename(self.usb_device_path)
            self.manufacturer = self.read_line(self.usb_device_path, 'manufacturer')
            self.product = self.read_line(self.usb_device_path, 'product')
            self.interface = self.read_line(self.device_path, 'interface')
        if self.subsystem in ('usb', 'usb-serial'):
            self.apply_usb_info()
        #~ elif self.subsystem in ('pnp', 'amba'): # PCI based devices, raspi
        elif self.subsystem == 'pnp': # PCI based devices
            self.description = self.name
            self.hwid = self.read_line(self.device_path, 'id')
        elif self.subsystem == 'amba': # raspi
            self.description = self.name
            self.hwid = os.path.basename(self.device_path)
        if is_link:
            self.hwid += ' LINK={}'.format(device)
    def read_line(self, *args):
        """\
        Helper function to read a single line from a file.
        One or more parameters are allowed, they are joined with os.path.join.
        Returns None on errors.
        """
        try:
            with open(os.path.join(*args)) as f:
                line = f.readline().strip()
            return line
        except IOError:
            return None
def comports(include_links=False):
    """Return a list of SysFS info objects for all serial port devices.

    Scans the usual /dev name patterns; when ``include_links`` is true,
    symlinks pointing at those devices are scanned too.  Ports whose
    sysfs subsystem is "platform" (non-present internal ports) are
    filtered out.
    """
    patterns = [
        '/dev/ttyS*',       # built-in serial ports
        '/dev/ttyUSB*',     # usb-serial with own driver
        '/dev/ttyACM*',     # usb-serial with CDC-ACM profile
        '/dev/ttyAMA*',     # ARM internal port (raspi)
        '/dev/rfcomm*',     # BT serial devices
        '/dev/ttyAP*',      # Advantech multi-port serial controllers
    ]
    devices = []
    for pattern in patterns:
        devices.extend(glob.glob(pattern))
    if include_links:
        devices.extend(list_ports_common.list_links(devices))
    # hide non-present internal serial ports
    return [info for info in (SysFS(d) for d in devices)
            if info.subsystem != "platform"]
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# Manual smoke test: print every detected port as "device: description [hwid]",
# sorted by device name.
if __name__ == '__main__':
    for port, desc, hwid in sorted(comports()):
        print("{}: {} [{}]".format(port, desc, hwid))
| gpl-3.0 |
ojengwa/odoo | addons/crm/report/crm_phonecall_report.py | 309 | 3982 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
from openerp.addons.crm import crm
from openerp.osv import fields, osv
# (value, label) pairs for the phonecall "state" selection column below.
AVAILABLE_STATES = [
    ('draft', 'Draft'),
    ('open', 'Todo'),
    ('cancel', 'Cancelled'),
    ('done', 'Held'),
    ('pending', 'Pending')
]
class crm_phonecall_report(osv.osv):
    """Phone calls by user and section.

    Read-only reporting model (_auto = False) backed by the SQL view
    created in init(); one row per crm_phonecall record, with open/close
    delays precomputed in days.
    """
    _name = "crm.phonecall.report"
    _description = "Phone calls by user and section"
    _auto = False  # table is replaced by the SQL view below
    _columns = {
        'user_id':fields.many2one('res.users', 'User', readonly=True),
        'section_id':fields.many2one('crm.case.section', 'Section', readonly=True),
        'priority': fields.selection([('0','Low'), ('1','Normal'), ('2','High')], 'Priority'),
        'nbr': fields.integer('# of Cases', readonly=True), # TDE FIXME master: rename into nbr_cases
        'state': fields.selection(AVAILABLE_STATES, 'Status', readonly=True),
        'create_date': fields.datetime('Create Date', readonly=True, select=True),
        'delay_close': fields.float('Delay to close', digits=(16,2),readonly=True, group_operator="avg",help="Number of Days to close the case"),
        'duration': fields.float('Duration', digits=(16,2),readonly=True, group_operator="avg"),
        'delay_open': fields.float('Delay to open',digits=(16,2),readonly=True, group_operator="avg",help="Number of Days to open the case"),
        'categ_id': fields.many2one('crm.case.categ', 'Category', \
             domain="[('section_id','=',section_id),\
             ('object_id.model', '=', 'crm.phonecall')]"),
        'partner_id': fields.many2one('res.partner', 'Partner' , readonly=True),
        'company_id': fields.many2one('res.company', 'Company', readonly=True),
        'opening_date': fields.datetime('Opening Date', readonly=True, select=True),
        'date_closed': fields.datetime('Close Date', readonly=True, select=True),
    }
    def init(self, cr):
        """(Re)create the crm_phonecall_report SQL view.

        @param cr: database cursor
        """
        tools.drop_view_if_exists(cr, 'crm_phonecall_report')
        # delay_close/delay_open are expressed in days (epoch seconds / 86400).
        cr.execute("""
            create or replace view crm_phonecall_report as (
                select
                    id,
                    c.date_open as opening_date,
                    c.date_closed as date_closed,
                    c.state,
                    c.user_id,
                    c.section_id,
                    c.categ_id,
                    c.partner_id,
                    c.duration,
                    c.company_id,
                    c.priority,
                    1 as nbr,
                    c.create_date as create_date,
                    extract('epoch' from (c.date_closed-c.create_date))/(3600*24) as delay_close,
                    extract('epoch' from (c.date_open-c.create_date))/(3600*24) as delay_open
                from
                    crm_phonecall c
            )""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
PeterWangIntel/chromium-crosswalk | third_party/libxml/src/check-relaxng-test-suite2.py | 343 | 10578 | #!/usr/bin/python
import sys
import time
import os
import string
import StringIO
sys.path.insert(0, "python")
import libxml2
# Memory debug specific
libxml2.debugMemory(1)
# Verbosity switches: `debug` traces each test, `quiet` suppresses progress.
debug = 0
quiet = 1
#
# the testsuite description
#
CONF=os.path.join(os.path.dirname(__file__), "test/relaxng/testsuite.xml")
LOG="check-relaxng-test-suite2.log"
log = open(LOG, "w")
# Global pass/fail counters, updated by the handle_* functions below.
nb_schemas_tests = 0
nb_schemas_success = 0
nb_schemas_failed = 0
nb_instances_tests = 0
nb_instances_success = 0
nb_instances_failed = 0
# Record line numbers while parsing so failures can be located in the suite.
libxml2.lineNumbersDefault(1)
#
# Resolver callback
#
resources = {}
def resolver(URL, ID, ctxt):
    """libxml2 entity-loader callback.

    Serve documents previously registered in the in-memory `resources`
    mapping; unknown URLs are logged and refused (returning None lets
    libxml2 fall back to its default loading).
    """
    global resources
    # Membership test instead of the deprecated dict.has_key().
    if URL in resources:
        return(StringIO.StringIO(resources[URL]))
    log.write("Resolver failure: asked %s\n" % (URL))
    log.write("resources: %s\n" % (resources))
    return None
#
# Load the previous results
#
#results = {}
#previous = {}
#
#try:
# res = libxml2.parseFile(RES)
#except:
# log.write("Could not parse %s" % (RES))
#
# handle a valid instance
#
def handle_valid(node, schema):
    """Check one <valid> instance against a compiled schema.

    The instance document is rebuilt from the element children of `node`
    (optionally prefixed by its "dtd" attribute); it must both parse and
    validate, otherwise the failure is logged and counted.
    """
    global log
    global nb_instances_success
    global nb_instances_failed
    # Reassemble the instance document text from the test description.
    instance = node.prop("dtd")
    if instance == None:
        instance = ""
    child = node.children
    while child != None:
        if child.type != 'text':
            instance = instance + child.serialize()
        child = child.next
    # mem = libxml2.debugMemory(1);
    try:
        doc = libxml2.parseDoc(instance)
    except:
        doc = None
    if doc == None:
        log.write("\nFailed to parse correct instance:\n-----\n")
        log.write(instance)
        log.write("\n-----\n")
        nb_instances_failed = nb_instances_failed + 1
        return
    if debug:
        print "instance line %d" % (node.lineNo())
    # Validate; any exception is treated as a validation failure.
    try:
        ctxt = schema.relaxNGNewValidCtxt()
        ret = doc.relaxNGValidateDoc(ctxt)
        del ctxt
    except:
        ret = -1
    doc.freeDoc()
    # if mem != libxml2.debugMemory(1):
    # print "validating instance %d line %d leaks" % (
    # nb_instances_tests, node.lineNo())
    if ret != 0:
        log.write("\nFailed to validate correct instance:\n-----\n")
        log.write(instance)
        log.write("\n-----\n")
        nb_instances_failed = nb_instances_failed + 1
    else:
        nb_instances_success = nb_instances_success + 1
#
# handle an invalid instance
#
def handle_invalid(node, schema):
    """Check one <invalid> instance against a compiled schema.

    The instance must *fail* validation; if it validates cleanly, the
    missed error is logged and counted as a failure.
    """
    global log
    global nb_instances_success
    global nb_instances_failed
    # Reassemble the instance document text from the test description.
    instance = node.prop("dtd")
    if instance == None:
        instance = ""
    child = node.children
    while child != None:
        if child.type != 'text':
            instance = instance + child.serialize()
        child = child.next
    # mem = libxml2.debugMemory(1);
    try:
        doc = libxml2.parseDoc(instance)
    except:
        doc = None
    if doc == None:
        # Parsing itself failed: note it but count neither way.
        log.write("\nStrange: failed to parse incorrect instance:\n-----\n")
        log.write(instance)
        log.write("\n-----\n")
        return
    if debug:
        print "instance line %d" % (node.lineNo())
    # Validate; an exception counts as a detected problem (ret != 0).
    try:
        ctxt = schema.relaxNGNewValidCtxt()
        ret = doc.relaxNGValidateDoc(ctxt)
        del ctxt
    except:
        ret = -1
    doc.freeDoc()
    # mem2 = libxml2.debugMemory(1)
    # if mem != mem2:
    # print "validating instance %d line %d leaks %d bytes" % (
    # nb_instances_tests, node.lineNo(), mem2 - mem)
    if ret == 0:
        log.write("\nFailed to detect validation problem in instance:\n-----\n")
        log.write(instance)
        log.write("\n-----\n")
        nb_instances_failed = nb_instances_failed + 1
    else:
        nb_instances_success = nb_instances_success + 1
#
# handle an incorrect test
#
def handle_correct(node):
    """Compile a <correct> schema from the test description.

    Returns the compiled schema object, or None (after logging and
    counting the failure) if compilation failed.
    """
    global log
    global nb_schemas_success
    global nb_schemas_failed
    # Reassemble the schema text from the element children of the node.
    schema = ""
    child = node.children
    while child != None:
        if child.type != 'text':
            schema = schema + child.serialize()
        child = child.next
    try:
        rngp = libxml2.relaxNGNewMemParserCtxt(schema, len(schema))
        rngs = rngp.relaxNGParse()
    except:
        rngs = None
    if rngs == None:
        log.write("\nFailed to compile correct schema:\n-----\n")
        log.write(schema)
        log.write("\n-----\n")
        nb_schemas_failed = nb_schemas_failed + 1
    else:
        nb_schemas_success = nb_schemas_success + 1
    return rngs
def handle_incorrect(node):
    """Compile an <incorrect> schema, expecting compilation to fail.

    If the schema compiles anyway, the missed error is logged and counted
    as a failure.  Always returns None (there is no usable schema).
    """
    global log
    global nb_schemas_success
    global nb_schemas_failed
    # Reassemble the schema text from the element children of the node.
    schema = ""
    child = node.children
    while child != None:
        if child.type != 'text':
            schema = schema + child.serialize()
        child = child.next
    try:
        rngp = libxml2.relaxNGNewMemParserCtxt(schema, len(schema))
        rngs = rngp.relaxNGParse()
    except:
        rngs = None
    if rngs != None:
        log.write("\nFailed to detect schema error in:\n-----\n")
        log.write(schema)
        log.write("\n-----\n")
        nb_schemas_failed = nb_schemas_failed + 1
    else:
        # log.write("\nSuccess detecting schema error in:\n-----\n")
        # log.write(schema)
        # log.write("\n-----\n")
        nb_schemas_success = nb_schemas_success + 1
    return None
#
# resource handling: keep a dictionary of URL->string mappings
#
def handle_resource(node, dir):
    """Register an in-memory resource for the entity resolver.

    Maps the node's "name" attribute (prefixed by pseudo-directory `dir`
    when given) to the serialized content of its element children in the
    global `resources` dictionary.
    """
    global resources
    try:
        name = node.prop('name')
    except:
        name = None
    if name == None or name == '':
        log.write("resource has no name")
        return;
    if dir != None:
        # name = libxml2.buildURI(name, dir)
        name = dir + '/' + name
    res = ""
    child = node.children
    while child != None:
        if child.type != 'text':
            res = res + child.serialize()
        child = child.next
    resources[name] = res
#
# dir handling: pseudo directory resources
#
def handle_dir(node, dir):
    """Recursively register a pseudo-directory of test resources.

    Nested <dir> elements extend the path prefix; contained <resource>
    elements are registered via handle_resource under that prefix.
    """
    try:
        name = node.prop('name')
    except:
        name = None
    if name == None or name == '':
        log.write("resource has no name")
        return;
    if dir != None:
        # name = libxml2.buildURI(name, dir)
        name = dir + '/' + name
    dirs = node.xpathEval('dir')
    for dir in dirs:
        handle_dir(dir, name)
    res = node.xpathEval('resource')
    for r in res:
        handle_resource(r, name)
#
# handle a testCase element
#
def handle_testCase(node):
    """Run a single <testCase>.

    Registers the case's resources, compiles its <correct> or <incorrect>
    schema, then checks every <valid>/<invalid> instance against the
    compiled schema (when one was obtained).
    """
    global nb_schemas_tests
    global nb_instances_tests
    global resources
    sections = node.xpathEval('string(section)')
    log.write("\n ======== test %d line %d section %s ==========\n" % (
        nb_schemas_tests, node.lineNo(), sections))
    # Resources are scoped per test case.
    resources = {}
    if debug:
        print "test %d line %d" % (nb_schemas_tests, node.lineNo())
    dirs = node.xpathEval('dir')
    for dir in dirs:
        handle_dir(dir, None)
    res = node.xpathEval('resource')
    for r in res:
        handle_resource(r, None)
    # A case carries either an <incorrect> schema or a <correct> one.
    # NOTE(review): if neither child is present, `schema` is never bound
    # and the "if schema != None" test below raises NameError.
    tsts = node.xpathEval('incorrect')
    if tsts != []:
        if len(tsts) != 1:
            print "warning test line %d has more than one <incorrect> example" %(node.lineNo())
        schema = handle_incorrect(tsts[0])
    else:
        tsts = node.xpathEval('correct')
        if tsts != []:
            if len(tsts) != 1:
                print "warning test line %d has more than one <correct> example"% (node.lineNo())
            schema = handle_correct(tsts[0])
        else:
            print "warning <testCase> line %d has no <correct> nor <incorrect> child" % (node.lineNo())
    nb_schemas_tests = nb_schemas_tests + 1;
    valids = node.xpathEval('valid')
    invalids = node.xpathEval('invalid')
    nb_instances_tests = nb_instances_tests + len(valids) + len(invalids)
    if schema != None:
        for valid in valids:
            handle_valid(valid, schema)
        for invalid in invalids:
            handle_invalid(invalid, schema)
#
# handle a testSuite element
#
def handle_testSuite(node, level = 0):
    """Run a <testSuite> element, recursing into nested suites.

    At nesting level >= 1, snapshots the global counters on entry so a
    per-section delta can be reported after the suite's cases have run.
    """
    global nb_schemas_tests, nb_schemas_success, nb_schemas_failed
    global nb_instances_tests, nb_instances_success, nb_instances_failed
    if level >= 1:
        # Remember counters so we can report this sub-suite's delta below.
        old_schemas_tests = nb_schemas_tests
        old_schemas_success = nb_schemas_success
        old_schemas_failed = nb_schemas_failed
        old_instances_tests = nb_instances_tests
        old_instances_success = nb_instances_success
        old_instances_failed = nb_instances_failed
    docs = node.xpathEval('documentation')
    authors = node.xpathEval('author')
    if docs != []:
        msg = ""
        for doc in docs:
            msg = msg + doc.content + " "
        if authors != []:
            msg = msg + "written by "
            for author in authors:
                msg = msg + author.content + " "
        if quiet == 0:
            print msg
    sections = node.xpathEval('section')
    if sections != [] and level <= 0:
        msg = ""
        for section in sections:
            msg = msg + section.content + " "
        if quiet == 0:
            print "Tests for section %s" % (msg)
    for test in node.xpathEval('testCase'):
        handle_testCase(test)
    for test in node.xpathEval('testSuite'):
        handle_testSuite(test, level + 1)
    if level >= 1 and sections != []:
        msg = ""
        for section in sections:
            msg = msg + section.content + " "
        print "Result of tests for section %s" % (msg)
        if nb_schemas_tests != old_schemas_tests:
            print "found %d test schemas: %d success %d failures" % (
                nb_schemas_tests - old_schemas_tests,
                nb_schemas_success - old_schemas_success,
                nb_schemas_failed - old_schemas_failed)
        if nb_instances_tests != old_instances_tests:
            print "found %d test instances: %d success %d failures" % (
                nb_instances_tests - old_instances_tests,
                nb_instances_success - old_instances_success,
                nb_instances_failed - old_instances_failed)
#
# Parse the conf file (entities are substituted so embedded documents
# arrive fully expanded)
#
libxml2.substituteEntitiesDefault(1);
testsuite = libxml2.parseFile(CONF)
#
# Error and warning callbacks
#
def callback(ctx, str):
    # Route libxml2 error/warning messages into the log file.
    global log
    log.write("%s%s" % (ctx, str))
# Install the error logger and the in-memory resource resolver.
libxml2.registerErrorHandler(callback, "")
libxml2.setEntityLoader(resolver)
root = testsuite.getRootElement()
if root.name != 'testSuite':
    print "%s doesn't start with a testSuite element, aborting" % (CONF)
    sys.exit(1)
if quiet == 0:
    print "Running Relax NG testsuite"
# Walk the whole suite; this drives all the handle_* functions above.
handle_testSuite(root)
if quiet == 0:
    print "\nTOTAL:\n"
# Always report when something failed, even in quiet mode.
if quiet == 0 or nb_schemas_failed != 0:
    print "found %d test schemas: %d success %d failures" % (
        nb_schemas_tests, nb_schemas_success, nb_schemas_failed)
if quiet == 0 or nb_instances_failed != 0:
    print "found %d test instances: %d success %d failures" % (
        nb_instances_tests, nb_instances_success, nb_instances_failed)
testsuite.freeDoc()
# Memory debug specific: everything should be freed by now.
libxml2.relaxNGCleanupTypes()
libxml2.cleanupParser()
if libxml2.debugMemory(1) == 0:
    if quiet == 0:
        print "OK"
else:
    print "Memory leak %d bytes" % (libxml2.debugMemory(1))
    libxml2.dumpMemory()
| bsd-3-clause |
jodal/comics | comics/core/migrations/0001_initial.py | 1 | 5776 | import django.core.files.storage
from django.db import migrations, models
import comics.core.models
class Migration(migrations.Migration):
    """Initial schema for the comics core app: Comic, Image and Release.

    NOTE: migrations are frozen history — typos in help_text (e.g.
    "Wheter") and the developer-specific FileSystemStorage location below
    must be fixed in the model / a later migration, not edited here.
    """
    # First migration of the app: nothing to depend on.
    dependencies = []
    operations = [
        # A comic series being crawled (one row per comic).
        migrations.CreateModel(
            name="Comic",
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID",
                        serialize=False,
                        auto_created=True,
                        primary_key=True,
                    ),
                ),
                (
                    "name",
                    models.CharField(help_text="Name of the comic", max_length=100),
                ),
                (
                    "slug",
                    models.SlugField(
                        help_text="For file paths and URLs",
                        unique=True,
                        max_length=100,
                        verbose_name="Short name",
                    ),
                ),
                (
                    "language",
                    models.CharField(
                        help_text="The language of the comic",
                        max_length=2,
                        choices=[("en", "English"), ("no", "Norwegian")],
                    ),
                ),
                (
                    "url",
                    models.URLField(
                        help_text="URL to the official website",
                        verbose_name="URL",
                        blank=True,
                    ),
                ),
                (
                    "active",
                    models.BooleanField(
                        default=True,
                        help_text="Wheter the comic is still being crawled",
                    ),
                ),
                (
                    "start_date",
                    models.DateField(
                        help_text="First published at", null=True, blank=True
                    ),
                ),
                (
                    "end_date",
                    models.DateField(
                        help_text="Last published at, if comic has been cancelled",
                        null=True,
                        blank=True,
                    ),
                ),
                (
                    "rights",
                    models.CharField(
                        help_text="Author, copyright, and/or licensing " "information",
                        max_length=100,
                        blank=True,
                    ),
                ),
                (
                    "added",
                    models.DateTimeField(
                        help_text="Time the comic was added to the site",
                        auto_now_add=True,
                    ),
                ),
            ],
            options={
                "ordering": ["name"],
                "db_table": "comics_comic",
            },
            bases=(models.Model,),
        ),
        # A fetched comic strip image, deduplicated by checksum.
        migrations.CreateModel(
            name="Image",
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID",
                        serialize=False,
                        auto_created=True,
                        primary_key=True,
                    ),
                ),
                (
                    "file",
                    models.ImageField(
                        height_field="height",
                        storage=django.core.files.storage.FileSystemStorage(
                            base_url="/media/",
                            location="/home/jodal/dev/comics/media",
                        ),
                        width_field="width",
                        upload_to=comics.core.models.image_file_path,
                    ),
                ),
                ("checksum", models.CharField(max_length=64, db_index=True)),
                ("title", models.CharField(max_length=255, blank=True)),
                ("text", models.TextField(blank=True)),
                ("fetched", models.DateTimeField(auto_now_add=True)),
                ("height", models.IntegerField()),
                ("width", models.IntegerField()),
                (
                    "comic",
                    models.ForeignKey(to="core.Comic", on_delete=models.CASCADE),
                ),
            ],
            options={
                "db_table": "comics_image",
            },
            bases=(models.Model,),
        ),
        # A dated publication of one or more images for a comic.
        migrations.CreateModel(
            name="Release",
            fields=[
                (
                    "id",
                    models.AutoField(
                        verbose_name="ID",
                        serialize=False,
                        auto_created=True,
                        primary_key=True,
                    ),
                ),
                (
                    "pub_date",
                    models.DateField(verbose_name="publication date", db_index=True),
                ),
                (
                    "fetched",
                    models.DateTimeField(auto_now_add=True, db_index=True),
                ),
                (
                    "comic",
                    models.ForeignKey(to="core.Comic", on_delete=models.CASCADE),
                ),
                (
                    "images",
                    models.ManyToManyField(related_name="releases", to="core.Image"),
                ),
            ],
            options={
                "db_table": "comics_release",
                "get_latest_by": "pub_date",
            },
            bases=(models.Model,),
        ),
    ]
| agpl-3.0 |
pbrod/scipy | scipy/interpolate/tests/test_ndgriddata.py | 50 | 7259 | from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import (assert_equal, assert_array_equal, assert_allclose,
run_module_suite, assert_raises)
from scipy.interpolate import griddata, NearestNDInterpolator
class TestGriddata(object):
    """Tests for scipy.interpolate.griddata across methods and call styles."""
    def test_fill_value(self):
        # Points outside the convex hull get fill_value (default: NaN).
        x = [(0,0), (0,1), (1,0)]
        y = [1, 2, 3]
        yi = griddata(x, y, [(1,1), (1,2), (0,0)], fill_value=-1)
        assert_array_equal(yi, [-1., -1, 1])
        yi = griddata(x, y, [(1,1), (1,2), (0,0)])
        assert_array_equal(yi, [np.nan, np.nan, 1])
    def test_alternative_call(self):
        # Points given as a tuple of coordinate arrays instead of (n, d) array.
        x = np.array([(0,0), (-0.5,-0.5), (-0.5,0.5), (0.5, 0.5), (0.25, 0.3)],
                     dtype=np.double)
        y = (np.arange(x.shape[0], dtype=np.double)[:,None]
             + np.array([0,1])[None,:])
        for method in ('nearest', 'linear', 'cubic'):
            for rescale in (True, False):
                msg = repr((method, rescale))
                yi = griddata((x[:,0], x[:,1]), y, (x[:,0], x[:,1]), method=method,
                              rescale=rescale)
                assert_allclose(y, yi, atol=1e-14, err_msg=msg)
    def test_multivalue_2d(self):
        # Multiple value columns per point; interpolation at the data sites
        # must reproduce the data.
        x = np.array([(0,0), (-0.5,-0.5), (-0.5,0.5), (0.5, 0.5), (0.25, 0.3)],
                     dtype=np.double)
        y = (np.arange(x.shape[0], dtype=np.double)[:,None]
             + np.array([0,1])[None,:])
        for method in ('nearest', 'linear', 'cubic'):
            for rescale in (True, False):
                msg = repr((method, rescale))
                yi = griddata(x, y, x, method=method, rescale=rescale)
                assert_allclose(y, yi, atol=1e-14, err_msg=msg)
    def test_multipoint_2d(self):
        # Query points with an extra broadcast axis.
        x = np.array([(0,0), (-0.5,-0.5), (-0.5,0.5), (0.5, 0.5), (0.25, 0.3)],
                     dtype=np.double)
        y = np.arange(x.shape[0], dtype=np.double)
        xi = x[:,None,:] + np.array([0,0,0])[None,:,None]
        for method in ('nearest', 'linear', 'cubic'):
            for rescale in (True, False):
                msg = repr((method, rescale))
                yi = griddata(x, y, xi, method=method, rescale=rescale)
                assert_equal(yi.shape, (5, 3), err_msg=msg)
                assert_allclose(yi, np.tile(y[:,None], (1, 3)),
                                atol=1e-14, err_msg=msg)
    def test_complex_2d(self):
        # Complex-valued data must round-trip through interpolation.
        x = np.array([(0,0), (-0.5,-0.5), (-0.5,0.5), (0.5, 0.5), (0.25, 0.3)],
                     dtype=np.double)
        y = np.arange(x.shape[0], dtype=np.double)
        y = y - 2j*y[::-1]
        xi = x[:,None,:] + np.array([0,0,0])[None,:,None]
        for method in ('nearest', 'linear', 'cubic'):
            for rescale in (True, False):
                msg = repr((method, rescale))
                yi = griddata(x, y, xi, method=method, rescale=rescale)
                assert_equal(yi.shape, (5, 3), err_msg=msg)
                assert_allclose(yi, np.tile(y[:,None], (1, 3)),
                                atol=1e-14, err_msg=msg)
    def test_1d(self):
        # 1-D data accepted as flat array, (n, 1) array, or 1-tuple of arrays.
        x = np.array([1, 2.5, 3, 4.5, 5, 6])
        y = np.array([1, 2, 0, 3.9, 2, 1])
        for method in ('nearest', 'linear', 'cubic'):
            assert_allclose(griddata(x, y, x, method=method), y,
                            err_msg=method, atol=1e-14)
            assert_allclose(griddata(x.reshape(6, 1), y, x, method=method), y,
                            err_msg=method, atol=1e-14)
            assert_allclose(griddata((x,), y, (x,), method=method), y,
                            err_msg=method, atol=1e-14)
    def test_1d_borders(self):
        # Test for nearest neighbor case with xi outside
        # the range of the values.
        x = np.array([1, 2.5, 3, 4.5, 5, 6])
        y = np.array([1, 2, 0, 3.9, 2, 1])
        xi = np.array([0.9, 6.5])
        yi_should = np.array([1.0, 1.0])
        method = 'nearest'
        assert_allclose(griddata(x, y, xi,
                                 method=method), yi_should,
                        err_msg=method,
                        atol=1e-14)
        assert_allclose(griddata(x.reshape(6, 1), y, xi,
                                 method=method), yi_should,
                        err_msg=method,
                        atol=1e-14)
        assert_allclose(griddata((x, ), y, (xi, ),
                                 method=method), yi_should,
                        err_msg=method,
                        atol=1e-14)
    def test_1d_unsorted(self):
        # Sample points need not be sorted.
        x = np.array([2.5, 1, 4.5, 5, 6, 3])
        y = np.array([1, 2, 0, 3.9, 2, 1])
        for method in ('nearest', 'linear', 'cubic'):
            assert_allclose(griddata(x, y, x, method=method), y,
                            err_msg=method, atol=1e-10)
            assert_allclose(griddata(x.reshape(6, 1), y, x, method=method), y,
                            err_msg=method, atol=1e-10)
            assert_allclose(griddata((x,), y, (x,), method=method), y,
                            err_msg=method, atol=1e-10)
    def test_square_rescale_manual(self):
        # rescale=True must match manually pre-scaling a strongly
        # anisotropic domain to the unit square.
        points = np.array([(0,0), (0,100), (10,100), (10,0), (1, 5)], dtype=np.double)
        points_rescaled = np.array([(0,0), (0,1), (1,1), (1,0), (0.1, 0.05)], dtype=np.double)
        values = np.array([1., 2., -3., 5., 9.], dtype=np.double)
        xx, yy = np.broadcast_arrays(np.linspace(0, 10, 14)[:,None],
                                     np.linspace(0, 100, 14)[None,:])
        xx = xx.ravel()
        yy = yy.ravel()
        xi = np.array([xx, yy]).T.copy()
        for method in ('nearest', 'linear', 'cubic'):
            msg = method
            zi = griddata(points_rescaled, values, xi/np.array([10, 100.]),
                          method=method)
            zi_rescaled = griddata(points, values, xi, method=method,
                                   rescale=True)
            assert_allclose(zi, zi_rescaled, err_msg=msg,
                            atol=1e-12)
    def test_xi_1d(self):
        # Check that 1-D xi is interpreted as a coordinate
        x = np.array([(0,0), (-0.5,-0.5), (-0.5,0.5), (0.5, 0.5), (0.25, 0.3)],
                     dtype=np.double)
        y = np.arange(x.shape[0], dtype=np.double)
        y = y - 2j*y[::-1]
        xi = np.array([0.5, 0.5])
        for method in ('nearest', 'linear', 'cubic'):
            p1 = griddata(x, y, xi, method=method)
            p2 = griddata(x, y, xi[None,:], method=method)
            assert_allclose(p1, p2, err_msg=method)
            # Wrong-dimensional 1-D xi must be rejected.
            xi1 = np.array([0.5])
            xi3 = np.array([0.5, 0.5, 0.5])
            assert_raises(ValueError, griddata, x, y, xi1,
                          method=method)
            assert_raises(ValueError, griddata, x, y, xi3,
                          method=method)
def test_nearest_options():
    """Smoke test: NearestNDInterpolator forwards tree_options to cKDTree
    without changing the interpolation result."""
    n_points, n_dims = 4, 3
    coords = np.arange(n_points * n_dims).reshape((n_points, n_dims))
    data = np.arange(n_points)
    default_interp = NearestNDInterpolator(coords, data)
    tuned_interp = NearestNDInterpolator(
        coords, data,
        tree_options={'balanced_tree': False, 'compact_nodes': False})
    assert_allclose(default_interp(coords), tuned_interp(coords), atol=1e-14)
# Allow running this test module directly as a script.
if __name__ == "__main__":
    run_module_suite()
| bsd-3-clause |
dcroc16/skunk_works | google_appengine/lib/yaml-3.10/yaml/resolver.py | 474 | 8972 |
__all__ = ['BaseResolver', 'Resolver']
from error import *
from nodes import *
import re
class ResolverError(YAMLError):
    """Raised when a tag resolver is registered with invalid arguments
    (bad path element, node checker, index checker, or node kind)."""
    pass
class BaseResolver(object):
    """
    Maps nodes to YAML tags.

    Two mechanisms are supported: implicit resolvers, which match plain
    scalar values against regular expressions, and (experimental) path
    resolvers, which match a node's position inside the document.  The
    composer drives path matching by calling descend_resolver() and
    ascend_resolver() as it walks the node tree.
    """
    # Fallback tags used when no resolver matches.
    DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str'
    DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq'
    DEFAULT_MAPPING_TAG = u'tag:yaml.org,2002:map'
    # Class-level registries; copied on first write so that subclasses do
    # not mutate their parents' tables (see add_implicit_resolver).
    yaml_implicit_resolvers = {}
    yaml_path_resolvers = {}
    def __init__(self):
        # Per-instance stacks mirroring the current depth in the node tree;
        # one entry is pushed per descend_resolver() call and popped by the
        # matching ascend_resolver().
        self.resolver_exact_paths = []
        self.resolver_prefix_paths = []
    def add_implicit_resolver(cls, tag, regexp, first):
        """Register `tag` for plain scalars matching `regexp`.  `first` is
        a sequence of possible first characters used as a dispatch index;
        None stands for "any first character"."""
        if not 'yaml_implicit_resolvers' in cls.__dict__:
            cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy()
        if first is None:
            first = [None]
        for ch in first:
            cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
    add_implicit_resolver = classmethod(add_implicit_resolver)
    def add_path_resolver(cls, tag, path, kind=None):
        # Note: `add_path_resolver` is experimental.  The API could be changed.
        # `new_path` is a pattern that is matched against the path from the
        # root to the node that is being considered.  `node_path` elements are
        # tuples `(node_check, index_check)`.  `node_check` is a node class:
        # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`.  `None`
        # matches any kind of a node.  `index_check` could be `None`, a boolean
        # value, a string value, or a number.  `None` and `False` match against
        # any _value_ of sequence and mapping nodes.  `True` matches against
        # any _key_ of a mapping node.  A string `index_check` matches against
        # a mapping value that corresponds to a scalar key which content is
        # equal to the `index_check` value.  An integer `index_check` matches
        # against a sequence value with the index equal to `index_check`.
        if not 'yaml_path_resolvers' in cls.__dict__:
            cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
        new_path = []
        for element in path:
            if isinstance(element, (list, tuple)):
                if len(element) == 2:
                    node_check, index_check = element
                elif len(element) == 1:
                    node_check = element[0]
                    index_check = True
                else:
                    raise ResolverError("Invalid path element: %s" % element)
            else:
                node_check = None
                index_check = element
            # Accept the plain Python types str/list/dict as shorthand for
            # the corresponding node classes.
            if node_check is str:
                node_check = ScalarNode
            elif node_check is list:
                node_check = SequenceNode
            elif node_check is dict:
                node_check = MappingNode
            elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
                    and not isinstance(node_check, basestring) \
                    and node_check is not None:
                raise ResolverError("Invalid node checker: %s" % node_check)
            if not isinstance(index_check, (basestring, int)) \
                    and index_check is not None:
                raise ResolverError("Invalid index checker: %s" % index_check)
            new_path.append((node_check, index_check))
        if kind is str:
            kind = ScalarNode
        elif kind is list:
            kind = SequenceNode
        elif kind is dict:
            kind = MappingNode
        elif kind not in [ScalarNode, SequenceNode, MappingNode] \
                and kind is not None:
            raise ResolverError("Invalid node kind: %s" % kind)
        cls.yaml_path_resolvers[tuple(new_path), kind] = tag
    add_path_resolver = classmethod(add_path_resolver)
    def descend_resolver(self, current_node, current_index):
        """Push resolver state for entering `current_node` at position
        `current_index` of its parent (both None at the document root)."""
        if not self.yaml_path_resolvers:
            return
        exact_paths = {}
        prefix_paths = []
        if current_node:
            depth = len(self.resolver_prefix_paths)
            for path, kind in self.resolver_prefix_paths[-1]:
                if self.check_resolver_prefix(depth, path, kind,
                        current_node, current_index):
                    # A fully-matched path resolves now; a partial match is
                    # carried down for deeper nodes.
                    if len(path) > depth:
                        prefix_paths.append((path, kind))
                    else:
                        exact_paths[kind] = self.yaml_path_resolvers[path, kind]
        else:
            for path, kind in self.yaml_path_resolvers:
                if not path:
                    exact_paths[kind] = self.yaml_path_resolvers[path, kind]
                else:
                    prefix_paths.append((path, kind))
        self.resolver_exact_paths.append(exact_paths)
        self.resolver_prefix_paths.append(prefix_paths)
    def ascend_resolver(self):
        """Pop the state pushed by the matching descend_resolver() call."""
        if not self.yaml_path_resolvers:
            return
        self.resolver_exact_paths.pop()
        self.resolver_prefix_paths.pop()
    def check_resolver_prefix(self, depth, path, kind,
            current_node, current_index):
        """Return True if path step `depth-1` matches the given
        (node, index) pair; otherwise fall through and return None."""
        node_check, index_check = path[depth-1]
        if isinstance(node_check, basestring):
            if current_node.tag != node_check:
                return
        elif node_check is not None:
            if not isinstance(current_node, node_check):
                return
        if index_check is True and current_index is not None:
            return
        if (index_check is False or index_check is None) \
                and current_index is None:
            return
        if isinstance(index_check, basestring):
            if not (isinstance(current_index, ScalarNode)
                    and index_check == current_index.value):
                return
        elif isinstance(index_check, int) and not isinstance(index_check, bool):
            if index_check != current_index:
                return
        return True
    def resolve(self, kind, value, implicit):
        """
        Return the tag for a node of class `kind` with content `value`.
        `implicit` is a pair of flags; implicit scalar resolution is only
        attempted when the first (plain-style) flag is set.  Path resolvers
        are consulted next, then the DEFAULT_*_TAG fallbacks.
        """
        if kind is ScalarNode and implicit[0]:
            if value == u'':
                resolvers = self.yaml_implicit_resolvers.get(u'', [])
            else:
                resolvers = self.yaml_implicit_resolvers.get(value[0], [])
            resolvers += self.yaml_implicit_resolvers.get(None, [])
            for tag, regexp in resolvers:
                if regexp.match(value):
                    return tag
            implicit = implicit[1]
        if self.yaml_path_resolvers:
            exact_paths = self.resolver_exact_paths[-1]
            if kind in exact_paths:
                return exact_paths[kind]
            if None in exact_paths:
                return exact_paths[None]
        if kind is ScalarNode:
            return self.DEFAULT_SCALAR_TAG
        elif kind is SequenceNode:
            return self.DEFAULT_SEQUENCE_TAG
        elif kind is MappingNode:
            return self.DEFAULT_MAPPING_TAG
class Resolver(BaseResolver):
    """BaseResolver preloaded with the standard YAML 1.1 implicit tags
    (bool, float, int, merge, null, timestamp, value)."""
    pass
# Standard YAML 1.1 implicit scalar resolvers.  The final argument of each
# registration is the list of possible first characters, used as a fast
# dispatch index by BaseResolver.resolve().
Resolver.add_implicit_resolver(
        u'tag:yaml.org,2002:bool',
        re.compile(ur'''^(?:yes|Yes|YES|no|No|NO
                    |true|True|TRUE|false|False|FALSE
                    |on|On|ON|off|Off|OFF)$''', re.X),
        list(u'yYnNtTfFoO'))
Resolver.add_implicit_resolver(
        u'tag:yaml.org,2002:float',
        re.compile(ur'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)?
                    |\.[0-9_]+(?:[eE][-+][0-9]+)?
                    |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*
                    |[-+]?\.(?:inf|Inf|INF)
                    |\.(?:nan|NaN|NAN))$''', re.X),
        list(u'-+0123456789.'))
Resolver.add_implicit_resolver(
        u'tag:yaml.org,2002:int',
        re.compile(ur'''^(?:[-+]?0b[0-1_]+
                    |[-+]?0[0-7_]+
                    |[-+]?(?:0|[1-9][0-9_]*)
                    |[-+]?0x[0-9a-fA-F_]+
                    |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
        list(u'-+0123456789'))
Resolver.add_implicit_resolver(
        u'tag:yaml.org,2002:merge',
        re.compile(ur'^(?:<<)$'),
        [u'<'])
Resolver.add_implicit_resolver(
        u'tag:yaml.org,2002:null',
        re.compile(ur'''^(?: ~
                    |null|Null|NULL
                    | )$''', re.X),
        [u'~', u'n', u'N', u''])
Resolver.add_implicit_resolver(
        u'tag:yaml.org,2002:timestamp',
        re.compile(ur'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
                    |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
                     (?:[Tt]|[ \t]+)[0-9][0-9]?
                     :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)?
                     (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
        list(u'0123456789'))
Resolver.add_implicit_resolver(
        u'tag:yaml.org,2002:value',
        re.compile(ur'^(?:=)$'),
        [u'='])
# The following resolver is only for documentation purposes. It cannot work
# because plain scalars cannot start with '!', '&', or '*'.
Resolver.add_implicit_resolver(
        u'tag:yaml.org,2002:yaml',
        re.compile(ur'^(?:!|&|\*)$'),
        list(u'!&*'))
| mit |
matteoalessiocarrara/lib-figafind | src/filter_rules.py | 2 | 2575 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2016 Matteo Alessio Carrara <sw.matteoac@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
""" Regole per il filtro lib.fbfilter.src.fbfilter.FbFilter """
import logging
from lib.fbfilter.src import filter_components
import version
# Configurazione del sistema di logging
logger = logging.getLogger(version.lib_name)
logger.addHandler(logging.NullHandler())
# TODO Controllare versione librerie
class FigaFind(filter_components.FilterRules):
	"""Filter rule set that accepts only profiles whose gender (in the
	Italian translation) is "Donna" (woman)."""
	def required_property_custom_test(self, profile):
		# Keep this docstring in sync with the one in
		# filter_components.FilterRules.required_property_custom_test()
		"""
		Check whether the profile satisfies the required criteria, using
		custom tests.

		Returns True or False.

		Parameters:
			profile: lib.htmlfbapi.fbobj.Profile
				The profile to check.
		"""
		try:
			ok = (profile.gender['Italiano'] == "Donna")
		except KeyError:
			# No Italian translation of the gender string is available, so
			# the value cannot be evaluated: treat as not matching and log.
			ok = False
			logger.warning("Impossibile valutare il genere di %s, perché '%s' non ha una traduzione in Italiano", profile.nickname, profile.gender[profile.fbobj.my_profile.lang])
		return ok
class CazzoFind(filter_components.FilterRules):
	"""Filter rule set that accepts only profiles whose gender (in the
	Italian translation) is "Uomo" (man)."""
	def required_property_custom_test(self, profile):
		# Keep this docstring in sync with the one in
		# filter_components.FilterRules.required_property_custom_test()
		"""
		Check whether the profile satisfies the required criteria, using
		custom tests.

		Returns True or False.

		Parameters:
			profile: lib.htmlfbapi.fbobj.Profile
				The profile to check.
		"""
		try:
			ok = (profile.gender['Italiano'] == "Uomo")
		except KeyError:
			# No Italian translation of the gender string is available, so
			# the value cannot be evaluated: treat as not matching and log.
			ok = False
			logger.warning("Impossibile valutare il genere di %s, perché '%s' non ha una traduzione in Italiano", profile.nickname, profile.gender[profile.fbobj.my_profile.lang])
		return ok
| gpl-3.0 |
jjscarafia/odoo | addons/hr_recruitment/wizard/__init__.py | 381 | 1095 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-Today OpenERP (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_recruitment_create_partner_job
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
2013Commons/HUE-SHARK | build/env/lib/python2.7/site-packages/Django-1.2.3-py2.7.egg/django/db/models/fields/related.py | 6 | 54548 | from django.conf import settings
from django.db import connection, router, transaction
from django.db.backends import util
from django.db.models import signals, get_model
from django.db.models.fields import (AutoField, Field, IntegerField,
PositiveIntegerField, PositiveSmallIntegerField, FieldDoesNotExist)
from django.db.models.related import RelatedObject
from django.db.models.query import QuerySet
from django.db.models.query_utils import QueryWrapper
from django.utils.encoding import smart_unicode
from django.utils.translation import ugettext_lazy as _, string_concat, ungettext, ugettext
from django.utils.functional import curry
from django.core import exceptions
from django import forms
# Sentinel string callers use to declare a self-referential relation.
RECURSIVE_RELATIONSHIP_CONSTANT = 'self'
# Maps (app_label, model_name) -> [(cls, field, operation), ...] for string
# relations whose target model has not been loaded yet (see add_lazy_relation).
pending_lookups = {}
def add_lazy_relation(cls, field, relation, operation):
    """
    Adds a lookup on ``cls`` when a related field is defined using a string,
    i.e.::

        class MyModel(Model):
            fk = ForeignKey("AnotherModel")

    This string can be:

        * RECURSIVE_RELATIONSHIP_CONSTANT (i.e. "self") to indicate a
          recursive relation.

        * The name of a model (i.e "AnotherModel") to indicate another model
          in the same app.

        * An app-label and model name (i.e. "someapp.AnotherModel") to
          indicate another model in a different app.

    If the other model hasn't yet been loaded -- almost a given if you're
    using lazy relationships -- then the relation won't be set up until the
    class_prepared signal fires at the end of model initialization.

    ``operation`` is the work that must be performed once the relation can
    be resolved; it is called as ``operation(field, model, cls)``.
    """
    # Check for recursive relations
    if relation == RECURSIVE_RELATIONSHIP_CONSTANT:
        app_label = cls._meta.app_label
        model_name = cls.__name__
    else:
        # Look for an "app.Model" relation
        try:
            app_label, model_name = relation.split(".")
        except ValueError:
            # If we can't split, assume a model in current app
            app_label = cls._meta.app_label
            model_name = relation
        except AttributeError:
            # If it doesn't have a split it's actually a model class
            app_label = relation._meta.app_label
            model_name = relation._meta.object_name
    # Try to look up the related model, and if it's already loaded resolve the
    # string right away. If get_model returns None, it means that the related
    # model isn't loaded yet, so we need to pend the relation until the class
    # is prepared.
    model = get_model(app_label, model_name, False)
    if model:
        operation(field, model, cls)
    else:
        # Queue the work; do_pending_lookups() runs it on class_prepared.
        key = (app_label, model_name)
        value = (cls, field, operation)
        pending_lookups.setdefault(key, []).append(value)
def do_pending_lookups(sender, **kwargs):
    """
    Handle any pending relations to the sending model. Sent from class_prepared.
    """
    pending = pending_lookups.pop((sender._meta.app_label, sender.__name__), [])
    for model_class, rel_field, resolve in pending:
        resolve(rel_field, sender, model_class)
signals.class_prepared.connect(do_pending_lookups)
# HACK: mixin placed ahead of Field in the MRO by the concrete relation
# fields; it cooperates with Field via super() rather than subclassing it.
class RelatedField(object):
    """Behavior shared by all relation fields (ForeignKey, OneToOne, M2M):
    lazy resolution of string targets and pk-aware lookup preparation."""
    def contribute_to_class(self, cls, name):
        sup = super(RelatedField, self)
        # Store the opts for related_query_name()
        self.opts = cls._meta
        if hasattr(sup, 'contribute_to_class'):
            sup.contribute_to_class(cls, name)
        if not cls._meta.abstract and self.rel.related_name:
            # Interpolate %(class)s / %(app_label)s placeholders now that
            # the concrete model is known.
            self.rel.related_name = self.rel.related_name % {
                    'class': cls.__name__.lower(),
                    'app_label': cls._meta.app_label.lower(),
                }
        other = self.rel.to
        if isinstance(other, basestring) or other._meta.pk is None:
            # Target is a string (or not fully prepared yet): defer until
            # the target class is loaded.
            def resolve_related_class(field, model, cls):
                field.rel.to = model
                field.do_related_class(model, cls)
            add_lazy_relation(cls, self, other, resolve_related_class)
        else:
            self.do_related_class(other, cls)
    def set_attributes_from_rel(self):
        """Fill in name/verbose_name/field_name defaults from the target."""
        self.name = self.name or (self.rel.to._meta.object_name.lower() + '_' + self.rel.to._meta.pk.name)
        if self.verbose_name is None:
            self.verbose_name = self.rel.to._meta.verbose_name
        self.rel.field_name = self.rel.field_name or self.rel.to._meta.pk.name
    def do_related_class(self, other, cls):
        """Finish field setup once the target model class is available."""
        self.set_attributes_from_rel()
        self.related = RelatedObject(other, cls, self)
        if not cls._meta.abstract:
            self.contribute_to_related_class(other, self.related)
    def get_prep_lookup(self, lookup_type, value):
        """Prepare `value` for a lookup on this field, reducing model
        instances to their primary-key values via _pk_trace()."""
        if hasattr(value, 'prepare'):
            return value.prepare()
        if hasattr(value, '_prepare'):
            return value._prepare()
        # FIXME: lt and gt are explicitly allowed to make
        # get_(next/prev)_by_date work; other lookups are not allowed since that
        # gets messy pretty quick. This is a good candidate for some refactoring
        # in the future.
        if lookup_type in ['exact', 'gt', 'lt', 'gte', 'lte']:
            return self._pk_trace(value, 'get_prep_lookup', lookup_type)
        if lookup_type in ('range', 'in'):
            return [self._pk_trace(v, 'get_prep_lookup', lookup_type) for v in value]
        elif lookup_type == 'isnull':
            return []
        raise TypeError("Related Field has invalid lookup: %s" % lookup_type)
    def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
        """Database-level counterpart of get_prep_lookup(); also handles
        queryset/compilable values by wrapping their SQL."""
        if not prepared:
            value = self.get_prep_lookup(lookup_type, value)
        if hasattr(value, 'get_compiler'):
            value = value.get_compiler(connection=connection)
        if hasattr(value, 'as_sql') or hasattr(value, '_as_sql'):
            # If the value has a relabel_aliases method, it will need to
            # be invoked before the final SQL is evaluated
            if hasattr(value, 'relabel_aliases'):
                return value
            if hasattr(value, 'as_sql'):
                sql, params = value.as_sql()
            else:
                sql, params = value._as_sql(connection=connection)
            return QueryWrapper(('(%s)' % sql), params)
        # FIXME: lt and gt are explicitly allowed to make
        # get_(next/prev)_by_date work; other lookups are not allowed since that
        # gets messy pretty quick. This is a good candidate for some refactoring
        # in the future.
        if lookup_type in ['exact', 'gt', 'lt', 'gte', 'lte']:
            return [self._pk_trace(value, 'get_db_prep_lookup', lookup_type,
                            connection=connection, prepared=prepared)]
        if lookup_type in ('range', 'in'):
            return [self._pk_trace(v, 'get_db_prep_lookup', lookup_type,
                            connection=connection, prepared=prepared)
                    for v in value]
        elif lookup_type == 'isnull':
            return []
        raise TypeError("Related Field has invalid lookup: %s" % lookup_type)
    def _pk_trace(self, value, prep_func, lookup_type, **kwargs):
        # Value may be a primary key, or an object held in a relation.
        # If it is an object, then we need to get the primary key value for
        # that object. In certain conditions (especially one-to-one relations),
        # the primary key may itself be an object - so we need to keep drilling
        # down until we hit a value that can be used for a comparison.
        v = value
        try:
            while True:
                v = getattr(v, v._meta.pk.name)
        except AttributeError:
            # Reached a plain value (no _meta): stop drilling.
            pass
        except exceptions.ObjectDoesNotExist:
            v = None
        # Walk to the ultimate (non-relation) target field so its prep
        # function is the one applied.
        field = self
        while field.rel:
            if hasattr(field.rel, 'field_name'):
                field = field.rel.to._meta.get_field(field.rel.field_name)
            else:
                field = field.rel.to._meta.pk
        if lookup_type in ('range', 'in'):
            v = [v]
        v = getattr(field, prep_func)(lookup_type, v, **kwargs)
        if isinstance(v, list):
            v = v[0]
        return v
    def related_query_name(self):
        # This method defines the name that can be used to identify this
        # related object in a table-spanning query. It uses the lower-cased
        # object_name by default, but this can be overridden with the
        # "related_name" option.
        return self.rel.related_name or self.opts.object_name.lower()
class SingleRelatedObjectDescriptor(object):
    # This class provides the functionality that makes the related-object
    # managers available as attributes on a model class, for fields that have
    # a single "remote" value, on the class pointed to by a related field.
    # In the example "place.restaurant", the restaurant attribute is a
    # SingleRelatedObjectDescriptor instance.
    def __init__(self, related):
        self.related = related
        self.cache_name = related.get_cache_name()
    def __get__(self, instance, instance_type=None):
        """Fetch (and cache on the instance) the single related object."""
        if instance is None:
            return self
        try:
            return getattr(instance, self.cache_name)
        except AttributeError:
            # Cache miss: query the related model by this instance's pk.
            params = {'%s__pk' % self.related.field.name: instance._get_pk_val()}
            db = router.db_for_read(self.related.model, instance=instance)
            rel_obj = self.related.model._base_manager.using(db).get(**params)
            setattr(instance, self.cache_name, rel_obj)
            return rel_obj
    def __set__(self, instance, value):
        """Assign the related object, validating type/null and db routing."""
        if instance is None:
            raise AttributeError("%s must be accessed via instance" % self.related.opts.object_name)
        # The similarity of the code below to the code in
        # ReverseSingleRelatedObjectDescriptor is annoying, but there's a bunch
        # of small differences that would make a common base class convoluted.
        # If null=True, we can assign null here, but otherwise the value needs
        # to be an instance of the related class.
        if value is None and self.related.field.null == False:
            raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
                    (instance._meta.object_name, self.related.get_accessor_name()))
        elif value is not None and not isinstance(value, self.related.model):
            raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
                    (value, instance._meta.object_name,
                     self.related.get_accessor_name(), self.related.opts.object_name))
        elif value is not None:
            # Propagate database state so both objects end up on one db, and
            # refuse cross-database assignments the router disallows.
            if instance._state.db is None:
                instance._state.db = router.db_for_write(instance.__class__, instance=value)
            elif value._state.db is None:
                value._state.db = router.db_for_write(value.__class__, instance=instance)
            elif value._state.db is not None and instance._state.db is not None:
                if not router.allow_relation(value, instance):
                    raise ValueError('Cannot assign "%r": instance is on database "%s", value is on database "%s"' %
                        (value, instance._state.db, value._state.db))
        # Set the value of the related field to the value of the related object's related field
        setattr(value, self.related.field.attname, getattr(instance, self.related.field.rel.get_related_field().attname))
        # Since we already know what the related object is, seed the related
        # object caches now, too. This avoids another db hit if you get the
        # object you just set.
        setattr(instance, self.cache_name, value)
        setattr(value, self.related.field.get_cache_name(), instance)
class ReverseSingleRelatedObjectDescriptor(object):
    # This class provides the functionality that makes the related-object
    # managers available as attributes on a model class, for fields that have
    # a single "remote" value, on the class that defines the related field.
    # In the example "choice.poll", the poll attribute is a
    # ReverseSingleRelatedObjectDescriptor instance.
    def __init__(self, field_with_rel):
        self.field = field_with_rel
    def __get__(self, instance, instance_type=None):
        """Fetch (and cache on the instance) the object this FK points to."""
        if instance is None:
            return self
        cache_name = self.field.get_cache_name()
        try:
            return getattr(instance, cache_name)
        except AttributeError:
            val = getattr(instance, self.field.attname)
            if val is None:
                # If NULL is an allowed value, return it.
                if self.field.null:
                    return None
                raise self.field.rel.to.DoesNotExist
            other_field = self.field.rel.get_related_field()
            if other_field.rel:
                params = {'%s__pk' % self.field.rel.field_name: val}
            else:
                params = {'%s__exact' % self.field.rel.field_name: val}
            # If the related manager indicates that it should be used for
            # related fields, respect that.
            rel_mgr = self.field.rel.to._default_manager
            db = router.db_for_read(self.field.rel.to, instance=instance)
            if getattr(rel_mgr, 'use_for_related_fields', False):
                rel_obj = rel_mgr.using(db).get(**params)
            else:
                rel_obj = QuerySet(self.field.rel.to).using(db).get(**params)
            setattr(instance, cache_name, rel_obj)
            return rel_obj
    def __set__(self, instance, value):
        """Assign the related object, validating type/null, routing the
        databases, and keeping the per-instance caches consistent."""
        if instance is None:
            # Bug fix: this previously read ``self._field.name`` -- an
            # attribute that does not exist (it is ``self.field``) -- so it
            # raised a bare AttributeError instead of the intended message.
            raise AttributeError("%s must be accessed via instance" % self.field.name)
        # If null=True, we can assign null here, but otherwise the value needs
        # to be an instance of the related class.
        if value is None and self.field.null == False:
            raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
                    (instance._meta.object_name, self.field.name))
        elif value is not None and not isinstance(value, self.field.rel.to):
            raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
                    (value, instance._meta.object_name,
                     self.field.name, self.field.rel.to._meta.object_name))
        elif value is not None:
            # Propagate database state so both objects end up on one db, and
            # refuse cross-database assignments the router disallows.
            if instance._state.db is None:
                instance._state.db = router.db_for_write(instance.__class__, instance=value)
            elif value._state.db is None:
                value._state.db = router.db_for_write(value.__class__, instance=instance)
            elif value._state.db is not None and instance._state.db is not None:
                if not router.allow_relation(value, instance):
                    raise ValueError('Cannot assign "%r": instance is on database "%s", value is on database "%s"' %
                        (value, instance._state.db, value._state.db))
        # If we're setting the value of a OneToOneField to None, we need to clear
        # out the cache on any old related object. Otherwise, deleting the
        # previously-related object will also cause this object to be deleted,
        # which is wrong.
        if value is None:
            # Look up the previously-related object, which may still be available
            # since we've not yet cleared out the related field.
            # Use the cache directly, instead of the accessor; if we haven't
            # populated the cache, then we don't care - we're only accessing
            # the object to invalidate the accessor cache, so there's no
            # need to populate the cache just to expire it again.
            related = getattr(instance, self.field.get_cache_name(), None)
            # If we've got an old related object, we need to clear out its
            # cache. This cache also might not exist if the related object
            # hasn't been accessed yet.
            if related:
                cache_name = self.field.related.get_cache_name()
                try:
                    delattr(related, cache_name)
                except AttributeError:
                    pass
        # Set the value of the related field
        try:
            val = getattr(value, self.field.rel.get_related_field().attname)
        except AttributeError:
            val = None
        setattr(instance, self.field.attname, val)
        # Since we already know what the related object is, seed the related
        # object cache now, too. This avoids another db hit if you get the
        # object you just set.
        setattr(instance, self.field.get_cache_name(), value)
class ForeignRelatedObjectsDescriptor(object):
    # This class provides the functionality that makes the related-object
    # managers available as attributes on a model class, for fields that have
    # multiple "remote" values and have a ForeignKey pointed at them by
    # some other model. In the example "poll.choice_set", the choice_set
    # attribute is a ForeignRelatedObjectsDescriptor instance.
    def __init__(self, related):
        self.related = related   # RelatedObject instance
    def __get__(self, instance, instance_type=None):
        """Return a RelatedManager bound to `instance` (default manager)."""
        if instance is None:
            return self
        return self.create_manager(instance,
                self.related.model._default_manager.__class__)
    def __set__(self, instance, value):
        """Replace the related set with `value` (an iterable of objects)."""
        if instance is None:
            raise AttributeError("Manager must be accessed via instance")
        manager = self.__get__(instance)
        # If the foreign key can support nulls, then completely clear the related set.
        # Otherwise, just move the named objects into the set.
        if self.related.field.null:
            manager.clear()
        manager.add(*value)
    def delete_manager(self, instance):
        """
        Returns a queryset based on the related model's base manager (rather
        than the default manager, as returned by __get__). Used by
        Model.delete().
        """
        return self.create_manager(instance,
                self.related.model._base_manager.__class__)
    def create_manager(self, instance, superclass):
        """
        Creates the managers used by other methods (__get__() and delete()).
        """
        rel_field = self.related.field
        rel_model = self.related.model
        class RelatedManager(superclass):
            def get_query_set(self):
                db = self._db or router.db_for_read(rel_model, instance=instance)
                return superclass.get_query_set(self).using(db).filter(**(self.core_filters))
            def add(self, *objs):
                # Point each object's FK at `instance` and save it.
                for obj in objs:
                    if not isinstance(obj, self.model):
                        raise TypeError("'%s' instance expected" % self.model._meta.object_name)
                    setattr(obj, rel_field.name, instance)
                    obj.save()
            add.alters_data = True
            def create(self, **kwargs):
                kwargs.update({rel_field.name: instance})
                db = router.db_for_write(rel_model, instance=instance)
                return super(RelatedManager, self).using(db).create(**kwargs)
            create.alters_data = True
            def get_or_create(self, **kwargs):
                # Update kwargs with the related object that this
                # ForeignRelatedObjectsDescriptor knows about.
                kwargs.update({rel_field.name: instance})
                db = router.db_for_write(rel_model, instance=instance)
                return super(RelatedManager, self).using(db).get_or_create(**kwargs)
            get_or_create.alters_data = True
            # remove() and clear() are only provided if the ForeignKey can have a value of null.
            if rel_field.null:
                def remove(self, *objs):
                    val = getattr(instance, rel_field.rel.get_related_field().attname)
                    for obj in objs:
                        # Is obj actually part of this descriptor set?
                        if getattr(obj, rel_field.attname) == val:
                            setattr(obj, rel_field.name, None)
                            obj.save()
                        else:
                            raise rel_field.rel.to.DoesNotExist("%r is not related to %r." % (obj, instance))
                remove.alters_data = True
                def clear(self):
                    # Detach every related object by nulling its FK.
                    for obj in self.all():
                        setattr(obj, rel_field.name, None)
                        obj.save()
                clear.alters_data = True
        manager = RelatedManager()
        attname = rel_field.rel.get_related_field().name
        manager.core_filters = {'%s__%s' % (rel_field.name, attname):
                getattr(instance, attname)}
        manager.model = self.related.model
        return manager
def create_many_related_manager(superclass, rel=False):
"""Creates a manager that subclasses 'superclass' (which is a Manager)
and adds behavior for many-to-many related objects."""
through = rel.through
class ManyRelatedManager(superclass):
def __init__(self, model=None, core_filters=None, instance=None, symmetrical=None,
join_table=None, source_field_name=None, target_field_name=None,
reverse=False):
super(ManyRelatedManager, self).__init__()
self.core_filters = core_filters
self.model = model
self.symmetrical = symmetrical
self.instance = instance
self.source_field_name = source_field_name
self.target_field_name = target_field_name
self.through = through
self._pk_val = self.instance.pk
self.reverse = reverse
if self._pk_val is None:
raise ValueError("%r instance needs to have a primary key value before a many-to-many relationship can be used." % instance.__class__.__name__)
def get_query_set(self):
db = self._db or router.db_for_read(self.instance.__class__, instance=self.instance)
return superclass.get_query_set(self).using(db)._next_is_sticky().filter(**(self.core_filters))
# If the ManyToMany relation has an intermediary model,
# the add and remove methods do not exist.
if rel.through._meta.auto_created:
def add(self, *objs):
self._add_items(self.source_field_name, self.target_field_name, *objs)
# If this is a symmetrical m2m relation to self, add the mirror entry in the m2m table
if self.symmetrical:
self._add_items(self.target_field_name, self.source_field_name, *objs)
add.alters_data = True
def remove(self, *objs):
self._remove_items(self.source_field_name, self.target_field_name, *objs)
# If this is a symmetrical m2m relation to self, remove the mirror entry in the m2m table
if self.symmetrical:
self._remove_items(self.target_field_name, self.source_field_name, *objs)
remove.alters_data = True
def clear(self):
self._clear_items(self.source_field_name)
# If this is a symmetrical m2m relation to self, clear the mirror entry in the m2m table
if self.symmetrical:
self._clear_items(self.target_field_name)
clear.alters_data = True
def create(self, **kwargs):
# This check needs to be done here, since we can't later remove this
# from the method lookup table, as we do with add and remove.
if not rel.through._meta.auto_created:
opts = through._meta
raise AttributeError("Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
db = router.db_for_write(self.instance.__class__, instance=self.instance)
new_obj = super(ManyRelatedManager, self).using(db).create(**kwargs)
self.add(new_obj)
return new_obj
create.alters_data = True
def get_or_create(self, **kwargs):
db = router.db_for_write(self.instance.__class__, instance=self.instance)
obj, created = \
super(ManyRelatedManager, self).using(db).get_or_create(**kwargs)
# We only need to add() if created because if we got an object back
# from get() then the relationship already exists.
if created:
self.add(obj)
return obj, created
get_or_create.alters_data = True
def _add_items(self, source_field_name, target_field_name, *objs):
    """
    Insert join-table rows linking this instance to *objs*, sending the
    m2m_changed pre/post "add" signals around the inserts.
    """
    # join_table: name of the m2m link table
    # source_field_name: the PK fieldname in join_table for the source object
    # target_field_name: the PK fieldname in join_table for the target object
    # *objs - objects to add. Either object instances, or primary keys of object instances.
    # If there aren't any objects, there is nothing to do.
    from django.db.models import Model
    if objs:
        new_ids = set()
        for obj in objs:
            if isinstance(obj, self.model):
                # Cross-database links are forbidden unless the router
                # explicitly allows the relation.
                if not router.allow_relation(obj, self.instance):
                    raise ValueError('Cannot add "%r": instance is on database "%s", value is on database "%s"' %
                                       (obj, self.instance._state.db, obj._state.db))
                new_ids.add(obj.pk)
            elif isinstance(obj, Model):
                # A model instance, but of the wrong model class.
                raise TypeError("'%s' instance expected" % self.model._meta.object_name)
            else:
                # Anything else is assumed to be a raw primary-key value.
                new_ids.add(obj)
        # NOTE(review): routing is keyed on self.through.__class__ (the model
        # metaclass) rather than self.through itself -- confirm intended.
        db = router.db_for_write(self.through.__class__, instance=self.instance)
        vals = self.through._default_manager.using(db).values_list(target_field_name, flat=True)
        vals = vals.filter(**{
            source_field_name: self._pk_val,
            '%s__in' % target_field_name: new_ids,
        })
        # Drop ids that are already linked so only missing rows get created.
        new_ids = new_ids - set(vals)
        if self.reverse or source_field_name == self.source_field_name:
            # Don't send the signal when we are inserting the
            # duplicate data row for symmetrical reverse entries.
            signals.m2m_changed.send(sender=rel.through, action='pre_add',
                instance=self.instance, reverse=self.reverse,
                model=self.model, pk_set=new_ids)
        # Add the ones that aren't there already
        for obj_id in new_ids:
            self.through._default_manager.using(db).create(**{
                '%s_id' % source_field_name: self._pk_val,
                '%s_id' % target_field_name: obj_id,
            })
        if self.reverse or source_field_name == self.source_field_name:
            # Don't send the signal when we are inserting the
            # duplicate data row for symmetrical reverse entries.
            signals.m2m_changed.send(sender=rel.through, action='post_add',
                instance=self.instance, reverse=self.reverse,
                model=self.model, pk_set=new_ids)
def _remove_items(self, source_field_name, target_field_name, *objs):
    """
    Delete join-table rows linking this instance to *objs*, sending the
    m2m_changed pre/post "remove" signals around the delete.
    """
    # source_col_name: the PK colname in join_table for the source object
    # target_col_name: the PK colname in join_table for the target object
    # *objs - objects to remove
    # If there aren't any objects, there is nothing to do.
    if objs:
        # Check that all the objects are of the right type
        old_ids = set()
        for obj in objs:
            if isinstance(obj, self.model):
                old_ids.add(obj.pk)
            else:
                # Anything else is treated as a raw primary-key value.
                old_ids.add(obj)
        if self.reverse or source_field_name == self.source_field_name:
            # Don't send the signal when we are deleting the
            # duplicate data row for symmetrical reverse entries.
            signals.m2m_changed.send(sender=rel.through, action="pre_remove",
                instance=self.instance, reverse=self.reverse,
                model=self.model, pk_set=old_ids)
        # Remove the specified objects from the join table
        db = router.db_for_write(self.through.__class__, instance=self.instance)
        self.through._default_manager.using(db).filter(**{
            source_field_name: self._pk_val,
            '%s__in' % target_field_name: old_ids
        }).delete()
        if self.reverse or source_field_name == self.source_field_name:
            # Don't send the signal when we are deleting the
            # duplicate data row for symmetrical reverse entries.
            signals.m2m_changed.send(sender=rel.through, action="post_remove",
                instance=self.instance, reverse=self.reverse,
                model=self.model, pk_set=old_ids)
def _clear_items(self, source_field_name):
    """
    Delete every join-table row whose source column matches this instance,
    sending the m2m_changed pre/post "clear" signals around the delete.
    """
    # source_col_name: the PK colname in join_table for the source object
    if self.reverse or source_field_name == self.source_field_name:
        # Don't send the signal when we are clearing the
        # duplicate data rows for symmetrical reverse entries.
        signals.m2m_changed.send(sender=rel.through, action="pre_clear",
            instance=self.instance, reverse=self.reverse,
            model=self.model, pk_set=None)
    db = router.db_for_write(self.through.__class__, instance=self.instance)
    self.through._default_manager.using(db).filter(**{
        source_field_name: self._pk_val
    }).delete()
    if self.reverse or source_field_name == self.source_field_name:
        # Don't send the signal when we are clearing the
        # duplicate data rows for symmetrical reverse entries.
        signals.m2m_changed.send(sender=rel.through, action="post_clear",
            instance=self.instance, reverse=self.reverse,
            model=self.model, pk_set=None)
return ManyRelatedManager
class ManyRelatedObjectsDescriptor(object):
    """
    Accessor for the reverse side of a ManyToManyField, e.g. the
    ``publication.article_set`` attribute.
    """
    # This class provides the functionality that makes the related-object
    # managers available as attributes on a model class, for fields that have
    # multiple "remote" values and have a ManyToManyField pointed at them by
    # some other model (rather than having a ManyToManyField themselves).
    # In the example "publication.article_set", the article_set attribute is a
    # ManyRelatedObjectsDescriptor instance.
    def __init__(self, related):
        self.related = related   # RelatedObject instance
    def __get__(self, instance, instance_type=None):
        # Accessed on the class itself: hand back the descriptor unchanged.
        if instance is None:
            return self
        # Dynamically create a class that subclasses the related
        # model's default manager.
        rel_model = self.related.model
        superclass = rel_model._default_manager.__class__
        RelatedManager = create_many_related_manager(superclass, self.related.field.rel)
        # Reverse access: source/target field names are swapped relative to
        # the forward descriptor (ReverseManyRelatedObjectsDescriptor).
        manager = RelatedManager(
            model=rel_model,
            core_filters={'%s__pk' % self.related.field.name: instance._get_pk_val()},
            instance=instance,
            symmetrical=False,
            source_field_name=self.related.field.m2m_reverse_field_name(),
            target_field_name=self.related.field.m2m_field_name(),
            reverse=True
        )
        return manager
    def __set__(self, instance, value):
        """Replace the relation's contents (clear, then add *value*)."""
        if instance is None:
            raise AttributeError("Manager must be accessed via instance")
        if not self.related.field.rel.through._meta.auto_created:
            opts = self.related.field.rel.through._meta
            raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
        manager = self.__get__(instance)
        manager.clear()
        manager.add(*value)
class ReverseManyRelatedObjectsDescriptor(object):
    """
    Accessor for the forward side of a ManyToManyField, e.g. the
    ``article.publications`` attribute on the model declaring the field.
    """
    # This class provides the functionality that makes the related-object
    # managers available as attributes on a model class, for fields that have
    # multiple "remote" values and have a ManyToManyField defined in their
    # model (rather than having another model pointed *at* them).
    # In the example "article.publications", the publications attribute is a
    # ReverseManyRelatedObjectsDescriptor instance.
    def __init__(self, m2m_field):
        self.field = m2m_field
    def _through(self):
        # through is provided so that you have easy access to the through
        # model (Book.authors.through) for inlines, etc. This is done as
        # a property to ensure that the fully resolved value is returned.
        return self.field.rel.through
    through = property(_through)
    def __get__(self, instance, instance_type=None):
        # Accessed on the class itself: hand back the descriptor unchanged.
        if instance is None:
            return self
        # Dynamically create a class that subclasses the related
        # model's default manager.
        rel_model=self.field.rel.to
        superclass = rel_model._default_manager.__class__
        RelatedManager = create_many_related_manager(superclass, self.field.rel)
        manager = RelatedManager(
            model=rel_model,
            core_filters={'%s__pk' % self.field.related_query_name(): instance._get_pk_val()},
            instance=instance,
            symmetrical=self.field.rel.symmetrical,
            source_field_name=self.field.m2m_field_name(),
            target_field_name=self.field.m2m_reverse_field_name(),
            reverse=False
        )
        return manager
    def __set__(self, instance, value):
        """Replace the relation's contents (clear, then add *value*)."""
        if instance is None:
            raise AttributeError("Manager must be accessed via instance")
        if not self.field.rel.through._meta.auto_created:
            opts = self.field.rel.through._meta
            raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
        manager = self.__get__(instance)
        manager.clear()
        manager.add(*value)
class ManyToOneRel(object):
    """
    Relation metadata for a ForeignKey: records the target model/field and
    the options (related_name, limit_choices_to, ...) given on the field.
    """
    def __init__(self, to, field_name, related_name=None,
            limit_choices_to=None, lookup_overrides=None, parent_link=False):
        try:
            to._meta
        except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
            assert isinstance(to, basestring), "'to' must be either a model, a model name or the string %r" % RECURSIVE_RELATIONSHIP_CONSTANT
        self.to, self.field_name = to, field_name
        self.related_name = related_name
        if limit_choices_to is None:
            limit_choices_to = {}
        self.limit_choices_to = limit_choices_to
        self.lookup_overrides = lookup_overrides or {}
        # The reverse side of a ForeignKey can hold many objects.
        self.multiple = True
        self.parent_link = parent_link
    def is_hidden(self):
        "Should the related object be hidden?"
        # A related_name ending in '+' suppresses the reverse accessor.
        return self.related_name and self.related_name[-1] == '+'
    def get_related_field(self):
        """
        Returns the Field in the 'to' object to which this relationship is
        tied.
        """
        data = self.to._meta.get_field_by_name(self.field_name)
        if not data[2]:
            # data[2] is falsy when the name did not resolve to a direct
            # field on the model -- presumably the 'direct' flag of
            # get_field_by_name; verify against the Options API.
            raise FieldDoesNotExist("No related field named '%s'" %
                                    self.field_name)
        return data[0]
class OneToOneRel(ManyToOneRel):
    """
    Relation metadata for a OneToOneField; identical to ManyToOneRel except
    that the reverse side holds at most one object.
    """
    def __init__(self, to, field_name, related_name=None,
                 limit_choices_to=None, lookup_overrides=None,
                 parent_link=False):
        ManyToOneRel.__init__(
            self, to, field_name,
            related_name=related_name,
            limit_choices_to=limit_choices_to,
            lookup_overrides=lookup_overrides,
            parent_link=parent_link,
        )
        # Unlike a plain ForeignKey, the reverse accessor yields a single
        # object rather than a collection.
        self.multiple = False
class ManyToManyRel(object):
    """
    Relation metadata for a ManyToManyField: the target model plus the
    options (related_name, limit_choices_to, symmetry, intermediary model)
    declared on the field.
    """
    def __init__(self, to, related_name=None, limit_choices_to=None,
                 symmetrical=True, through=None):
        self.to = to
        self.related_name = related_name
        self.limit_choices_to = {} if limit_choices_to is None else limit_choices_to
        self.symmetrical = symmetrical
        # The reverse side of an m2m always holds many objects.
        self.multiple = True
        self.through = through

    def is_hidden(self):
        "Should the related object be hidden?"
        # A related_name ending in '+' suppresses the reverse accessor.
        return self.related_name and self.related_name[-1] == '+'

    def get_related_field(self):
        """
        Returns the field in the 'to' object to which this relationship is
        tied (this is always the primary key on the target model). Provided
        for symmetry with ManyToOneRel.
        """
        return self.to._meta.pk
class ForeignKey(RelatedField, Field):
    """
    A many-to-one relation: each instance of the declaring model references
    (at most) one instance of ``to``.
    """
    empty_strings_allowed = False
    default_error_messages = {
        'invalid': _('Model %(model)s with pk %(pk)r does not exist.')
    }
    description = _("Foreign Key (type determined by related field)")

    def __init__(self, to, to_field=None, rel_class=ManyToOneRel, **kwargs):
        try:
            to_name = to._meta.object_name.lower()
        except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
            assert isinstance(to, basestring), "%s(%r) is invalid. First parameter to ForeignKey must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
        else:
            assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
            # For backwards compatibility purposes, we need to *try* and set
            # the to_field during FK construction. It won't be guaranteed to
            # be correct until contribute_to_class is called. Refs #12190.
            to_field = to_field or (to._meta.pk and to._meta.pk.name)
        kwargs['verbose_name'] = kwargs.get('verbose_name', None)
        # Foreign keys are indexed by default because they are used in joins.
        if 'db_index' not in kwargs:
            kwargs['db_index'] = True
        kwargs['rel'] = rel_class(to, to_field,
            related_name=kwargs.pop('related_name', None),
            limit_choices_to=kwargs.pop('limit_choices_to', None),
            lookup_overrides=kwargs.pop('lookup_overrides', None),
            parent_link=kwargs.pop('parent_link', False))
        Field.__init__(self, **kwargs)

    def validate(self, value, model_instance):
        """Validate that *value* is the key of an existing related object."""
        if self.rel.parent_link:
            # Parent links (multi-table inheritance) are maintained by the ORM.
            return
        super(ForeignKey, self).validate(value, model_instance)
        if value is None:
            return
        qs = self.rel.to._default_manager.filter(**{self.rel.field_name: value})
        qs = qs.complex_filter(self.rel.limit_choices_to)
        if not qs.exists():
            raise exceptions.ValidationError(self.error_messages['invalid'] % {
                'model': self.rel.to._meta.verbose_name, 'pk': value})

    def get_attname(self):
        # The model attribute holds the raw id, e.g. 'author_id'.
        return '%s_id' % self.name

    def get_validator_unique_lookup_type(self):
        return '%s__%s__exact' % (self.name, self.rel.get_related_field().name)

    def get_default(self):
        "Here we check if the default value is an object and return the to_field if so."
        field_default = super(ForeignKey, self).get_default()
        if isinstance(field_default, self.rel.to):
            return getattr(field_default, self.rel.get_related_field().attname)
        return field_default

    def get_db_prep_save(self, value, connection):
        """Convert *value* to the target field's database representation."""
        # Fixed: compare to None with 'is' (identity), not '==' (PEP 8).
        # The empty string is treated as "no relation" as well.
        if value == '' or value is None:
            return None
        else:
            return self.rel.get_related_field().get_db_prep_save(value,
                connection=connection)

    def value_to_string(self, obj):
        if not obj:
            # In required many-to-one fields with only one available choice,
            # select that one available choice. Note: For SelectFields
            # we have to check that the length of choices is *2*, not 1,
            # because SelectFields always have an initial "blank" value.
            if not self.blank and self.choices:
                choice_list = self.get_choices_default()
                if len(choice_list) == 2:
                    return smart_unicode(choice_list[1][0])
        return Field.value_to_string(self, obj)

    def contribute_to_class(self, cls, name):
        super(ForeignKey, self).contribute_to_class(cls, name)
        # Forward accessor: instance.<name> resolves the related object.
        setattr(cls, self.name, ReverseSingleRelatedObjectDescriptor(self))
        if isinstance(self.rel.to, basestring):
            target = self.rel.to
        else:
            target = self.rel.to._meta.db_table
        cls._meta.duplicate_targets[self.column] = (target, "o2m")

    def contribute_to_related_class(self, cls, related):
        # Internal FK's - i.e., those with a related name ending with '+' -
        # don't get a related descriptor.
        if not self.rel.is_hidden():
            setattr(cls, related.get_accessor_name(), ForeignRelatedObjectsDescriptor(related))
        if self.rel.field_name is None:
            self.rel.field_name = cls._meta.pk.name

    def formfield(self, **kwargs):
        db = kwargs.pop('using', None)
        defaults = {
            'form_class': forms.ModelChoiceField,
            'queryset': self.rel.to._default_manager.using(db).complex_filter(self.rel.limit_choices_to),
            'to_field_name': self.rel.field_name,
        }
        defaults.update(kwargs)
        return super(ForeignKey, self).formfield(**defaults)

    def db_type(self, connection):
        # The database column type of a ForeignKey is the column type
        # of the field to which it points. An exception is if the ForeignKey
        # points to an AutoField/PositiveIntegerField/PositiveSmallIntegerField,
        # in which case the column type is simply that of an IntegerField.
        # If the database needs similar types for key fields however, the only
        # thing we can do is making AutoField an IntegerField.
        rel_field = self.rel.get_related_field()
        if (isinstance(rel_field, AutoField) or
                (not connection.features.related_fields_match_type and
                 isinstance(rel_field, (PositiveIntegerField,
                                        PositiveSmallIntegerField)))):
            return IntegerField().db_type(connection=connection)
        return rel_field.db_type(connection=connection)
class OneToOneField(ForeignKey):
    """
    A OneToOneField is essentially the same as a ForeignKey, with the
    exception that it always carries a "unique" constraint and the reverse
    relation always returns the object pointed to (since there will only
    ever be one), rather than returning a list.
    """
    description = _("One-to-one relationship")

    def __init__(self, to, to_field=None, **kwargs):
        # Force the unique constraint that distinguishes o2o from FK.
        kwargs['unique'] = True
        ForeignKey.__init__(self, to, to_field, OneToOneRel, **kwargs)

    def contribute_to_related_class(self, cls, related):
        # The reverse accessor yields one object, so install the
        # single-object descriptor instead of a manager descriptor.
        setattr(cls, related.get_accessor_name(),
                SingleRelatedObjectDescriptor(related))

    def formfield(self, **kwargs):
        # Parent links are an implementation detail of model inheritance
        # and never get a form field.
        if self.rel.parent_link:
            return None
        return ForeignKey.formfield(self, **kwargs)

    def save_form_data(self, instance, data):
        # Model instances go through the descriptor; raw pks go straight
        # onto the '<name>_id' attribute.
        target = self.name if isinstance(data, self.rel.to) else self.attname
        setattr(instance, target, data)
def create_many_to_many_intermediary_model(field, klass):
    """
    Build the automatic intermediary ("join table") model for a
    ManyToManyField declared on *klass* without an explicit ``through``.
    """
    from django.db import models
    managed = True
    if isinstance(field.rel.to, basestring) and field.rel.to != RECURSIVE_RELATIONSHIP_CONSTANT:
        # Target is still a lazy string reference; resolve 'managed' later,
        # once the real model class becomes available.
        to_model = field.rel.to
        to = to_model.split('.')[-1]
        def set_managed(field, model, cls):
            field.rel.through._meta.managed = model._meta.managed or cls._meta.managed
        add_lazy_relation(klass, field, to_model, set_managed)
    elif isinstance(field.rel.to, basestring):
        # Recursive relation ('self').
        to = klass._meta.object_name
        to_model = klass
        managed = klass._meta.managed
    else:
        to = field.rel.to._meta.object_name
        to_model = field.rel.to
        managed = klass._meta.managed or to_model._meta.managed
    name = '%s_%s' % (klass._meta.object_name, field.name)
    if field.rel.to == RECURSIVE_RELATIONSHIP_CONSTANT or to == klass._meta.object_name:
        # Self-referential join table: disambiguate the two FK columns.
        from_ = 'from_%s' % to.lower()
        to = 'to_%s' % to.lower()
    else:
        from_ = klass._meta.object_name.lower()
        to = to.lower()
    meta = type('Meta', (object,), {
        'db_table': field._get_m2m_db_table(klass._meta),
        'managed': managed,
        'auto_created': klass,
        'app_label': klass._meta.app_label,
        'unique_together': (from_, to),
        'verbose_name': '%(from)s-%(to)s relationship' % {'from': from_, 'to': to},
        'verbose_name_plural': '%(from)s-%(to)s relationships' % {'from': from_, 'to': to},
    })
    # Construct and return the new class.
    return type(name, (models.Model,), {
        'Meta': meta,
        '__module__': klass.__module__,
        from_: models.ForeignKey(klass, related_name='%s+' % name),
        to: models.ForeignKey(to_model, related_name='%s+' % name)
    })
class ManyToManyField(RelatedField, Field):
    """
    A many-to-many relation, stored via an (auto-created or explicitly
    supplied ``through``) intermediary join model.
    """
    description = _("Many-to-many relationship")
    def __init__(self, to, **kwargs):
        try:
            assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
        except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
            assert isinstance(to, basestring), "%s(%r) is invalid. First parameter to ManyToManyField must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
        kwargs['verbose_name'] = kwargs.get('verbose_name', None)
        kwargs['rel'] = ManyToManyRel(to,
            related_name=kwargs.pop('related_name', None),
            limit_choices_to=kwargs.pop('limit_choices_to', None),
            # Only a relation to 'self' is symmetrical by default.
            symmetrical=kwargs.pop('symmetrical', to==RECURSIVE_RELATIONSHIP_CONSTANT),
            through=kwargs.pop('through', None))
        self.db_table = kwargs.pop('db_table', None)
        if kwargs['rel'].through is not None:
            assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used."
        Field.__init__(self, **kwargs)
        msg = _('Hold down "Control", or "Command" on a Mac, to select more than one.')
        self.help_text = string_concat(self.help_text, ' ', msg)
    def get_choices_default(self):
        # m2m fields never offer a blank choice.
        return Field.get_choices(self, include_blank=False)
    def _get_m2m_db_table(self, opts):
        "Function that can be curried to provide the m2m table name for this relation"
        if self.rel.through is not None:
            return self.rel.through._meta.db_table
        elif self.db_table:
            return self.db_table
        else:
            # Auto-generated name, truncated to the backend's limit.
            return util.truncate_name('%s_%s' % (opts.db_table, self.name),
                                      connection.ops.max_name_length())
    def _get_m2m_attr(self, related, attr):
        "Function that can be curried to provide the source accessor or DB column name for the m2m table"
        # Result is memoized on the field instance per 'attr'.
        cache_attr = '_m2m_%s_cache' % attr
        if hasattr(self, cache_attr):
            return getattr(self, cache_attr)
        for f in self.rel.through._meta.fields:
            if hasattr(f,'rel') and f.rel and f.rel.to == related.model:
                setattr(self, cache_attr, getattr(f, attr))
                return getattr(self, cache_attr)
    def _get_m2m_reverse_attr(self, related, attr):
        "Function that can be curried to provide the related accessor or DB column name for the m2m table"
        # Result is memoized on the field instance per 'attr'.
        cache_attr = '_m2m_reverse_%s_cache' % attr
        if hasattr(self, cache_attr):
            return getattr(self, cache_attr)
        found = False
        for f in self.rel.through._meta.fields:
            if hasattr(f,'rel') and f.rel and f.rel.to == related.parent_model:
                if related.model == related.parent_model:
                    # If this is an m2m-intermediate to self,
                    # the first foreign key you find will be
                    # the source column. Keep searching for
                    # the second foreign key.
                    if found:
                        setattr(self, cache_attr, getattr(f, attr))
                        break
                    else:
                        found = True
                else:
                    setattr(self, cache_attr, getattr(f, attr))
                    break
        return getattr(self, cache_attr)
    def isValidIDList(self, field_data, all_data):
        "Validates that the value is a valid list of foreign keys"
        mod = self.rel.to
        try:
            pks = map(int, field_data.split(','))
        except ValueError:
            # the CommaSeparatedIntegerField validator will catch this error
            return
        objects = mod._default_manager.in_bulk(pks)
        if len(objects) != len(pks):
            badkeys = [k for k in pks if k not in objects]
            raise exceptions.ValidationError(
                ungettext("Please enter valid %(self)s IDs. The value %(value)r is invalid.",
                    "Please enter valid %(self)s IDs. The values %(value)r are invalid.",
                    len(badkeys)) % {
                    'self': self.verbose_name,
                    'value': len(badkeys) == 1 and badkeys[0] or tuple(badkeys),
                })
    def value_to_string(self, obj):
        """Serialize the relation as a (unicode) list of related pks."""
        data = ''
        if obj:
            qs = getattr(obj, self.name).all()
            data = [instance._get_pk_val() for instance in qs]
        else:
            # In required many-to-many fields with only one available choice,
            # select that one available choice.
            if not self.blank:
                choices_list = self.get_choices_default()
                if len(choices_list) == 1:
                    data = [choices_list[0][0]]
        return smart_unicode(data)
    def contribute_to_class(self, cls, name):
        # To support multiple relations to self, it's useful to have a non-None
        # related name on symmetrical relations for internal reasons. The
        # concept doesn't make a lot of sense externally ("you want me to
        # specify *what* on my non-reversible relation?!"), so we set it up
        # automatically. The funky name reduces the chance of an accidental
        # clash.
        if self.rel.symmetrical and (self.rel.to == "self" or self.rel.to == cls._meta.object_name):
            self.rel.related_name = "%s_rel_+" % name
        super(ManyToManyField, self).contribute_to_class(cls, name)
        # The intermediate m2m model is not auto created if:
        # 1) There is a manually specified intermediate, or
        # 2) The class owning the m2m field is abstract.
        if not self.rel.through and not cls._meta.abstract:
            self.rel.through = create_many_to_many_intermediary_model(self, cls)
        # Add the descriptor for the m2m relation
        setattr(cls, self.name, ReverseManyRelatedObjectsDescriptor(self))
        # Set up the accessor for the m2m table name for the relation
        self.m2m_db_table = curry(self._get_m2m_db_table, cls._meta)
        # Populate some necessary rel arguments so that cross-app relations
        # work correctly.
        if isinstance(self.rel.through, basestring):
            def resolve_through_model(field, model, cls):
                field.rel.through = model
            add_lazy_relation(cls, self, self.rel.through, resolve_through_model)
        if isinstance(self.rel.to, basestring):
            target = self.rel.to
        else:
            target = self.rel.to._meta.db_table
        cls._meta.duplicate_targets[self.column] = (target, "m2m")
    def contribute_to_related_class(self, cls, related):
        # Internal M2Ms (i.e., those with a related name ending with '+')
        # don't get a related descriptor.
        if not self.rel.is_hidden():
            setattr(cls, related.get_accessor_name(), ManyRelatedObjectsDescriptor(related))
        # Set up the accessors for the column names on the m2m table
        self.m2m_column_name = curry(self._get_m2m_attr, related, 'column')
        self.m2m_reverse_name = curry(self._get_m2m_reverse_attr, related, 'column')
        self.m2m_field_name = curry(self._get_m2m_attr, related, 'name')
        self.m2m_reverse_field_name = curry(self._get_m2m_reverse_attr, related, 'name')
    def set_attributes_from_rel(self):
        # m2m fields take no attributes (verbose_name etc.) from the relation.
        pass
    def value_from_object(self, obj):
        "Returns the value of this field in the given model instance."
        return getattr(obj, self.attname).all()
    def save_form_data(self, instance, data):
        setattr(instance, self.attname, data)
    def formfield(self, **kwargs):
        db = kwargs.pop('using', None)
        defaults = {
            'form_class': forms.ModelMultipleChoiceField,
            'queryset': self.rel.to._default_manager.using(db).complex_filter(self.rel.limit_choices_to)
        }
        defaults.update(kwargs)
        # If initial is passed in, it's a list of related objects, but the
        # MultipleChoiceField takes a list of IDs.
        if defaults.get('initial') is not None:
            initial = defaults['initial']
            if callable(initial):
                initial = initial()
            defaults['initial'] = [i._get_pk_val() for i in initial]
        return super(ManyToManyField, self).formfield(**defaults)
    def db_type(self, connection):
        # A ManyToManyField is not represented by a single column,
        # so return None.
        return None
# license: apache-2.0
#!/usr/bin/python
"""Command-line driver for the Kloetze/soma-cube solver."""
import sys
import argparse

import problems
from solver import Solver


def _parse_args():
    """Build the CLI parser and return (parser, parsed args)."""
    parser = argparse.ArgumentParser(description="Kloetze algorithm.")
    parser.add_argument("-p", "--problem")
    parser.add_argument("-a", "--all", action="store_true",
                        help="Solve all problems starting with the given one.")
    parser.add_argument("-c", "--complete", action="store_true",
                        help="Find all solutions.")
    parser.add_argument("-j", "--javascript", action="store_true",
                        help="Generate java script arrays for solutions")
    parser.add_argument("-m", "--multicore", action="store_true",
                        help="Use parallel execution on all cores.")
    return parser, parser.parse_args()


def _export_javascript(all_problems, solutions):
    """Write the solutions/problems as JavaScript array literals for the web UI."""
    import json
    with open("../js/solutions.js", "w") as f:
        f.write("solutions =")
        f.write(json.dumps(solutions))
        f.write(";")
    with open("../js/problems.js", "w") as f:
        f.write("problems =")
        f.write(json.dumps(all_problems))
        f.write(";")


def main():
    parser, args = _parse_args()
    all_problems = problems.load()

    if args.problem is None:  # fixed: identity check, '== None' is unidiomatic
        print("Specify problem to work on. (0-{})".format(len(all_problems) - 1))
        sys.exit()
    problem = int(args.problem)
    if not 0 <= problem < len(all_problems):
        # An 'assert' here would vanish under 'python -O'; fail explicitly.
        parser.error("-p or --problem out of range")

    if args.all:
        problem_range = range(problem, len(all_problems))
    else:
        problem_range = range(problem, problem + 1)

    if args.multicore:
        # TODO: See what happens on systems where zmq is not installed.
        import parallel_solver

    solutions = []
    try:
        for p in problem_range:
            print("Solving problem {}".format(p))
            if args.multicore:
                solutions.append(parallel_solver.solve(all_problems[p], args.complete))
            else:
                solutions.append(Solver(all_problems[p], complete=args.complete).solve())
    finally:
        # 'finally' already re-raises after cleanup, so the previous
        # 'except: raise' clause was redundant and has been removed.
        if args.multicore:
            parallel_solver.quit()

    print("")
    if args.javascript:
        _export_javascript(all_problems, solutions)


if __name__ == "__main__":
    main()
# license: mit
# -*- test-case-name: twisted.test.test_sslverify -*-
# Copyright (c) 2005 Divmod, Inc.
# Copyright (c) 2008 Twisted Matrix Laboratories.
# See LICENSE for details.
# Copyright (c) 2005-2008 Twisted Matrix Laboratories.
import itertools
from OpenSSL import SSL, crypto
from twisted.python import reflect, util
from twisted.python.hashlib import md5
from twisted.internet.defer import Deferred
from twisted.internet.error import VerifyError, CertificateError
# Private - shared between all OpenSSLCertificateOptions, counts up to provide
# a unique session id for each context.
_sessionCounter = itertools.count().next
# Map both the short X.509 abbreviations (CN, O, OU, ...) and the long forms
# onto the canonical long-form attribute name used by pyOpenSSL's X509Name.
_x509names = {
    'CN': 'commonName',
    'commonName': 'commonName',
    'O': 'organizationName',
    'organizationName': 'organizationName',
    'OU': 'organizationalUnitName',
    'organizationalUnitName': 'organizationalUnitName',
    'L': 'localityName',
    'localityName': 'localityName',
    'ST': 'stateOrProvinceName',
    'stateOrProvinceName': 'stateOrProvinceName',
    'C': 'countryName',
    'countryName': 'countryName',
    'emailAddress': 'emailAddress'}
class DistinguishedName(dict):
    """
    Identify and describe an entity.
    Distinguished names are used to provide a minimal amount of identifying
    information about a certificate issuer or subject. They are commonly
    created with one or more of the following fields::
        commonName (CN)
        organizationName (O)
        organizationalUnitName (OU)
        localityName (L)
        stateOrProvinceName (ST)
        countryName (C)
        emailAddress
    """
    __slots__ = ()
    def __init__(self, **kw):
        # Route keyword arguments through __setattr__ so field names are
        # validated and normalized against _x509names.
        for k, v in kw.iteritems():
            setattr(self, k, v)
    def _copyFrom(self, x509name):
        """Copy every recognized attribute of a pyOpenSSL X509Name onto self."""
        d = {}  # NOTE(review): unused local; appears to be dead code.
        for name in _x509names:
            value = getattr(x509name, name, None)
            if value is not None:
                setattr(self, name, value)
    def _copyInto(self, x509name):
        """Write our fields onto a pyOpenSSL X509Name instance."""
        for k, v in self.iteritems():
            setattr(x509name, k, v)
    def __repr__(self):
        return '<DN %s>' % (dict.__repr__(self)[1:-1])
    def __getattr__(self, attr):
        # Expose dict entries as attributes, accepting either the short or
        # the long X.509 field name.
        try:
            return self[_x509names[attr]]
        except KeyError:
            raise AttributeError(attr)
    def __setattr__(self, attr, value):
        """Store *value* under the canonical long-form name for *attr*."""
        assert type(attr) is str
        if not attr in _x509names:
            raise AttributeError("%s is not a valid OpenSSL X509 name field" % (attr,))
        realAttr = _x509names[attr]
        # X.509 name components are ASCII; reject anything else early.
        value = value.encode('ascii')
        assert type(value) is str
        self[realAttr] = value
    def inspect(self):
        """
        Return a multi-line, human-readable representation of this DN.
        """
        l = []
        lablen = 0
        def uniqueValues(mapping):
            return dict.fromkeys(mapping.itervalues()).keys()
        for k in uniqueValues(_x509names):
            label = util.nameToLabel(k)
            # Track the widest label so the values line up in one column.
            lablen = max(len(label), lablen)
            v = getattr(self, k, None)
            if v is not None:
                l.append((label, v))
        lablen += 2
        for n, (label, attr) in enumerate(l):
            l[n] = (label.rjust(lablen)+': '+ attr)
        return '\n'.join(l)
# Convenient short alias for DistinguishedName.
DN = DistinguishedName
class CertBase:
    """
    Common base for wrappers around a pyOpenSSL certificate-like object.
    """
    def __init__(self, original):
        # The wrapped pyOpenSSL object (certificate or request).
        self.original = original

    def _copyName(self, suffix):
        """
        Return a L{DistinguishedName} copied from the wrapped object's
        C{get_<suffix>()} accessor (e.g. 'subject' or 'issuer').
        """
        name = DistinguishedName()
        name._copyFrom(getattr(self.original, 'get_' + suffix)())
        return name

    def getSubject(self):
        """
        Retrieve the subject of this certificate.
        @rtype: L{DistinguishedName}
        @return: A copy of the subject of this certificate.
        """
        return self._copyName('subject')
def _handleattrhelper(Class, transport, methodName):
"""
(private) Helper for L{Certificate.peerFromTransport} and
L{Certificate.hostFromTransport} which checks for incompatible handle types
and null certificates and raises the appropriate exception or returns the
appropriate certificate object.
"""
method = getattr(transport.getHandle(),
"get_%s_certificate" % (methodName,), None)
if method is None:
raise CertificateError(
"non-TLS transport %r did not have %s certificate" % (transport, methodName))
cert = method()
if cert is None:
raise CertificateError(
"TLS transport %r did not have %s certificate" % (transport, methodName))
return Class(cert)
class Certificate(CertBase):
    """
    An x509 certificate.
    """
    def __repr__(self):
        return '<%s Subject=%s Issuer=%s>' % (self.__class__.__name__,
                                              self.getSubject().commonName,
                                              self.getIssuer().commonName)
    def __eq__(self, other):
        # Certificates compare equal when their serialized forms match.
        if isinstance(other, Certificate):
            return self.dump() == other.dump()
        return False
    def __ne__(self, other):
        return not self.__eq__(other)
    def load(Class, requestData, format=crypto.FILETYPE_ASN1, args=()):
        """
        Load a certificate from an ASN.1- or PEM-format string.
        @rtype: C{Class}
        """
        return Class(crypto.load_certificate(format, requestData), *args)
    load = classmethod(load)
    # Subclasses (e.g. PrivateCertificate) override 'load'; keep the base
    # implementation reachable under another name.
    _load = load
    def dumpPEM(self):
        """
        Dump this certificate to a PEM-format data string.
        @rtype: C{str}
        """
        return self.dump(crypto.FILETYPE_PEM)
    def loadPEM(Class, data):
        """
        Load a certificate from a PEM-format data string.
        @rtype: C{Class}
        """
        return Class.load(data, crypto.FILETYPE_PEM)
    loadPEM = classmethod(loadPEM)
    def peerFromTransport(Class, transport):
        """
        Get the certificate for the remote end of the given transport.
        @type: L{ISystemHandle}
        @rtype: C{Class}
        @raise: L{CertificateError}, if the given transport does not have a peer
        certificate.
        """
        return _handleattrhelper(Class, transport, 'peer')
    peerFromTransport = classmethod(peerFromTransport)
    def hostFromTransport(Class, transport):
        """
        Get the certificate for the local end of the given transport.
        @param transport: an L{ISystemHandle} provider; the transport we will
            examine for a host certificate.
        @rtype: C{Class}
        @raise: L{CertificateError}, if the given transport does not have a host
        certificate.
        """
        return _handleattrhelper(Class, transport, 'host')
    hostFromTransport = classmethod(hostFromTransport)
    def getPublicKey(self):
        """
        Get the public key for this certificate.
        @rtype: L{PublicKey}
        """
        return PublicKey(self.original.get_pubkey())
    def dump(self, format=crypto.FILETYPE_ASN1):
        """Serialize this certificate in the given pyOpenSSL format."""
        return crypto.dump_certificate(format, self.original)
    def serialNumber(self):
        """
        Retrieve the serial number of this certificate.
        @rtype: C{int}
        """
        return self.original.get_serial_number()
    def digest(self, method='md5'):
        """
        Return a digest hash of this certificate using the specified hash
        algorithm.
        @param method: One of C{'md5'} or C{'sha'}.
        @rtype: C{str}
        """
        return self.original.digest(method)
    def _inspect(self):
        # Subject/issuer/serial/digest part shared with PrivateCertificate.
        return '\n'.join(['Certificate For Subject:',
                          self.getSubject().inspect(),
                          '\nIssuer:',
                          self.getIssuer().inspect(),
                          '\nSerial Number: %d' % self.serialNumber(),
                          'Digest: %s' % self.digest()])
    def inspect(self):
        """
        Return a multi-line, human-readable representation of this
        Certificate, including information about the subject, issuer, and
        public key.
        """
        return '\n'.join((self._inspect(), self.getPublicKey().inspect()))
    def getIssuer(self):
        """
        Retrieve the issuer of this certificate.
        @rtype: L{DistinguishedName}
        @return: A copy of the issuer of this certificate.
        """
        return self._copyName('issuer')
    def options(self, *authorities):
        # Not implemented for plain (public-only) certificates.
        raise NotImplementedError('Possible, but doubtful we need this yet')
class CertificateRequest(CertBase):
"""
An x509 certificate request.
Certificate requests are given to certificate authorities to be signed and
returned resulting in an actual certificate.
"""
def load(Class, requestData, requestFormat=crypto.FILETYPE_ASN1):
req = crypto.load_certificate_request(requestFormat, requestData)
dn = DistinguishedName()
dn._copyFrom(req.get_subject())
if not req.verify(req.get_pubkey()):
raise VerifyError("Can't verify that request for %r is self-signed." % (dn,))
return Class(req)
load = classmethod(load)
def dump(self, format=crypto.FILETYPE_ASN1):
return crypto.dump_certificate_request(format, self.original)
class PrivateCertificate(Certificate):
"""
An x509 certificate and private key.
"""
def __repr__(self):
return Certificate.__repr__(self) + ' with ' + repr(self.privateKey)
def _setPrivateKey(self, privateKey):
if not privateKey.matches(self.getPublicKey()):
raise VerifyError(
"Certificate public and private keys do not match.")
self.privateKey = privateKey
return self
def newCertificate(self, newCertData, format=crypto.FILETYPE_ASN1):
"""
Create a new L{PrivateCertificate} from the given certificate data and
this instance's private key.
"""
return self.load(newCertData, self.privateKey, format)
def load(Class, data, privateKey, format=crypto.FILETYPE_ASN1):
return Class._load(data, format)._setPrivateKey(privateKey)
load = classmethod(load)
def inspect(self):
return '\n'.join([Certificate._inspect(self),
self.privateKey.inspect()])
def dumpPEM(self):
"""
Dump both public and private parts of a private certificate to
PEM-format data.
"""
return self.dump(crypto.FILETYPE_PEM) + self.privateKey.dump(crypto.FILETYPE_PEM)
def loadPEM(Class, data):
"""
Load both private and public parts of a private certificate from a
chunk of PEM-format data.
"""
return Class.load(data, KeyPair.load(data, crypto.FILETYPE_PEM),
crypto.FILETYPE_PEM)
loadPEM = classmethod(loadPEM)
def fromCertificateAndKeyPair(Class, certificateInstance, privateKey):
privcert = Class(certificateInstance.original)
return privcert._setPrivateKey(privateKey)
fromCertificateAndKeyPair = classmethod(fromCertificateAndKeyPair)
def options(self, *authorities):
options = dict(privateKey=self.privateKey.original,
certificate=self.original)
if authorities:
options.update(dict(verify=True,
requireCertificate=True,
caCerts=[auth.original for auth in authorities]))
return OpenSSLCertificateOptions(**options)
def certificateRequest(self, format=crypto.FILETYPE_ASN1,
digestAlgorithm='md5'):
return self.privateKey.certificateRequest(
self.getSubject(),
format,
digestAlgorithm)
def signCertificateRequest(self,
requestData,
verifyDNCallback,
serialNumber,
requestFormat=crypto.FILETYPE_ASN1,
certificateFormat=crypto.FILETYPE_ASN1):
issuer = self.getSubject()
return self.privateKey.signCertificateRequest(
issuer,
requestData,
verifyDNCallback,
serialNumber,
requestFormat,
certificateFormat)
def signRequestObject(self, certificateRequest, serialNumber,
secondsToExpiry=60 * 60 * 24 * 365, # One year
digestAlgorithm='md5'):
return self.privateKey.signRequestObject(self.getSubject(),
certificateRequest,
serialNumber,
secondsToExpiry,
digestAlgorithm)
class PublicKey:
def __init__(self, osslpkey):
self.original = osslpkey
req1 = crypto.X509Req()
req1.set_pubkey(osslpkey)
self._emptyReq = crypto.dump_certificate_request(crypto.FILETYPE_ASN1, req1)
def matches(self, otherKey):
return self._emptyReq == otherKey._emptyReq
# XXX This could be a useful method, but sometimes it triggers a segfault,
# so we'll steer clear for now.
# def verifyCertificate(self, certificate):
# """
# returns None, or raises a VerifyError exception if the certificate
# could not be verified.
# """
# if not certificate.original.verify(self.original):
# raise VerifyError("We didn't sign that certificate.")
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.keyHash())
def keyHash(self):
"""
MD5 hex digest of signature on an empty certificate request with this
key.
"""
return md5(self._emptyReq).hexdigest()
def inspect(self):
return 'Public Key with Hash: %s' % (self.keyHash(),)
class KeyPair(PublicKey):
def load(Class, data, format=crypto.FILETYPE_ASN1):
return Class(crypto.load_privatekey(format, data))
load = classmethod(load)
def dump(self, format=crypto.FILETYPE_ASN1):
return crypto.dump_privatekey(format, self.original)
def __getstate__(self):
return self.dump()
def __setstate__(self, state):
self.__init__(crypto.load_privatekey(crypto.FILETYPE_ASN1, state))
def inspect(self):
t = self.original.type()
if t == crypto.TYPE_RSA:
ts = 'RSA'
elif t == crypto.TYPE_DSA:
ts = 'DSA'
else:
ts = '(Unknown Type!)'
L = (self.original.bits(), ts, self.keyHash())
return '%s-bit %s Key Pair with Hash: %s' % L
def generate(Class, kind=crypto.TYPE_RSA, size=1024):
pkey = crypto.PKey()
pkey.generate_key(kind, size)
return Class(pkey)
def newCertificate(self, newCertData, format=crypto.FILETYPE_ASN1):
return PrivateCertificate.load(newCertData, self, format)
generate = classmethod(generate)
def requestObject(self, distinguishedName, digestAlgorithm='md5'):
req = crypto.X509Req()
req.set_pubkey(self.original)
distinguishedName._copyInto(req.get_subject())
req.sign(self.original, digestAlgorithm)
return CertificateRequest(req)
def certificateRequest(self, distinguishedName,
format=crypto.FILETYPE_ASN1,
digestAlgorithm='md5'):
"""Create a certificate request signed with this key.
@return: a string, formatted according to the 'format' argument.
"""
return self.requestObject(distinguishedName, digestAlgorithm).dump(format)
def signCertificateRequest(self,
issuerDistinguishedName,
requestData,
verifyDNCallback,
serialNumber,
requestFormat=crypto.FILETYPE_ASN1,
certificateFormat=crypto.FILETYPE_ASN1,
secondsToExpiry=60 * 60 * 24 * 365, # One year
digestAlgorithm='md5'):
"""
Given a blob of certificate request data and a certificate authority's
DistinguishedName, return a blob of signed certificate data.
If verifyDNCallback returns a Deferred, I will return a Deferred which
fires the data when that Deferred has completed.
"""
hlreq = CertificateRequest.load(requestData, requestFormat)
dn = hlreq.getSubject()
vval = verifyDNCallback(dn)
def verified(value):
if not value:
raise VerifyError("DN callback %r rejected request DN %r" % (verifyDNCallback, dn))
return self.signRequestObject(issuerDistinguishedName, hlreq,
serialNumber, secondsToExpiry, digestAlgorithm).dump(certificateFormat)
if isinstance(vval, Deferred):
return vval.addCallback(verified)
else:
return verified(vval)
def signRequestObject(self,
issuerDistinguishedName,
requestObject,
serialNumber,
secondsToExpiry=60 * 60 * 24 * 365, # One year
digestAlgorithm='md5'):
"""
Sign a CertificateRequest instance, returning a Certificate instance.
"""
req = requestObject.original
dn = requestObject.getSubject()
cert = crypto.X509()
issuerDistinguishedName._copyInto(cert.get_issuer())
cert.set_subject(req.get_subject())
cert.set_pubkey(req.get_pubkey())
cert.gmtime_adj_notBefore(0)
cert.gmtime_adj_notAfter(secondsToExpiry)
cert.set_serial_number(serialNumber)
cert.sign(self.original, digestAlgorithm)
return Certificate(cert)
def selfSignedCert(self, serialNumber, **kw):
dn = DN(**kw)
return PrivateCertificate.fromCertificateAndKeyPair(
self.signRequestObject(dn, self.requestObject(dn), serialNumber),
self)
class OpenSSLCertificateOptions(object):
"""
A factory for SSL context objects for both SSL servers and clients.
"""
_context = None
# Older versions of PyOpenSSL didn't provide OP_ALL. Fudge it here, just in case.
_OP_ALL = getattr(SSL, 'OP_ALL', 0x0000FFFF)
# OP_NO_TICKET is not (yet) exposed by PyOpenSSL
_OP_NO_TICKET = 0x00004000
method = SSL.TLSv1_METHOD
def __init__(self,
privateKey=None,
certificate=None,
method=None,
verify=False,
caCerts=None,
verifyDepth=9,
requireCertificate=True,
verifyOnce=True,
enableSingleUseKeys=True,
enableSessions=True,
fixBrokenPeers=False,
enableSessionTickets=False):
"""
Create an OpenSSL context SSL connection context factory.
@param privateKey: A PKey object holding the private key.
@param certificate: An X509 object holding the certificate.
@param method: The SSL protocol to use, one of SSLv23_METHOD,
SSLv2_METHOD, SSLv3_METHOD, TLSv1_METHOD. Defaults to TLSv1_METHOD.
@param verify: If True, verify certificates received from the peer and
fail the handshake if verification fails. Otherwise, allow anonymous
sessions and sessions with certificates which fail validation. By
default this is False.
@param caCerts: List of certificate authority certificate objects to
use to verify the peer's certificate. Only used if verify is
C{True}, and if verify is C{True}, this must be specified. Since
verify is C{False} by default, this is C{None} by default.
@type caCerts: C{list} of L{OpenSSL.crypto.X509}
@param verifyDepth: Depth in certificate chain down to which to verify.
If unspecified, use the underlying default (9).
@param requireCertificate: If True, do not allow anonymous sessions.
@param verifyOnce: If True, do not re-verify the certificate
on session resumption.
@param enableSingleUseKeys: If True, generate a new key whenever
ephemeral DH parameters are used to prevent small subgroup attacks.
@param enableSessions: If True, set a session ID on each context. This
allows a shortened handshake to be used when a known client reconnects.
@param fixBrokenPeers: If True, enable various non-spec protocol fixes
for broken SSL implementations. This should be entirely safe,
according to the OpenSSL documentation, but YMMV. This option is now
off by default, because it causes problems with connections between
peers using OpenSSL 0.9.8a.
@param enableSessionTickets: If True, enable session ticket extension
for session resumption per RFC 5077. Note there is no support for
controlling session tickets. This option is off by default, as some
server implementations don't correctly process incoming empty session
ticket extensions in the hello.
"""
assert (privateKey is None) == (certificate is None), "Specify neither or both of privateKey and certificate"
self.privateKey = privateKey
self.certificate = certificate
if method is not None:
self.method = method
self.verify = verify
assert ((verify and caCerts) or
(not verify)), "Specify client CA certificate information if and only if enabling certificate verification"
self.caCerts = caCerts
self.verifyDepth = verifyDepth
self.requireCertificate = requireCertificate
self.verifyOnce = verifyOnce
self.enableSingleUseKeys = enableSingleUseKeys
self.enableSessions = enableSessions
self.fixBrokenPeers = fixBrokenPeers
self.enableSessionTickets = enableSessionTickets
def __getstate__(self):
d = self.__dict__.copy()
try:
del d['_context']
except KeyError:
pass
return d
def __setstate__(self, state):
self.__dict__ = state
def getContext(self):
"""Return a SSL.Context object.
"""
if self._context is None:
self._context = self._makeContext()
return self._context
def _makeContext(self):
ctx = SSL.Context(self.method)
if self.certificate is not None and self.privateKey is not None:
ctx.use_certificate(self.certificate)
ctx.use_privatekey(self.privateKey)
# Sanity check
ctx.check_privatekey()
verifyFlags = SSL.VERIFY_NONE
if self.verify:
verifyFlags = SSL.VERIFY_PEER
if self.requireCertificate:
verifyFlags |= SSL.VERIFY_FAIL_IF_NO_PEER_CERT
if self.verifyOnce:
verifyFlags |= SSL.VERIFY_CLIENT_ONCE
if self.caCerts:
store = ctx.get_cert_store()
for cert in self.caCerts:
store.add_cert(cert)
# It'd be nice if pyOpenSSL let us pass None here for this behavior (as
# the underlying OpenSSL API call allows NULL to be passed). It
# doesn't, so we'll supply a function which does the same thing.
def _verifyCallback(conn, cert, errno, depth, preverify_ok):
return preverify_ok
ctx.set_verify(verifyFlags, _verifyCallback)
if self.verifyDepth is not None:
ctx.set_verify_depth(self.verifyDepth)
if self.enableSingleUseKeys:
ctx.set_options(SSL.OP_SINGLE_DH_USE)
if self.fixBrokenPeers:
ctx.set_options(self._OP_ALL)
if self.enableSessions:
sessionName = md5("%s-%d" % (reflect.qual(self.__class__), _sessionCounter())).hexdigest()
ctx.set_session_id(sessionName)
if not self.enableSessionTickets:
ctx.set_options(self._OP_NO_TICKET)
return ctx
| apache-2.0 |
bq-dev/android_kernel_bq_msm8976 | tools/perf/scripts/python/check-perf-trace.py | 11214 | 2503 | # perf script event handlers, generated by perf script -g python
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
unhandled = autodict()
def trace_begin():
print "trace_begin"
pass
def trace_end():
print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
vec):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "vec=%s\n" % \
(symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
call_site, ptr, bytes_req, bytes_alloc,
gfp_flags):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "call_site=%u, ptr=%u, bytes_req=%u, " \
"bytes_alloc=%u, gfp_flags=%s\n" % \
(call_site, ptr, bytes_req, bytes_alloc,
flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
try:
unhandled[event_name] += 1
except TypeError:
unhandled[event_name] = 1
def print_header(event_name, cpu, secs, nsecs, pid, comm):
print "%-20s %5u %05u.%09u %8u %-20s " % \
(event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
% (common_pc(context), trace_flag_str(common_flags(context)), \
common_lock_depth(context))
def print_unhandled():
keys = unhandled.keys()
if not keys:
return
print "\nunhandled events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"-----------"),
for event_name in keys:
print "%-40s %10d\n" % (event_name, unhandled[event_name])
| gpl-2.0 |
impl/rbppc-linux | tools/perf/scripts/python/sctop.py | 11180 | 1924 | # system call top
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s sctop.py [comm] [interval]\n";
for_comm = None
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
sys.exit(usage)
if len(sys.argv) > 2:
for_comm = sys.argv[1]
interval = int(sys.argv[2])
elif len(sys.argv) > 1:
try:
interval = int(sys.argv[1])
except ValueError:
for_comm = sys.argv[1]
interval = default_interval
syscalls = autodict()
def trace_begin():
thread.start_new_thread(print_syscall_totals, (interval,))
pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, args):
if for_comm is not None:
if common_comm != for_comm:
return
try:
syscalls[id] += 1
except TypeError:
syscalls[id] = 1
def print_syscall_totals(interval):
while 1:
clear_term()
if for_comm is not None:
print "\nsyscall events for %s:\n\n" % (for_comm),
else:
print "\nsyscall events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"----------"),
for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
reverse = True):
try:
print "%-40s %10d\n" % (syscall_name(id), val),
except TypeError:
pass
syscalls.clear()
time.sleep(interval)
| gpl-2.0 |
codilime/contrail-controller | src/config/svc-monitor/svc_monitor/tests/test_virtual_machine_manager.py | 2 | 12304 | import mock
import unittest
from vnc_api.vnc_api import *
from svc_monitor.virtual_machine_manager import VirtualMachineManager
from svc_monitor.config_db import *
import test_common_utils as test_utils
class VirtualMachineManagerTest(unittest.TestCase):
def setUp(self):
VirtualMachineSM._cassandra = mock.MagicMock()
VirtualMachineSM._cassandra.object_read = test_utils.vm_db_read
VirtualMachineInterfaceSM._cassandra = mock.MagicMock()
VirtualMachineInterfaceSM._cassandra.object_read = test_utils.vmi_db_read
InstanceIpSM._cassandra = mock.MagicMock()
InstanceIpSM._cassandra.object_read = test_utils.iip_db_read
InterfaceRouteTableSM._cassandra = mock.MagicMock()
InterfaceRouteTableSM._cassandra.object_read = test_utils.irt_db_read
self.mocked_vnc = mock.MagicMock()
self.mocked_vnc.fq_name_to_id = test_utils.get_vn_id_for_fq_name
self.mocked_vnc.virtual_network_create = test_utils.vn_create
self.mocked_vnc.virtual_machine_interface_create = test_utils.vmi_create
self.mocked_vnc.instance_ip_create = test_utils.iip_create
self.nova_mock = mock.MagicMock()
self.mocked_db = mock.MagicMock()
self.mocked_args = mock.MagicMock()
self.mocked_args.availability_zone = 'default-availability-zone'
self.log_mock = mock.MagicMock()
self.vm_manager = VirtualMachineManager(
db=self.mocked_db, logger=self.log_mock,
vnc_lib=self.mocked_vnc, vrouter_scheduler=mock.MagicMock(),
nova_client=self.nova_mock, args=self.mocked_args,
agent_manager=mock.MagicMock())
def tearDown(self):
ServiceTemplateSM.reset()
ServiceInstanceSM.reset()
InstanceIpSM.reset()
VirtualMachineInterfaceSM.reset()
VirtualMachineSM.reset()
del InterfaceRouteTableSM._cassandra
del VirtualMachineSM._cassandra
def test_virtual_machine_create(self):
test_utils.create_test_project('fake-domain:fake-project')
test_utils.create_test_security_group('fake-domain:fake-project:default')
test_utils.create_test_virtual_network('fake-domain:fake-project:left-vn')
test_utils.create_test_virtual_network('fake-domain:fake-project:right-vn')
st = test_utils.create_test_st(name='vm-template',
virt_type='virtual-machine',
intf_list=[['management', False], ['left', True], ['right', False]])
si = test_utils.create_test_si(name='vm-instance', count=2,
intf_list=['', 'left-vn', 'right-vn'])
def nova_oper(resource, oper, proj_name, **kwargs):
if resource == 'servers' and oper == 'create':
nova_vm = test_utils.FakeNovaServer('fake-vm-uuid', kwargs['name'])
return nova_vm
else:
return mock.MagicMock()
self.nova_mock.oper = nova_oper
self.vm_manager.create_service(st, si)
self.mocked_vnc.virtual_machine_create.assert_any_call(test_utils.VMObjMatcher(1))
self.mocked_vnc.virtual_machine_create.assert_any_call(test_utils.VMObjMatcher(2))
self.assertTrue(si.availability_zone, 'default-availability-zone')
def test_virtual_machine_delete(self):
vm = test_utils.create_test_virtual_machine('fake-vm-uuid')
self.vm_manager.delete_service(vm)
def test_missing_image_in_template(self):
test_utils.create_test_project('fake-domain:fake-project')
test_utils.create_test_security_group('fake-domain:fake-project:default')
test_utils.create_test_virtual_network('fake-domain:fake-project:left-vn')
test_utils.create_test_virtual_network('fake-domain:fake-project:right-vn')
st = test_utils.create_test_st(name='vm-template',
virt_type='virtual-machine',
intf_list=[['management', False], ['left', True], ['right', False]])
si = test_utils.create_test_si(name='vm-instance', count=2,
intf_list=['', 'left-vn', 'right-vn'])
st.params['image_name'] = None
self.vm_manager.create_service(st, si)
self.log_mock.error.assert_called_with("Image not present in %s" % ((':').join(st.fq_name)))
def test_missing_image_in_nova(self):
test_utils.create_test_project('fake-domain:fake-project')
test_utils.create_test_security_group('fake-domain:fake-project:default')
test_utils.create_test_virtual_network('fake-domain:fake-project:left-vn')
test_utils.create_test_virtual_network('fake-domain:fake-project:right-vn')
st = test_utils.create_test_st(name='vm-template',
virt_type='virtual-machine',
intf_list=[['management', False], ['left', True], ['right', False]])
si = test_utils.create_test_si(name='vm-instance', count=2,
intf_list=['', 'left-vn', 'right-vn'])
def nova_oper(resource, oper, proj_name, **kwargs):
if resource == 'images' and oper == 'find':
return None
else:
return mock.MagicMock()
self.nova_mock.oper = nova_oper
self.vm_manager.create_service(st, si)
self.log_mock.error.assert_called_with("Image not found %s" % si.image)
def test_nova_vm_create_fail(self):
test_utils.create_test_project('fake-domain:fake-project')
test_utils.create_test_security_group('fake-domain:fake-project:default')
test_utils.create_test_virtual_network('fake-domain:fake-project:left-vn')
test_utils.create_test_virtual_network('fake-domain:fake-project:right-vn')
st = test_utils.create_test_st(name='vm-template',
virt_type='virtual-machine',
intf_list=[['management', False], ['left', True], ['right', False]])
si = test_utils.create_test_si(name='vm-instance', count=2,
intf_list=['', 'left-vn', 'right-vn'])
def nova_oper(resource, oper, proj_name, **kwargs):
if resource == 'servers' and oper == 'create':
return None
else:
return mock.MagicMock()
self.nova_mock.oper = nova_oper
self.vm_manager.create_service(st, si)
self.log_mock.error.assert_any_call(test_utils.AnyStringWith('Nova vm create failed'))
def test_missing_flavor_in_template(self):
test_utils.create_test_project('fake-domain:fake-project')
test_utils.create_test_security_group('fake-domain:fake-project:default')
test_utils.create_test_virtual_network('fake-domain:fake-project:left-vn')
test_utils.create_test_virtual_network('fake-domain:fake-project:right-vn')
st = test_utils.create_test_st(name='vm-template',
virt_type='virtual-machine',
intf_list=[['management', False], ['left', True], ['right', False]])
si = test_utils.create_test_si(name='vm-instance', count=2,
intf_list=['', 'left-vn', 'right-vn'])
def nova_oper(resource, oper, proj_name, **kwargs):
if resource == 'flavors' and oper == 'find':
return None
else:
return mock.MagicMock()
self.nova_mock.oper = nova_oper
st.params['flavor'] = None
self.vm_manager.create_service(st, si)
self.log_mock.error.assert_called_with(test_utils.AnyStringWith("Flavor not found"))
def test_availability_zone_setting(self):
test_utils.create_test_project('fake-domain:fake-project')
test_utils.create_test_security_group('fake-domain:fake-project:default')
test_utils.create_test_virtual_network('fake-domain:fake-project:left-vn')
test_utils.create_test_virtual_network('fake-domain:fake-project:right-vn')
st = test_utils.create_test_st(name='vm-template',
virt_type='virtual-machine',
intf_list=[['management', False], ['left', True], ['right', False]])
si = test_utils.create_test_si(name='vm-instance', count=2,
intf_list=['', 'left-vn', 'right-vn'])
def nova_oper(resource, oper, proj_name, **kwargs):
if resource == 'servers' and oper == 'create':
nova_vm = test_utils.FakeNovaServer('fake-vm-uuid', kwargs['name'])
return nova_vm
else:
return mock.MagicMock()
self.nova_mock.oper = nova_oper
st.params['availability_zone_enable'] = True
si.params['availability_zone'] = 'test-availability-zone'
self.vm_manager.create_service(st, si)
self.assertTrue(si.availability_zone, 'test-availability-zone')
def test_network_config_validation(self):
test_utils.create_test_project('fake-domain:fake-project')
test_utils.create_test_security_group('fake-domain:fake-project:default')
test_utils.create_test_virtual_network('fake-domain:fake-project:left-vn')
test_utils.create_test_virtual_network('fake-domain:fake-project:right-vn')
st = test_utils.create_test_st(name='vm-template',
virt_type='virtual-machine',
intf_list=[['management', False], ['left', True], ['right', False]])
si = test_utils.create_test_si(name='vm-instance', count=2,
intf_list=['', 'left-vn', 'right-vn'])
st.params['interface_type'] = []
self.vm_manager.create_service(st, si)
self.log_mock.notice.assert_called_with("Interface list empty for ST %s SI %s" %
((':').join(st.fq_name), (':').join(si.fq_name)))
def test_virtual_machine_exists(self):
test_utils.create_test_project('fake-domain:fake-project')
test_utils.create_test_security_group('fake-domain:fake-project:default')
test_utils.create_test_virtual_network('fake-domain:fake-project:left-vn')
test_utils.create_test_virtual_network('fake-domain:fake-project:right-vn')
st = test_utils.create_test_st(name='vm-template',
virt_type='virtual-machine',
intf_list=[['management', False], ['left', True], ['right', False]])
si = test_utils.create_test_si(name='vm-instance', count=2,
intf_list=['', 'left-vn', 'right-vn'])
def nova_oper(resource, oper, proj_name, **kwargs):
if resource == 'servers' and oper == 'create':
nova_vm = test_utils.FakeNovaServer(kwargs['name'], kwargs['name'])
return nova_vm
else:
return mock.MagicMock()
self.nova_mock.oper = nova_oper
self.mocked_vnc.virtual_machine_create = test_utils.vm_create
self.vm_manager.create_service(st, si)
self.log_mock.info.assert_any_call(test_utils.AnyStringWith('Launching VM :'))
self.log_mock.info.assert_any_call(test_utils.AnyStringWith('Created VM :'))
self.log_mock.info.assert_any_call(test_utils.AnyStringWith(si.name))
self.log_mock.reset_mock()
self.vm_manager.create_service(st, si)
self.assertTrue(self.log_mock.info.call_count, 1)
def test_virtual_machine_static_routes(self):
test_utils.create_test_project('fake-domain:fake-project')
test_utils.create_test_security_group('fake-domain:fake-project:default')
test_utils.create_test_virtual_network('fake-domain:fake-project:left-vn')
test_utils.create_test_virtual_network('fake-domain:fake-project:right-vn')
st = test_utils.create_test_st(name='vm-template',
virt_type='virtual-machine',
intf_list=[['management', False], ['left', True, True], ['right', False]])
si = test_utils.create_test_si(name='vm-instance', count=2,
intf_list=['', 'left-vn', 'right-vn'])
def nova_oper(resource, oper, proj_name, **kwargs):
if resource == 'servers' and oper == 'create':
nova_vm = test_utils.FakeNovaServer('fake-vm-uuid', kwargs['name'])
return nova_vm
else:
return mock.MagicMock()
self.nova_mock.oper = nova_oper
self.vm_manager.create_service(st, si)
self.mocked_vnc.virtual_machine_create.assert_any_call(test_utils.VMObjMatcher(1))
self.mocked_vnc.virtual_machine_create.assert_any_call(test_utils.VMObjMatcher(2))
| apache-2.0 |
david-ragazzi/nupic | examples/network/temporal_anomaly_network_demo.py | 8 | 3405 | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Demonstrates common_networks using hotgym example. Outputs
network-demo2-output.csv, which should be identical to the csv
outputted by network-api-demo.py (which does not use common_networks).
"""
import csv
import os
from pkg_resources import resource_filename
from nupic.data.file_record_stream import FileRecordStream
from nupic.engine import common_networks
_INPUT_FILE_PATH = resource_filename(
"nupic.datafiles", "extra/hotgym/rec-center-hourly.csv"
)
_OUTPUT_PATH = "network-demo2-output.csv"
_NUM_RECORDS = 2000
def runNetwork(network, writer):
"""Run the network and write output to writer.
:param network: a Network instance to run
:param writer: a csv.writer instance to write output to
"""
sensorRegion = network.regions["sensor"]
spatialPoolerRegion = network.regions["spatialPoolerRegion"]
temporalPoolerRegion = network.regions["temporalPoolerRegion"]
anomalyRegion = network.regions["anomalyRegion"]
prevPredictedColumns = []
for i in xrange(_NUM_RECORDS):
# Run the network for a single iteration
network.run(1)
# Write out the anomaly score along with the record number and consumption
# value.
anomalyScore = anomalyRegion.getOutputData("rawAnomalyScore")[0]
consumption = sensorRegion.getOutputData("sourceOut")[0]
writer.writerow((i, consumption, anomalyScore))
if __name__ == "__main__":
inputFilePath = resource_filename(
"nupic.datafiles", "extra/hotgym/rec-center-hourly.csv"
)
scalarEncoderArgs = {
"w": 21,
"minval": 0.0,
"maxval": 100.0,
"periodic": False,
"n": 50,
"radius": 0,
"resolution": 0,
"name": "consumption",
"verbosity": 0,
"clipInput": True,
"forced": False,
}
dateEncoderArgs = {
"season": 0,
"dayOfWeek": 0,
"weekend": 0,
"holiday": 0,
"timeOfDay": (21, 9.5),
"customDays": 0,
"name": "timestamp",
"forced": True
}
recordParams = {
"inputFilePath": _INPUT_FILE_PATH,
"scalarEncoderArgs": scalarEncoderArgs,
"dateEncoderArgs": dateEncoderArgs,
}
network = common_networks.createTemporalAnomaly(recordParams)
outputPath = os.path.join(os.path.dirname(__file__), _OUTPUT_PATH)
with open(outputPath, "w") as outputFile:
writer = csv.writer(outputFile)
print "Writing output to %s" % outputPath
runNetwork(network, writer)
| gpl-3.0 |
SalesforceEng/Providence | tests/plugins/__init__.py | 8 | 1491 | '''
Copyright (c) 2015, Salesforce.com, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
| bsd-3-clause |
ashang/calibre | src/calibre/gui2/tweak_book/function_replace.py | 14 | 13782 | #!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
import re, io, weakref, sys
from cStringIO import StringIO
from PyQt5.Qt import (
pyqtSignal, QVBoxLayout, QHBoxLayout, QPlainTextEdit, QLabel, QFontMetrics,
QSize, Qt, QApplication, QIcon)
from calibre.ebooks.oeb.polish.utils import apply_func_to_match_groups, apply_func_to_html_text
from calibre.gui2 import error_dialog
from calibre.gui2.complete2 import EditWithComplete
from calibre.gui2.tweak_book import dictionaries
from calibre.gui2.tweak_book.widgets import Dialog
from calibre.gui2.tweak_book.editor.text import TextEdit
from calibre.utils.config import JSONConfig
from calibre.utils.icu import capitalize, upper, lower, swapcase
from calibre.utils.titlecase import titlecase
from calibre.utils.localization import localize_user_manual_link
# Persistent JSON-backed store of user-defined search/replace functions,
# mapping function name -> python source code.
user_functions = JSONConfig('editor-search-replace-functions')
def compile_code(src, name='<string>'):
    # Compile user-supplied function source into a fresh module namespace
    # (dict).  Byte strings are decoded using an optional PEP 263 coding
    # declaration (default utf-8); blank source falls back to EMPTY_FUNC.
    if not isinstance(src, unicode):
        match = re.search(r'coding[:=]\s*([-\w.]+)', src[:200])
        enc = match.group(1) if match else 'utf-8'
        src = src.decode(enc)
    if not src or not src.strip():
        src = EMPTY_FUNC
    # Python complains if there is a coding declaration in a unicode string
    src = re.sub(r'^#.*coding\s*[:=]\s*([-\w.]+)', '#', src, flags=re.MULTILINE)
    # Translate newlines to \n
    src = io.StringIO(src, newline=None).getvalue()
    code = compile(src, name, 'exec')
    namespace = {}
    exec code in namespace
    # Callers look up the user's replace() callable in this dict.
    return namespace
class Function(object):
    """Wrapper around one search/replace function, builtin or user-defined.

    Builtins are passed in as ready-made callables; user functions arrive as
    source text that is compiled and whose module-level replace() is used.
    Instances are callable, suitable as an re.sub() replacement function.
    """
    def __init__(self, name, source=None, func=None):
        self._source = source
        # source is None exactly for builtins (they come as callables).
        self.is_builtin = source is None
        self.name = name
        if func is None:
            self.mod = compile_code(source, name)
            self.func = self.mod['replace']
        else:
            self.func = func
            self.mod = None
        if not callable(self.func):
            raise ValueError('%r is not a function' % self.func)
        self.file_order = getattr(self.func, 'file_order', None)
    def init_env(self, name=''):
        # Reset per-run state before a search/replace pass over a file.
        from calibre.gui2.tweak_book.boss import get_boss
        self.context_name = name or ''
        self.match_index = 0
        self.boss = get_boss()
        self.data = {}
        self.debug_buf = StringIO()
        # Expose other functions' module namespaces to the user function.
        self.functions = {name:func.mod for name, func in functions().iteritems() if func.mod is not None}
    def __hash__(self):
        # Hash/equality are by name only.
        return hash(self.name)
    def __eq__(self, other):
        return self.name == getattr(other, 'name', None)
    def __ne__(self, other):
        return not self.__eq__(other)
    def __call__(self, match):
        # Called once per regex match.  stdout/stderr are redirected into
        # debug_buf so prints from the user function can be shown later.
        self.match_index += 1
        oo, oe, sys.stdout, sys.stderr = sys.stdout, sys.stderr, self.debug_buf, self.debug_buf
        try:
            return self.func(match, self.match_index, self.context_name, self.boss.current_metadata, dictionaries, self.data, self.functions)
        finally:
            sys.stdout, sys.stderr = oo, oe
    @property
    def source(self):
        # Builtin sources live in a shipped JSON resource, not on the object.
        if self.is_builtin:
            import json
            return json.loads(P('editor-functions.json', data=True, allow_user_override=False))[self.name]
        return self._source
    def end(self):
        # Final callback after the last match (match=None) for functions that
        # opted in via the call_after_last_match attribute.
        # NOTE(review): when that branch returns above, the state-clearing
        # assignment below is skipped -- confirm this is intended.
        if getattr(self.func, 'call_after_last_match', False):
            oo, oe, sys.stdout, sys.stderr = sys.stdout, sys.stderr, self.debug_buf, self.debug_buf
            try:
                return self.func(None, self.match_index, self.context_name, self.boss.current_metadata, dictionaries, self.data, self.functions)
            finally:
                sys.stdout, sys.stderr = oo, oe
        self.data, self.boss, self.functions = {}, None, {}
class DebugOutput(Dialog):
    """Dialog showing the captured stdout/stderr of a replace function run."""
    def __init__(self, parent=None):
        # NOTE(review): parent is accepted but not forwarded to Dialog.__init__
        # -- confirm whether the dialog should be parented.
        Dialog.__init__(self, 'Debug output', 'sr-function-debug-output')
        # Keep the dialog alive on close so it can be re-shown with new logs.
        self.setAttribute(Qt.WA_DeleteOnClose, False)
    def setup_ui(self):
        self.l = l = QVBoxLayout(self)
        self.text = t = QPlainTextEdit(self)
        self.log_text = ''
        l.addWidget(t)
        l.addWidget(self.bb)
        self.bb.setStandardButtons(self.bb.Close)
        self.cb = b = self.bb.addButton(_('&Copy to clipboard'), self.bb.ActionRole)
        b.clicked.connect(self.copy_to_clipboard)
        b.setIcon(QIcon(I('edit-copy.png')))
    def show_log(self, name, text):
        # Display the captured output for the function called `name`.
        self.setWindowTitle(_('Debug output from %s') % name)
        self.text.setPlainText(self.windowTitle() + '\n\n' + text)
        self.log_text = text
        self.show()
        self.raise_()
    def sizeHint(self):
        fm = QFontMetrics(self.text.font())
        return QSize(fm.averageCharWidth() * 120, 400)
    def copy_to_clipboard(self):
        # Copies only the raw log text, not the title header.
        QApplication.instance().clipboard().setText(self.log_text)
def builtin_functions():
    # Yield every module-level replace_* callable tagged by the @builtin
    # decorator (identified by the `imports` attribute it sets).
    for name, obj in globals().iteritems():
        if name.startswith('replace_') and callable(obj) and hasattr(obj, 'imports'):
            yield obj
# Module-level cache for functions(); rebuilt when refresh=True is passed.
_functions = None
def functions(refresh=False):
    # Return the name -> Function map, combining builtins with user-defined
    # functions.  The result is cached in _functions until refresh=True.
    global _functions
    if _functions is None or refresh:
        ans = _functions = {}
        for func in builtin_functions():
            ans[func.name] = Function(func.name, func=func)
        for name, source in user_functions.iteritems():
            try:
                f = Function(name, source=source)
            except Exception:
                # Skip user functions whose stored source no longer compiles.
                continue
            ans[f.name] = f
    return _functions
def remove_function(name, gui_parent=None):
    """Remove the named user-defined search/replace function.

    Shows an error dialog and returns False when the name is empty, unknown,
    or refers to a builtin (builtins cannot be removed).  Returns True after
    deleting the function and refreshing all function-name completion boxes.
    """
    funcs = functions()
    if not name:
        return False
    if name not in funcs:
        error_dialog(gui_parent, _('No such function'), _(
            'There is no function named %s') % name, show=True)
        return False
    if name not in user_functions:
        error_dialog(gui_parent, _('Cannot remove builtin function'), _(
            'The function %s is a builtin function, it cannot be removed.') % name, show=True)
        # Bug fix: previously execution fell through to the del below, which
        # raised KeyError since builtins are never in user_functions.
        return False
    del user_functions[name]
    functions(refresh=True)
    refresh_boxes()
    return True
# Weak references to all live FunctionBox widgets, used by refresh_boxes().
boxes = []
def refresh_boxes():
    """Refresh every live FunctionBox so its completion list is up to date."""
    # boxes holds weak references; dead ones dereference to None and are skipped.
    for weak in boxes:
        widget = weak()
        if widget is None:
            continue
        widget.refresh()
class FunctionBox(EditWithComplete):
    """Line edit with completion over the known function names."""
    save_search = pyqtSignal()
    show_saved_searches = pyqtSignal()
    def __init__(self, parent=None, show_saved_search_actions=False):
        EditWithComplete.__init__(self, parent)
        self.set_separator(None)
        self.show_saved_search_actions = show_saved_search_actions
        self.refresh()
        self.setToolTip(_('Choose a function to run on matched text (by name)'))
        # Register so refresh_boxes() can update completions globally.
        boxes.append(weakref.ref(self))
    def refresh(self):
        # Re-populate completions from the current set of function names.
        self.update_items_cache(set(functions()))
    def contextMenuEvent(self, event):
        menu = self.lineEdit().createStandardContextMenu()
        if self.show_saved_search_actions:
            menu.addSeparator()
            menu.addAction(_('Save current search'), self.save_search.emit)
            menu.addAction(_('Show saved searches'), self.show_saved_searches.emit)
        menu.exec_(event.globalPos())
class FunctionEditor(Dialog):
    """Dialog for creating or editing a user search/replace function."""
    def __init__(self, func_name='', parent=None):
        # func_name: pre-select an existing function for editing (optional).
        self._func_name = func_name
        Dialog.__init__(self, _('Create/edit a function'), 'edit-sr-func', parent=parent)
    def setup_ui(self):
        self.l = l = QVBoxLayout(self)
        self.h = h = QHBoxLayout()
        l.addLayout(h)
        self.la1 = la = QLabel(_('F&unction name:'))
        h.addWidget(la)
        self.fb = fb = FunctionBox(self)
        la.setBuddy(fb)
        h.addWidget(fb, stretch=10)
        self.la3 = la = QLabel(_('&Code:'))
        self.source_code = TextEdit(self)
        self.source_code.load_text('', 'python')
        la.setBuddy(self.source_code)
        l.addWidget(la), l.addWidget(self.source_code)
        if self._func_name:
            # Editing: pre-fill with the existing function's source.
            self.fb.setText(self._func_name)
            func = functions().get(self._func_name)
            if func is not None:
                self.source_code.setPlainText(func.source or ('\n' + EMPTY_FUNC))
        else:
            # Creating: start from the empty replace() template.
            self.source_code.setPlainText('\n' + EMPTY_FUNC)
        self.la2 = la = QLabel(_(
            'For help with creating functions, see the <a href="%s">User Manual</a>') %
            localize_user_manual_link('http://manual.calibre-ebook.com/function_mode.html'))
        la.setOpenExternalLinks(True)
        l.addWidget(la)
        l.addWidget(self.bb)
    def sizeHint(self):
        fm = QFontMetrics(self.font())
        return QSize(fm.averageCharWidth() * 120, 600)
    @property
    def func_name(self):
        return self.fb.text().strip()
    @property
    def source(self):
        return self.source_code.toPlainText()
    def accept(self):
        # Validate name, compile the source and require a replace() callable
        # before persisting to user_functions and refreshing caches.
        if not self.func_name:
            return error_dialog(self, _('Must specify name'), _(
                'You must specify a name for this function.'), show=True)
        source = self.source
        try:
            mod = compile_code(source, self.func_name)
        except Exception as err:
            return error_dialog(self, _('Invalid python code'), _(
                'The code you created is not valid python code, with error: %s') % err, show=True)
        if not callable(mod.get('replace')):
            return error_dialog(self, _('No replace function'), _(
                'You must create a python function named replace in your code'), show=True)
        user_functions[self.func_name] = source
        functions(refresh=True)
        refresh_boxes()
        Dialog.accept(self)
# Builtin functions ##########################################################
def builtin(name, *args):
    """Decorator factory marking a replace function as a builtin.

    Stores the display name on the function as ``name`` and the helper
    objects its source needs as the ``imports`` tuple, then returns the
    function unchanged.
    """
    def decorate(func):
        func.name, func.imports = name, args
        return func
    return decorate
EMPTY_FUNC = '''\
def replace(match, number, file_name, metadata, dictionaries, data, functions, *args, **kwargs):
return ''
'''
@builtin('Upper-case text', upper, apply_func_to_match_groups)
def replace_uppercase(match, number, file_name, metadata, dictionaries, data, functions, *args, **kwargs):
    '''Make matched text upper case. If the regular expression contains groups,
    only the text in the groups will be changed, otherwise the entire text is
    changed.'''
    # upper is the ICU-aware upper-casing helper from calibre.utils.icu.
    return apply_func_to_match_groups(match, upper)
@builtin('Lower-case text', lower, apply_func_to_match_groups)
def replace_lowercase(match, number, file_name, metadata, dictionaries, data, functions, *args, **kwargs):
    '''Make matched text lower case. If the regular expression contains groups,
    only the text in the groups will be changed, otherwise the entire text is
    changed.'''
    # lower is the ICU-aware lower-casing helper from calibre.utils.icu.
    return apply_func_to_match_groups(match, lower)
@builtin('Capitalize text', capitalize, apply_func_to_match_groups)
def replace_capitalize(match, number, file_name, metadata, dictionaries, data, functions, *args, **kwargs):
    '''Capitalize matched text. If the regular expression contains groups,
    only the text in the groups will be changed, otherwise the entire text is
    changed.'''
    # capitalize is the ICU-aware helper from calibre.utils.icu.
    return apply_func_to_match_groups(match, capitalize)
@builtin('Title-case text', titlecase, apply_func_to_match_groups)
def replace_titlecase(match, number, file_name, metadata, dictionaries, data, functions, *args, **kwargs):
    '''Title-case matched text. If the regular expression contains groups,
    only the text in the groups will be changed, otherwise the entire text is
    changed.'''
    # titlecase comes from calibre.utils.titlecase.
    return apply_func_to_match_groups(match, titlecase)
@builtin('Swap the case of text', swapcase, apply_func_to_match_groups)
def replace_swapcase(match, number, file_name, metadata, dictionaries, data, functions, *args, **kwargs):
    '''Swap the case of the matched text. If the regular expression contains groups,
    only the text in the groups will be changed, otherwise the entire text is
    changed.'''
    # swapcase is the ICU-aware helper from calibre.utils.icu.
    return apply_func_to_match_groups(match, swapcase)
@builtin('Upper-case text (ignore tags)', upper, apply_func_to_html_text)
def replace_uppercase_ignore_tags(match, number, file_name, metadata, dictionaries, data, functions, *args, **kwargs):
    '''Make matched text upper case, ignoring the text inside tag definitions.'''
    # apply_func_to_html_text transforms only text nodes, leaving markup alone.
    return apply_func_to_html_text(match, upper)
@builtin('Lower-case text (ignore tags)', lower, apply_func_to_html_text)
def replace_lowercase_ignore_tags(match, number, file_name, metadata, dictionaries, data, functions, *args, **kwargs):
    '''Make matched text lower case, ignoring the text inside tag definitions.'''
    # apply_func_to_html_text transforms only text nodes, leaving markup alone.
    return apply_func_to_html_text(match, lower)
@builtin('Capitalize text (ignore tags)', capitalize, apply_func_to_html_text)
def replace_capitalize_ignore_tags(match, number, file_name, metadata, dictionaries, data, functions, *args, **kwargs):
    '''Capitalize matched text, ignoring the text inside tag definitions.'''
    # apply_func_to_html_text transforms only text nodes, leaving markup alone.
    return apply_func_to_html_text(match, capitalize)
@builtin('Title-case text (ignore tags)', titlecase, apply_func_to_html_text)
def replace_titlecase_ignore_tags(match, number, file_name, metadata, dictionaries, data, functions, *args, **kwargs):
    '''Title-case matched text, ignoring the text inside tag definitions.'''
    # apply_func_to_html_text transforms only text nodes, leaving markup alone.
    return apply_func_to_html_text(match, titlecase)
@builtin('Swap the case of text (ignore tags)', swapcase, apply_func_to_html_text)
def replace_swapcase_ignore_tags(match, number, file_name, metadata, dictionaries, data, functions, *args, **kwargs):
    '''Swap the case of the matched text, ignoring the text inside tag definitions.'''
    # apply_func_to_html_text transforms only text nodes, leaving markup alone.
    return apply_func_to_html_text(match, swapcase)
if __name__ == '__main__':
    # Manual smoke test: show the function editor dialog standalone.
    app = QApplication([])
    FunctionEditor().exec_()
    del app
| gpl-3.0 |
kimkulling/osre | contrib/gmock-1.7.0/gtest/test/gtest_env_var_test.py | 2408 | 3487 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly parses environment variables."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
IS_WINDOWS = os.name == 'nt'
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
# Path to the C++ helper binary whose flag values this script inspects.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_')
# Private copy of the environment, mutated by SetEnvVar() for each check.
environ = os.environ.copy()
def AssertEq(expected, actual):
    # Minimal assertEqual replacement: print both values, then raise.
    if expected != actual:
        print 'Expected: %s' % (expected,)
        print ' Actual: %s' % (actual,)
        raise AssertionError
def SetEnvVar(env_var, value):
    """Sets the env variable to 'value'; unsets it when 'value' is None."""
    if value is None:
        # Remove the variable if present; no-op otherwise.
        environ.pop(env_var, None)
    else:
        environ[env_var] = value
def GetFlag(flag):
    """Runs gtest_env_var_test_ and returns its output."""
    # flag may be None (no argument) or a flag name the binary echoes back.
    args = [COMMAND]
    if flag is not None:
        args += [flag]
    return gtest_test_utils.Subprocess(args, env=environ).output
def TestFlag(flag, test_val, default_val):
    """Verifies that the given flag is affected by the corresponding env var."""
    env_var = 'GTEST_' + flag.upper()
    # With the env var set the binary must report test_val...
    SetEnvVar(env_var, test_val)
    AssertEq(test_val, GetFlag(flag))
    # ...and with it unset, the documented default.
    SetEnvVar(env_var, None)
    AssertEq(default_val, GetFlag(flag))
class GTestEnvVarTest(gtest_test_utils.TestCase):
    def testEnvVarAffectsFlag(self):
        """Tests that environment variable should affect the corresponding flag."""
        TestFlag('break_on_failure', '1', '0')
        TestFlag('color', 'yes', 'auto')
        TestFlag('filter', 'FooTest.Bar', '*')
        TestFlag('output', 'xml:tmp/foo.xml', '')
        TestFlag('print_time', '0', '1')
        TestFlag('repeat', '999', '1')
        TestFlag('throw_on_failure', '1', '0')
        TestFlag('death_test_style', 'threadsafe', 'fast')
        TestFlag('catch_exceptions', '0', '1')
        if IS_LINUX:
            # These flags exist only on Linux builds of Google Test.
            TestFlag('death_test_use_fork', '1', '0')
            TestFlag('stack_trace_depth', '0', '100')
if __name__ == '__main__':
    # Delegate to the shared gtest python test driver.
    gtest_test_utils.Main()
| mit |
prohaska7/ft-index | scripts/run.stress-tests.py | 32 | 33186 | #!/usr/bin/env python
"""
A script for running our stress tests repeatedly to see if any fail.
Runs a list of stress tests in parallel, reporting passes and collecting
failure scenarios until killed. Runs with different table sizes,
cachetable sizes, and numbers of threads.
Suitable for running on a dev branch, or a release branch, or main.
Just run the script from within a branch you want to test.
By default, we stop everything, update from git, rebuild, and restart the
tests once a day.
"""
import logging
import os
import re
import stat
import sys
import time
from email.mime.text import MIMEText
from glob import glob
from logging import debug, info, warning, error, exception
from optparse import OptionGroup, OptionParser
from Queue import Queue
from random import randrange, shuffle
from resource import setrlimit, RLIMIT_CORE
from shutil import copy, copytree, move, rmtree
from signal import signal, SIGHUP, SIGINT, SIGPIPE, SIGALRM, SIGTERM
from smtplib import SMTP
from socket import gethostname
from subprocess import call, Popen, PIPE, STDOUT
from traceback import format_exc
from tempfile import mkdtemp, mkstemp
from threading import Event, Thread, Timer
__version__ = '$Id$'
__copyright__ = """Copyright (c) 2006, 2015, Percona and/or its affiliates. All rights reserved."""
# relpath implementation for python <2.6
# from http://unittest-ext.googlecode.com/hg-history/1df911640f7be239e58fb185b06ac2a8489dcdc4/unittest2/unittest2/compatibility.py
# Backport of os.path.relpath for python <2.6 (ntpath and posixpath variants),
# installed only when the running interpreter lacks it.
if not hasattr(os.path, 'relpath'):
    if os.path is sys.modules.get('ntpath'):
        def relpath(path, start=os.path.curdir):
            """Return a relative version of a path"""
            if not path:
                raise ValueError("no path specified")
            start_list = os.path.abspath(start).split(os.path.sep)
            path_list = os.path.abspath(path).split(os.path.sep)
            if start_list[0].lower() != path_list[0].lower():
                unc_path, rest = os.path.splitunc(path)
                unc_start, rest = os.path.splitunc(start)
                if bool(unc_path) ^ bool(unc_start):
                    raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
                                     % (path, start))
                else:
                    raise ValueError("path is on drive %s, start on drive %s"
                                     % (path_list[0], start_list[0]))
            # Work out how much of the filepath is shared by start and path.
            for i in range(min(len(start_list), len(path_list))):
                if start_list[i].lower() != path_list[i].lower():
                    break
            else:
                i += 1
            rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
            if not rel_list:
                return os.path.curdir
            return os.path.join(*rel_list)
    else:
        # default to posixpath definition
        def relpath(path, start=os.path.curdir):
            """Return a relative version of a path"""
            if not path:
                raise ValueError("no path specified")
            start_list = os.path.abspath(start).split(os.path.sep)
            path_list = os.path.abspath(path).split(os.path.sep)
            # Work out how much of the filepath is shared by start and path.
            i = len(os.path.commonprefix([start_list, path_list]))
            rel_list = [os.path.pardir] * (len(start_list)-i) + path_list[i:]
            if not rel_list:
                return os.path.curdir
            return os.path.join(*rel_list)
    os.path.relpath = relpath
def setlimits():
    # preexec hook for test child processes: allow unlimited core dumps and
    # lower their scheduling priority.
    setrlimit(RLIMIT_CORE, (-1, -1))
    os.nice(7)
class TestFailure(Exception):
    """Raised when a stress-test child behaves unexpectedly (crash or no-crash)."""
    pass
class Killed(Exception):
    """Raised when the scheduler asks a running child to stop."""
    pass
class TestRunnerBase(object):
    """Shared machinery for running one stress-test executable repeatedly.

    Subclasses implement run_prepare() and run_test(); this class handles
    environment preparation/caching, output capture, failure archiving and
    pass/fail reporting through the scheduler.
    """
    def __init__(self, scheduler, builddir, rev, execf, tsize, csize, default_test_time, savedir):
        self.scheduler = scheduler
        self.builddir = builddir
        self.rev = rev
        self.execf = execf
        self.tsize = tsize
        self.csize = csize
        self.default_test_time = default_test_time
        # One run out of every 16 is promoted to a long (1h) run; pick which.
        self.long_test_index = randrange(16)
        self.savedir = savedir
        self.env = os.environ
        self.nruns = 0
        self.num_ptquery = 1
        self.num_update = 1
        self.rundir = None
        self.outf = None
        # [start, end] wall-clock timestamps of the most recent test phase.
        self.times = [0, 0]
        # Large tests are throttled by the scheduler (maxlarge).
        self.is_large = (tsize >= 10000000)
        self.oldversionstr = 'noupgrade'
    def __str__(self):
        return (self.__class__.__name__ +
                '<%(execf)s, %(tsize)d, %(csize)d, %(oldversionstr)s>') % self
    def __getitem__(self, k):
        # Lets instances be used directly as '%(attr)s' % self mappings.
        return self.__getattribute__(k)
    def infostr(self):
        # One tab-separated line describing this run, for the pass/fail log.
        return '\t'.join(['%(execf)s',
                          '%(rev)s',
                          '%(tsize)d',
                          '%(csize)d',
                          '%(oldversionstr)s',
                          '%(num_ptquery)d',
                          '%(num_update)d',
                          '%(time)d']) % self
    @property
    def time(self):
        # Duration of the last test phase, or 0 if it has not finished.
        if self.times[0] != 0 and self.times[1] != 0:
            return self.times[1] - self.times[0]
        else:
            return 0
    @property
    def test_time(self):
        # Every 16th run (per long_test_index) runs for a full hour.
        if self.nruns % 16 == self.long_test_index:
            return 3600
        else:
            return self.default_test_time
    @property
    def envdir(self):
        return os.path.join(self.rundir, 'envdir')
    @property
    def prepareloc(self):
        # Where a prepared environment for this config is cached between runs.
        preparename = 'dir.%(execf)s-%(tsize)d-%(csize)d' % self
        return os.path.join(self.builddir, 'src', 'tests', preparename)
    def prepare(self):
        # Reuse a cached prepared environment when available, else build one.
        if os.path.isdir(self.prepareloc):
            debug('%s found existing environment.', self)
            copytree(self.prepareloc, self.envdir)
        else:
            debug('%s preparing an environment.', self)
            self.run_prepare()
            self.save_prepared_envdir()
    def save_prepared_envdir(self):
        debug('%s copying environment to %s.', self, self.prepareloc)
        copytree(self.envdir, self.prepareloc)
    def run(self):
        # Vary thread counts between runs: sometimes 1, sometimes random 0-15.
        if self.nruns % 2 < 1:
            self.num_ptquery = 1
        else:
            self.num_ptquery = randrange(16)
        if self.nruns % 4 < 2:
            self.num_update = 1
        else:
            self.num_update = randrange(16)
        srctests = os.path.join(self.builddir, 'src', 'tests')
        self.rundir = mkdtemp(dir=srctests)
        try:
            outname = os.path.join(self.rundir, 'output.txt')
            self.outf = open(outname, 'w')
            try:
                self.prepare()
                debug('%s testing.', self)
                self.times[0] = time.time()
                self.run_test()
                self.times[1] = time.time()
                debug('%s done.', self)
            except Killed:
                # Shutdown requested; neither a pass nor a failure.
                pass
            except TestFailure:
                # Archive the whole run directory plus binary and shared libs
                # so the failure can be reproduced later, then report it.
                self.times[1] = time.time()
                savepfx = '%(execf)s-%(rev)s-%(tsize)d-%(csize)d-%(num_ptquery)d-%(num_update)d-%(phase)s-' % self
                if not os.path.exists(self.savedir):
                    os.mkdir(self.savedir)
                savedir = mkdtemp(dir=self.savedir, prefix=savepfx)
                tarfile = '%s.tar' % savedir
                commands = ''
                try:
                    f = open(os.path.join(self.rundir, 'commands.txt'))
                    commands = f.read()
                    f.close()
                except:
                    pass
                output = ''
                try:
                    f = open(os.path.join(self.rundir, 'output.txt'))
                    output = f.read()
                    f.close()
                except:
                    pass
                self.scheduler.email_failure(self, tarfile, commands, output)
                self.save(savedir, tarfile)
                self.scheduler.report_failure(self)
                warning('Saved environment to %s', tarfile)
            else:
                self.scheduler.report_success(self)
        finally:
            self.outf.close()
            rmtree(self.rundir)
            self.rundir = None
            self.times = [0, 0]
            self.nruns += 1
    def save(self, savedir, tarfile):
        # Copy the run directory, the test binary and its shared-library
        # dependencies into savedir, then tar it up for archiving.
        def targetfor(path):
            return os.path.join(savedir, os.path.basename(path))
        for f in glob(os.path.join(self.rundir, '*')):
            if os.path.isdir(f):
                copytree(f, targetfor(f))
            else:
                copy(f, targetfor(f))
        fullexecf = os.path.join(self.builddir, 'src', 'tests', self.execf)
        copy(fullexecf, targetfor(fullexecf))
        # TODO: Leif was lazy and did this in bash, it should be done in python for portability
        os.system("for l in $(ldd %(fullexecf)s | sed 's/\ *(0x[0-9a-f]*)$//;s/.*=>\ \?//;s/^\ *|\ *$//' | grep -v '^$'); do mkdir -p %(savedir)s/$(dirname $l); cp $l %(savedir)s/$l; done" % {'fullexecf': fullexecf, 'savedir': savedir})
        r = call(['tar', 'cf', os.path.basename(tarfile), os.path.basename(savedir)], cwd=os.path.dirname(savedir))
        if r != 0:
            error('tarring up %s failed.' % savedir)
            sys.exit(r)
        os.chmod(tarfile, stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
    def waitfor(self, proc):
        # Poll the child once a second; on shutdown, SIGTERM it and bail out.
        while proc.poll() is None:
            self.scheduler.stopping.wait(1)
            if self.scheduler.stopping.isSet():
                os.kill(proc.pid, SIGTERM)
                raise Killed()
    def spawn_child(self, args):
        # Run the test binary with `args` inside rundir, logging the command
        # line to commands.txt and its output to output.txt.
        logging.debug('%s spawning %s', self, ' '.join([self.execf] + args))
        commandsf = open(os.path.join(self.rundir, 'commands.txt'), 'a')
        print >>commandsf, ' '.join([self.execf] + args)
        commandsf.close()
        proc = Popen([self.execf] + args,
                     executable=os.path.join('..', self.execf),
                     env=self.env,
                     cwd=self.rundir,
                     preexec_fn=setlimits,
                     stdout=self.outf,
                     stderr=STDOUT)
        self.waitfor(proc)
        return proc.returncode
    @property
    def extraargs(self):
        # for overriding
        return []
    @property
    def prepareargs(self):
        return ['-v',
                '--envdir', 'envdir',
                '--num_elements', str(self.tsize),
                '--cachetable_size', str(self.csize)] + self.extraargs
    @property
    def testargs(self):
        return ['--num_seconds', str(self.test_time),
                '--join_timeout', str(60 * 60 * 12),
                '--no-crash_on_operation_failure',
                '--num_ptquery_threads', str(self.num_ptquery),
                '--num_update_threads', str(self.num_update)] + self.prepareargs
class TestRunner(TestRunnerBase):
    """Normal stress run: create an environment, then stress it."""
    def run_prepare(self):
        self.phase = "create"
        if self.spawn_child(['--only_create'] + self.prepareargs) != 0:
            raise TestFailure('%s crashed during --only_create.' % self.execf)
    def run_test(self):
        self.phase = "stress"
        if self.spawn_child(['--only_stress'] + self.testargs) != 0:
            raise TestFailure('%s crashed during --only_stress.' % self.execf)
class RecoverTestRunner(TestRunnerBase):
    """Crash-recovery run: the stress phase is EXPECTED to crash (--test),
    after which --recover must succeed."""
    def run_prepare(self):
        self.phase = "create"
        if self.spawn_child(['--only_create', '--test'] + self.prepareargs) != 0:
            raise TestFailure('%s crashed during --only_create --test.' % self.execf)
    def run_test(self):
        self.phase = "test"
        # Note inverted check: a clean exit here is the failure.
        if self.spawn_child(['--only_stress', '--test'] + self.testargs) == 0:
            raise TestFailure('%s did not crash during --only_stress --test' % self.execf)
        self.phase = "recover"
        if self.spawn_child(['--recover'] + self.prepareargs) != 0:
            raise TestFailure('%s crashed during --recover' % self.execf)
class UpgradeTestRunnerMixin(TestRunnerBase):
    """Mixin that seeds the run from a saved environment of an older version,
    so the test exercises the on-disk upgrade path."""
    def __init__(self, old_environments_dir, version, pristine_or_stressed, **kwargs):
        super(UpgradeTestRunnerMixin, self).__init__(**kwargs)
        self.version = version
        # 'pristine' or 'stressed': which flavor of saved env to start from.
        self.pristine_or_stressed = pristine_or_stressed
        self.old_env_dirs = os.path.join(old_environments_dir, version)
        self.oldversionstr = '%(version)s-%(pristine_or_stressed)s' % self
    @property
    def extraargs(self):
        return ['--num_DBs', '1']
    @property
    def old_envdir(self):
        oldname = 'saved%(pristine_or_stressed)s-%(tsize)d-dir' % self
        logging.debug('%s using old version environment %s from %s.', self, oldname, self.old_env_dirs)
        return os.path.join(self.old_env_dirs, oldname)
    def save_prepared_envdir(self):
        # no need to do this
        pass
    def run_prepare(self):
        # Instead of creating a fresh env, copy in the old-version snapshot.
        self.phase = "create"
        copytree(self.old_envdir, self.envdir)
class DoubleTestRunnerMixin(TestRunnerBase):
    """Runs the test phase twice in a row.
    Good for upgrade tests, to run the test once to upgrade it and then
    again to make sure the upgrade left it in a good state.
    """
    def run_test(self):
        # Second pass validates the environment the first (upgrading) pass left.
        super(DoubleTestRunnerMixin, self).run_test()
        super(DoubleTestRunnerMixin, self).run_test()
class UpgradeTestRunner(UpgradeTestRunnerMixin, TestRunner):
    """Stress run seeded from an old-version environment."""
    pass
class UpgradeRecoverTestRunner(UpgradeTestRunnerMixin, RecoverTestRunner):
    """Crash/recover run seeded from an old-version environment."""
    pass
class DoubleUpgradeTestRunner(DoubleTestRunnerMixin, UpgradeTestRunner):
    """Upgrade stress run whose test phase executes twice."""
    pass
class DoubleUpgradeRecoverTestRunner(DoubleTestRunnerMixin, UpgradeRecoverTestRunner):
    """Upgrade crash/recover run whose test phase executes twice."""
    pass
class Worker(Thread):
    """Thread that repeatedly pulls TestRunners off the scheduler queue and runs them."""
    def __init__(self, scheduler):
        super(Worker, self).__init__()
        self.scheduler = scheduler
    def run(self):
        debug('%s starting.' % self)
        while not self.scheduler.stopping.isSet():
            test_runner = self.scheduler.get()
            if test_runner.is_large:
                # Cap the number of concurrently running large-table tests.
                if self.scheduler.nlarge + 1 > self.scheduler.maxlarge:
                    debug('%s pulled a large test, but there are already %d running. Putting it back.',
                          self, self.scheduler.nlarge)
                    self.scheduler.put(test_runner)
                    continue
                self.scheduler.nlarge += 1
            try:
                test_runner.run()
            except Exception, e:
                # Any unexpected error tears down the whole scheduler.
                exception('Fatal error in worker thread.')
                info('Killing all workers.')
                self.scheduler.error = format_exc()
                self.scheduler.stop()
            if test_runner.is_large:
                self.scheduler.nlarge -= 1
            if not self.scheduler.stopping.isSet():
                # Requeue the runner so it runs again with fresh parameters.
                self.scheduler.put(test_runner)
        debug('%s exiting.' % self)
class Scheduler(Queue):
    """Work queue of TestRunners plus the worker threads that drain it,
    with pass/fail accounting and failure e-mail reporting."""
    def __init__(self, nworkers, maxlarge, logger, email, branch):
        Queue.__init__(self)
        info('Initializing scheduler with %d jobs.', nworkers)
        self.nworkers = nworkers
        self.logger = logger
        self.maxlarge = maxlarge
        self.nlarge = 0  # not thread safe, don't really care right now
        self.passed = 0
        self.failed = 0
        self.workers = []
        self.stopping = Event()
        self.timer = None
        self.error = None
        self.email = email
        self.branch = branch
    def run(self, timeout):
        # Start the worker threads, optionally arm a shutdown timer, then
        # babysit the workers until stopping is set or we are interrupted.
        info('Starting workers.')
        self.stopping.clear()
        for i in range(self.nworkers):
            w = Worker(self)
            self.workers.append(w)
            w.start()
        if timeout != 0:
            self.timer = Timer(timeout, self.stop)
            self.timer.start()
        while not self.stopping.isSet():
            try:
                for w in self.workers:
                    if self.stopping.isSet():
                        break
                    w.join(timeout=1.0)
            except (KeyboardInterrupt, SystemExit):
                debug('Scheduler interrupted. Stopping and joining threads.')
                self.stop()
                self.join()
                sys.exit(0)
        else:
            # Normal (non-interrupt) exit of the while loop.
            debug('Scheduler stopped by someone else. Joining threads.')
            self.join()
            if self.error:
                send_mail(self.email, 'Stress tests scheduler stopped by something, on %s' % gethostname(), self.error)
                sys.exit(77)
    def join(self):
        # Cancel any pending shutdown timer and wait for all workers to exit.
        if self.timer is not None:
            self.timer.cancel()
        while len(self.workers) > 0:
            self.workers.pop().join()
    def stop(self):
        info('Stopping workers.')
        self.stopping.set()
    def __getitem__(self, k):
        # Lets the instance be used as a '%(attr)s' % self mapping.
        return self.__dict__[k]
    def reportstr(self):
        return '[PASS=%(passed)d FAIL=%(failed)d]' % self
    def report_success(self, runner):
        self.passed += 1
        self.logger.info('PASSED %s', runner.infostr())
        info('%s PASSED %s', self.reportstr(), runner.infostr())
    def report_failure(self, runner):
        self.failed += 1
        self.logger.warning('FAILED %s', runner.infostr())
        warning('%s FAILED %s', self.reportstr(), runner.infostr())
    def email_failure(self, runner, savedtarfile, commands, output):
        # Compose and send a detailed failure report; no-op if email disabled.
        if self.email is None:
            return
        h = gethostname()
        if isinstance(runner, UpgradeTestRunnerMixin):
            upgradestr = '''
The test was upgrading from %s.''' % runner.oldversionstr
        else:
            upgradestr = ''
        send_mail(self.email,
                  'Stress test failure on %(hostname)s running %(branch)s.' % { 'hostname': h, 'branch': self.branch },
                  ('''A stress test failed on %(hostname)s running %(branch)s at revision %(rev)s after %(test_duration)d seconds.%(upgradestr)s
Its environment is saved to %(tarfile)s on that machine.
The test configuration was:
testname: %(execf)s
num_elements: %(tsize)d
cachetable_size: %(csize)d
num_ptquery_threads: %(num_ptquery)d
num_update_threads: %(num_update)d
Commands run:
%(commands)s
Test output:
%(output)s
''' % {
                      'hostname': h,
                      'rev': runner.rev,
                      'test_duration': runner.time,
                      'upgradestr': upgradestr,
                      'tarfile': savedtarfile,
                      'execf': runner.execf,
                      'tsize': runner.tsize,
                      'csize': runner.csize,
                      'num_ptquery': runner.num_ptquery,
                      'num_update': runner.num_update,
                      'branch': self.branch,
                      'commands': commands,
                      'output': output,
                  }))
def send_mail(toaddrs, subject, body):
    """Report a message to the recipients.

    SMTP delivery is currently disabled; the subject and body are logged
    instead.  (The original MIMEText/SMTP sending code was commented out.)
    """
    info(subject)
    info(body)
def update(tokudb):
    """Run 'git pull' in the tokudb working tree, discarding git's output."""
    info('Updating from git.')
    # Use a context manager so the devnull handle is closed even if call()
    # raises (the original leaked the handle on that path).
    with open(os.devnull, 'w') as devnull:
        call(['git', 'pull'], stdout=devnull, stderr=STDOUT, cwd=tokudb)
def rebuild(tokudb, builddir, tokudb_data, cc, cxx, tests):
    # Configure (cmake) and build (make) the tree; on any failure, e-mail the
    # dev list and exit with the child's status.
    # NOTE(review): the `tests` parameter is unused here -- confirm intended.
    info('Building tokudb.')
    if not os.path.exists(builddir):
        os.mkdir(builddir)
    newenv = os.environ
    newenv['CC'] = cc
    newenv['CXX'] = cxx
    r = call(['cmake',
              '-DCMAKE_BUILD_TYPE=Debug',
              '-DUSE_GTAGS=OFF',
              '-DUSE_CTAGS=OFF',
              '-DUSE_ETAGS=OFF',
              '-DUSE_CSCOPE=OFF',
              '-DTOKUDB_DATA=%s' % tokudb_data,
              tokudb],
             env=newenv,
             cwd=builddir)
    if r != 0:
        send_mail(['dev-private@percona.com'], 'Stress tests on %s failed to build.' % gethostname(), '')
        error('Building the tests failed.')
        sys.exit(r)
    r = call(['make', '-j8'], cwd=builddir)
    if r != 0:
        send_mail(['dev-private@percona.com'], 'Stress tests on %s failed to build.' % gethostname(), '')
        error('Building the tests failed.')
        sys.exit(r)
def revfor(tokudb):
    # Return the current revision string (git describe --tags) of the tree.
    proc = Popen("git describe --tags",
                 shell=True, cwd=tokudb, stdout=PIPE)
    (out, err) = proc.communicate()
    rev = out.strip()
    info('Using tokudb at r%s.', rev)
    return rev
def main(opts):
    """Top-level driver: build, schedule, and run the stress tests forever.

    Builds the tree (unless skipped via opts.build), constructs one runner
    per combination of table size / cachetable size / test binary (plus
    upgrade variants), then loops: run the scheduler for one rebuild
    period, git-pull, rebuild, and re-tag every runner with the new rev.
    """
    builddir = os.path.join(opts.tokudb, 'build')
    if opts.build:
        rebuild(opts.tokudb, builddir, opts.tokudb_data, opts.cc, opts.cxx, opts.testnames + opts.recover_testnames)
    rev = revfor(opts.tokudb)
    if not os.path.exists(opts.savedir):
        os.mkdir(opts.savedir)
    # Dedicated 'stress' logger writing only to opts.log (propagation off
    # so records do not also reach the root logger's handlers).
    logger = logging.getLogger('stress')
    logger.propagate = False
    logger.setLevel(logging.INFO)
    logger.addHandler(logging.FileHandler(opts.log))
    info('Saving pass/fail logs to %s.', opts.log)
    info('Saving failure environments to %s.', opts.savedir)
    scheduler = Scheduler(opts.jobs, opts.maxlarge, logger, opts.email, opts.branch)
    runners = []
    # Test matrix: three table sizes crossed with two cachetable sizes
    # (50x the table size, and a fixed 1 GB).
    for tsize in [2000, 200000, 50000000]:
        for csize in [50 * tsize, 1000 ** 3]:
            kwargs = {
                'scheduler': scheduler,
                'builddir': builddir,
                'rev': rev,
                'tsize': tsize,
                'csize': csize,
                'default_test_time': opts.test_time,
                'savedir': opts.savedir
                }
            for test in opts.testnames:
                if opts.run_non_upgrade:
                    runners.append(TestRunner(execf=test, **kwargs))
                # never run test_stress_openclose.tdb on existing
                # environments, it doesn't want them
                if opts.run_upgrade and test != 'test_stress_openclose.tdb':
                    for version in opts.old_versions:
                        for pristine_or_stressed in ['pristine', 'stressed']:
                            upgrade_kwargs = {
                                'old_environments_dir': opts.old_environments_dir,
                                'version': version,
                                'pristine_or_stressed': pristine_or_stressed
                                }
                            upgrade_kwargs.update(kwargs)
                            # skip running test_stress4.tdb on any env
                            # that has already been stressed, as that
                            # breaks its assumptions
                            if opts.double_upgrade and test != 'test_stress4.tdb':
                                runners.append(DoubleUpgradeTestRunner(
                                        execf=test,
                                        **upgrade_kwargs))
                            elif not (test == 'test_stress4.tdb' and pristine_or_stressed == 'stressed'):
                                runners.append(UpgradeTestRunner(
                                        execf=test,
                                        **upgrade_kwargs))
            # Recovery tests mirror the structure above but use the
            # Recover* runner classes.
            for test in opts.recover_testnames:
                if opts.run_non_upgrade:
                    runners.append(RecoverTestRunner(execf=test, **kwargs))
                if opts.run_upgrade:
                    for version in opts.old_versions:
                        for pristine_or_stressed in ['pristine', 'stressed']:
                            upgrade_kwargs = {
                                'old_environments_dir': opts.old_environments_dir,
                                'version': version,
                                'pristine_or_stressed': pristine_or_stressed
                                }
                            upgrade_kwargs.update(kwargs)
                            if opts.double_upgrade:
                                runners.append(DoubleUpgradeRecoverTestRunner(
                                        execf=test,
                                        **upgrade_kwargs))
                            else:
                                runners.append(UpgradeRecoverTestRunner(
                                        execf=test,
                                        **upgrade_kwargs))
    # Randomize execution order so heavy configurations are spread out.
    shuffle(runners)
    for runner in runners:
        scheduler.put(runner)
    try:
        # Loop until the scheduler reports an error; each iteration runs
        # for opts.rebuild_period seconds, then refreshes the build.
        while scheduler.error is None:
            scheduler.run(opts.rebuild_period)
            if scheduler.error is not None:
                error('Scheduler reported an error.')
                raise scheduler.error
            update(opts.tokudb)
            rebuild(opts.tokudb, builddir, opts.tokudb_data, opts.cc, opts.cxx, opts.testnames + opts.recover_testnames)
            rev = revfor(opts.tokudb)
            # Re-stamp all runners with the freshly built revision.
            for runner in runners:
                runner.rev = rev
    except (KeyboardInterrupt, SystemExit):
        # Manual interruption is a clean shutdown, not a failure.
        sys.exit(0)
    except Exception, e:
        # Python 2 syntax; any other unhandled error is mailed out
        # with the full traceback before re-raising.
        exception('Unhandled exception caught in main.')
        send_mail(['dev-private@percona.com'], 'Stress tests caught unhandled exception in main, on %s' % gethostname(), format_exc())
        raise e
if __name__ == '__main__':
    # --- command-line interface: build the optparse parser -----------------
    a0 = os.path.abspath(sys.argv[0])
    usage = '%prog [options]\n' + __doc__
    parser = OptionParser(usage=usage)
    parser.add_option('-v', '--verbose', action='store_true', dest='verbose', default=False, help='show build status, passing tests, and other info')
    parser.add_option('-d', '--debug', action='store_true', dest='debug', default=False, help='show debugging info')
    parser.add_option('-l', '--log', type='string', dest='log',
                      default='/tmp/run.stress-tests.log',
                      help='where to save logfiles')
    parser.add_option('-s', '--savedir', type='string', dest='savedir',
                      default='/tmp/run.stress-tests.failures',
                      help='where to save environments and extra data for failed tests')
    parser.add_option('--email', action='append', type='string', dest='email', default=[], help='where to send emails')
    parser.add_option('--no-email', action='store_false', dest='send_emails', default=True, help='suppress emails on failure')
    # The tree root defaults to the parent of this script's directory.
    default_toplevel = os.path.dirname(os.path.dirname(a0))
    parser.add_option('--tokudb', type='string', dest='tokudb',
                      default=default_toplevel,
                      help=('top of the tokudb tree (contains ft/ and src/) [default=%s]' % os.path.relpath(default_toplevel)))
    # Derive a human-readable branch name from the checkout directory
    # layout ('tokudb' under mysql.branches/<name>, or 'tokudb.<name>').
    toplevel_basename = os.path.basename(default_toplevel)
    if toplevel_basename == 'tokudb':
        maybe_absolute_branchpath = os.path.dirname(default_toplevel)
        if os.path.basename(os.path.dirname(maybe_absolute_branchpath)) == 'mysql.branches':
            default_branchname = os.path.basename(maybe_absolute_branchpath)
        else:
            default_branchname = 'mainline'
    elif toplevel_basename[:7] == 'tokudb.':
        default_branchname = toplevel_basename[7:]
    else:
        default_branchname = 'unknown branch'
    parser.add_option('--branch', type='string', dest='branch',
                      default=default_branchname,
                      help=('what to call this branch [default=%s]' % default_branchname))
    # --- scheduler options -------------------------------------------------
    test_group = OptionGroup(parser, 'Scheduler Options', 'Control how the scheduler runs jobs.')
    # NOTE(review): the trailing comma on the next statement makes it a
    # discarded 1-tuple; harmless, but presumably unintended.
    test_group.add_option('-t', '--test_time', type='int', dest='test_time',
                          default=60,
                          help='time to run each test, in seconds [default=60]'),
    test_group.add_option('-j', '--jobs', type='int', dest='jobs', default=8,
                          help='how many concurrent tests to run [default=8]')
    test_group.add_option('--maxlarge', type='int', dest='maxlarge', default=2,
                          help='maximum number of large tests to run concurrently (helps prevent swapping) [default=2]')
    parser.add_option_group(test_group)
    # Default stress-test and recovery-test binaries.
    default_testnames = ['test_stress0.tdb',
                         'test_stress1.tdb',
                         'test_stress2.tdb',
                         'test_stress3.tdb',
                         'test_stress4.tdb',
                         'test_stress5.tdb',
                         'test_stress6.tdb',
                         'test_stress7.tdb',
                         'test_stress_hot_indexing.tdb',
                         'test_stress_with_verify.tdb',
                         'test_stress_openclose.tdb']
    default_recover_testnames = ['recover-test_stress1.tdb',
                                 'recover-test_stress2.tdb',
                                 'recover-test_stress3.tdb',
                                 'recover-child-rollback.tdb',
                                 'recover-test_stress_openclose.tdb']
    # --- build options -----------------------------------------------------
    build_group = OptionGroup(parser, 'Build Options', 'Control how the fractal tree and tests get built.')
    build_group.add_option('--skip_build', action='store_false', dest='build', default=True,
                           help='skip the git pull and build phase before testing [default=False]')
    build_group.add_option('--rebuild_period', type='int', dest='rebuild_period', default=60 * 60 * 24,
                           help='how many seconds between doing an git pull and rebuild, 0 means never rebuild [default=24 hours]')
    default_tokudb_data = os.path.abspath(os.path.join(default_toplevel, '..', 'tokudb.data'))
    build_group.add_option('--tokudb_data', type='string', dest='tokudb_data', default=default_tokudb_data,
                           help='passed to cmake as TOKUDB_DATA [default=%s]' % default_tokudb_data)
    build_group.add_option('--cc', type='string', dest='cc', default='gcc47',
                           help='which compiler to use [default=gcc47]')
    build_group.add_option('--cxx', type='string', dest='cxx', default='g++47',
                           help='which compiler to use [default=g++47]')
    build_group.add_option('--add_test', action='append', type='string', dest='testnames', default=default_testnames,
                           help=('add a stress test to run [default=%r]' % default_testnames))
    build_group.add_option('--add_recover_test', action='append', type='string', dest='recover_testnames', default=default_recover_testnames,
                           help=('add a recover stress test to run [default=%r]' % default_recover_testnames))
    parser.add_option_group(build_group)
    # --- upgrade options ---------------------------------------------------
    upgrade_group = OptionGroup(parser, 'Upgrade Options', 'Also run on environments from old versions of tokudb.')
    upgrade_group.add_option('--run_upgrade', action='store_true', dest='run_upgrade', default=False,
                             help='run the tests on old dictionaries as well, to test upgrade [default=False]')
    upgrade_group.add_option('--skip_non_upgrade', action='store_false', dest='run_non_upgrade', default=True,
                             help="skip the tests that don't involve upgrade [default=False]")
    upgrade_group.add_option('--double_upgrade', action='store_true', dest='double_upgrade', default=False,
                             help='run the upgrade tests twice in a row [default=False]')
    # NOTE(review): choices list contains '7.0.1' but the help text lists
    # '7.1.0' -- confirm which version string is intended.
    upgrade_group.add_option('--add_old_version', action='append', type='choice', dest='old_versions', choices=['4.2.0', '5.0.8', '5.2.7', '6.0.0', '6.1.0', '6.5.1', '6.6.3', '7.0.1','7.1.6','v26','7.5.0'],
                             help='which old versions to use for running the stress tests in upgrade mode. can be specified multiple times [options=4.2.0, 5.0.8, 5.2.7, 6.0.0, 6.1.0, 6.5.1, 6.6.3, 7.1.0, 7.1.6, v26, 7.5.0]')
    upgrade_group.add_option('--old_environments_dir', type='string', dest='old_environments_dir',
                             default=('%s/old-stress-test-envs' % default_tokudb_data),
                             help='directory containing old version environments (should contain 5.0.8/, 5.2.7/, etc, and the environments should be in those) [default=../../tokudb.data/stress_environments]')
    parser.add_option_group(upgrade_group)
    # --- parse and validate ------------------------------------------------
    (opts, args) = parser.parse_args()
    if len(args) > 0:
        parser.error('Invalid arguments: %r' % args)
    # Giving any --add_old_version implies --run_upgrade.
    if opts.old_versions is not None and len(opts.old_versions) > 0:
        opts.run_upgrade = True
    if opts.run_upgrade:
        if not os.path.isdir(opts.old_environments_dir):
            parser.error('You specified --run_upgrade but did not specify an --old_environments_dir that exists.')
        # NOTE(review): if --run_upgrade is passed without any
        # --add_old_version, opts.old_versions is None and this len()
        # raises TypeError instead of the intended parser.error.
        if len(opts.old_versions) < 1:
            parser.error('You specified --run_upgrade but gave no --old_versions to run against.')
        for version in opts.old_versions:
            version_dir = os.path.join(opts.old_environments_dir, version)
            if not os.path.isdir(version_dir):
                parser.error('You specified --run_upgrade but %s is not a directory.' % version_dir)
    # Normalize email settings: None disables mail entirely; an empty
    # list falls back to the dev-private alias.
    if not opts.send_emails:
        opts.email = None
    elif len(opts.email) == 0:
        opts.email.append('dev-private@percona.com')
    # Verbosity flags map onto the root logger's level.
    if opts.debug:
        logging.basicConfig(level=logging.DEBUG)
    elif opts.verbose:
        logging.basicConfig(level=logging.INFO)
    else:
        logging.basicConfig(level=logging.WARNING)
    main(opts)
| gpl-2.0 |
undefinedv/Jingubang | sqlmap/lib/core/optiondict.py | 1 | 13064 | #!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
optDict = {
    # Format:
    # Family: { "parameter name": "parameter datatype" },
    # Or:
    # Family: { "parameter name": ("parameter datatype", "category name used for common outputs feature") },
    # The datatype strings ("string", "boolean", "integer", "float")
    # describe how each option's value is parsed and validated.
    "Target": {
        "direct": "string",
        "url": "string",
        "logFile": "string",
        "bulkFile": "string",
        "requestFile": "string",
        "sessionFile": "string",
        "googleDork": "string",
        "configFile": "string",
        "sitemapUrl": "string",
    },
    "Request": {
        "method": "string",
        "data": "string",
        "paramDel": "string",
        "cookie": "string",
        "cookieDel": "string",
        "loadCookies": "string",
        "dropSetCookie": "boolean",
        "agent": "string",
        "randomAgent": "boolean",
        "host": "string",
        "referer": "string",
        "headers": "string",
        "authType": "string",
        "authCred": "string",
        "authFile": "string",
        "proxy": "string",
        "proxyCred": "string",
        "proxyFile": "string",
        "ignoreProxy": "boolean",
        "tor": "boolean",
        "torPort": "integer",
        "torType": "string",
        "checkTor": "boolean",
        "delay": "float",
        "timeout": "float",
        "retries": "integer",
        "rParam": "string",
        "safeUrl": "string",
        "safePost": "string",
        "safeReqFile": "string",
        "safeFreq": "integer",
        "skipUrlEncode": "boolean",
        "csrfToken": "string",
        "csrfUrl": "string",
        "forceSSL": "boolean",
        "hpp": "boolean",
        "evalCode": "string",
    },
    "Optimization": {
        "optimize": "boolean",
        "predictOutput": "boolean",
        "keepAlive": "boolean",
        "nullConnection": "boolean",
        "threads": "integer",
    },
    "Injection": {
        "testParameter": "string",
        "skip": "string",
        "skipStatic": "boolean",
        "dbms": "string",
        "dbmsCred": "string",
        "os": "string",
        "invalidBignum": "boolean",
        "invalidLogical": "boolean",
        "invalidString": "boolean",
        "noCast": "boolean",
        "noEscape": "boolean",
        "prefix": "string",
        "suffix": "string",
        "tamper": "string",
    },
    "Detection": {
        "level": "integer",
        "risk": "integer",
        "string": "string",
        "notString": "string",
        "regexp": "string",
        "code": "integer",
        "textOnly": "boolean",
        "titles": "boolean",
    },
    "Techniques": {
        "tech": "string",
        "timeSec": "integer",
        "uCols": "string",
        "uChar": "string",
        "uFrom": "string",
        "dnsName": "string",
        "secondOrder": "string",
    },
    "Fingerprint": {
        "extensiveFp": "boolean",
    },
    # Enumeration options carry a second tuple member naming the category
    # used by the common-outputs feature (see format note above).
    "Enumeration": {
        "getAll": "boolean",
        "getBanner": ("boolean", "Banners"),
        "getCurrentUser": ("boolean", "Users"),
        "getCurrentDb": ("boolean", "Databases"),
        "getHostname": "boolean",
        "isDba": "boolean",
        "getUsers": ("boolean", "Users"),
        "getPasswordHashes": ("boolean", "Passwords"),
        "getPrivileges": ("boolean", "Privileges"),
        "getRoles": ("boolean", "Roles"),
        "getDbs": ("boolean", "Databases"),
        "getTables": ("boolean", "Tables"),
        "getColumns": ("boolean", "Columns"),
        "getSchema": "boolean",
        "getCount": "boolean",
        "dumpTable": "boolean",
        "dumpAll": "boolean",
        "search": "boolean",
        "getComments": "boolean",
        "db": "string",
        "tbl": "string",
        "col": "string",
        "excludeCol": "string",
        "pivotColumn": "string",
        "dumpWhere": "string",
        "user": "string",
        "excludeSysDbs": "boolean",
        "limitStart": "integer",
        "limitStop": "integer",
        "firstChar": "integer",
        "lastChar": "integer",
        "query": "string",
        "sqlShell": "boolean",
        "sqlFile": "string",
    },
    "Brute": {
        "commonTables": "boolean",
        "commonColumns": "boolean",
    },
    "User-defined function": {
        "udfInject": "boolean",
        "shLib": "string",
    },
    "File system": {
        "rFile": "string",
        "wFile": "string",
        "dFile": "string",
    },
    "Takeover": {
        "osCmd": "string",
        "osShell": "boolean",
        "osPwn": "boolean",
        "osSmb": "boolean",
        "osBof": "boolean",
        "privEsc": "boolean",
        "msfPath": "string",
        "tmpPath": "string",
    },
    "Windows": {
        "regRead": "boolean",
        "regAdd": "boolean",
        "regDel": "boolean",
        "regKey": "string",
        "regVal": "string",
        "regData": "string",
        "regType": "string",
    },
    "General": {
        #"xmlFile": "string",
        "trafficFile": "string",
        "batch": "boolean",
        "binaryFields": "string",
        "charset": "string",
        "crawlDepth": "integer",
        "crawlExclude": "string",
        "csvDel": "string",
        "dumpFormat": "string",
        "eta": "boolean",
        "flushSession": "boolean",
        "forms": "boolean",
        "freshQueries": "boolean",
        "hexConvert": "boolean",
        "outputDir": "string",
        "parseErrors": "boolean",
        "saveConfig": "string",
        "scope": "string",
        "testFilter": "string",
        "testSkip": "string",
        "updateAll": "boolean",
    },
    "Miscellaneous": {
        "alert": "string",
        "answers": "string",
        "beep": "boolean",
        "cleanup": "boolean",
        "dependencies": "boolean",
        "disableColoring": "boolean",
        "googlePage": "integer",
        "identifyWaf": "boolean",
        "mobile": "boolean",
        "offline": "boolean",
        "pageRank": "boolean",
        "purgeOutput": "boolean",
        "skipWaf": "boolean",
        "smart": "boolean",
        "tmpDir": "string",
        "wizard": "boolean",
        "verbose": "integer",
    },
    # Presumably internal/developer-only switches (family name "Hidden") --
    # they follow the same datatype convention as the public families.
    "Hidden": {
        "dummy": "boolean",
        "disablePrecon": "boolean",
        "profile": "boolean",
        "forceDns": "boolean",
        "ignore401": "boolean",
        "smokeTest": "boolean",
        "liveTest": "boolean",
        "stopFail": "boolean",
        "runCase": "string",
    }
}
| gpl-3.0 |
smn/onadata | onadata/apps/api/migrations/0001_initial.py | 20 | 6989 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    # South schema migration creating the initial tables for the `api` app.

    def forwards(self, orm):
        """Create the api_organizationprofile and api_team tables."""
        # Adding model 'OrganizationProfile'
        # (one-to-one extension of main.UserProfile, flagged as an org,
        # with a FK to the creating auth.User)
        db.create_table('api_organizationprofile', (
            ('userprofile_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['main.UserProfile'], unique=True, primary_key=True)),
            ('is_organization', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('creator', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
        ))
        db.send_create_signal('api', ['OrganizationProfile'])
        # Adding model 'Team'
        # (one-to-one extension of auth.Group owned by an auth.User)
        db.create_table('api_team', (
            ('group_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['auth.Group'], unique=True, primary_key=True)),
            ('organization', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
        ))
        db.send_create_signal('api', ['Team'])

    def backwards(self, orm):
        """Drop the tables created by forwards()."""
        # Deleting model 'OrganizationProfile'
        db.delete_table('api_organizationprofile')
        # Deleting model 'Team'
        db.delete_table('api_team')

    # Frozen ORM snapshot used by South to materialise `orm` above.
    # Do not hand-edit field definitions here; regenerate instead.
    models = {
        'api.organizationprofile': {
            'Meta': {'object_name': 'OrganizationProfile', '_ormbases': ['main.UserProfile']},
            'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'is_organization': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'userprofile_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['main.UserProfile']", 'unique': 'True', 'primary_key': 'True'})
        },
        'api.team': {
            'Meta': {'object_name': 'Team', '_ormbases': ['auth.Group']},
            'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}),
            'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['models.Project']", 'symmetrical': 'False'})
        },
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'main.userprofile': {
            'Meta': {'object_name': 'UserProfile'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'home_page': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'organization': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'phonenumber': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'require_auth': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'twitter': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"})
        },
        'models.project': {
            'Meta': {'object_name': 'Project'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        }
    }

    complete_apps = ['api']
| bsd-2-clause |
kambysese/mne-python | mne/preprocessing/nirs/tests/test_temporal_derivative_distribution_repair.py | 12 | 1068 | # Authors: Robert Luke <mail@robertluke.net>
#
# License: BSD (3-clause)
import os.path as op
import pytest
import numpy as np
from mne.datasets.testing import data_path
from mne.io import read_raw_nirx
from mne.preprocessing.nirs import optical_density, tddr
from mne.datasets import testing
fname_nirx_15_2 = op.join(data_path(download=False),
'NIRx', 'nirscout', 'nirx_15_2_recording')
@testing.requires_testing_data
@pytest.mark.parametrize('fname', ([fname_nirx_15_2]))
def test_temporal_derivative_distribution_repair(fname, tmpdir):
    """Test running artifact rejection.

    Injects an artificial baseline-shift artifact into an optical-density
    recording and checks that TDDR reduces the resulting step change.
    NOTE(review): the `tmpdir` fixture is requested but never used.
    """
    raw = read_raw_nirx(fname)
    raw = optical_density(raw)
    # Add a baseline shift artifact about half way through data
    # NOTE(review): the shift is actually applied to the first 30 samples
    # of channel 0, not half way through -- confirm comment vs. intent.
    max_shift = np.max(np.diff(raw._data[0]))
    # Shift of 5x the largest natural sample-to-sample jump, so the
    # injected step dominates the channel's derivative.
    shift_amp = 5 * max_shift
    raw._data[0, 0:30] = raw._data[0, 0:30] - (shift_amp)
    assert np.max(np.diff(raw._data[0])) > shift_amp
    # Ensure that applying the algorithm reduces the step change
    raw = tddr(raw)
    assert np.max(np.diff(raw._data[0])) < shift_amp
| bsd-3-clause |
vampirekiss/wechat_message | tests/__init__.py | 2 | 1628 | # -*- coding: utf-8 -*-
from tornado.testing import IOLoop, AsyncHTTPTestCase as BaseAsyncHTTPTestCase
from tornado.gen import coroutine, Return
from app.application import Application
from urllib import urlencode
import mock
class TestCaseMixin(object):
    """Shared helpers mixed into the test cases in this package."""
    def get_new_ioloop(self):
        # Reuse the global tornado IOLoop rather than creating one per test.
        return IOLoop.instance()
    def mock_coroutine_object(self, klass):
        """Patch `klass` (dotted-path string) and return its coroutine-aware mock."""
        return _CoroutineMockObject(klass)
class AsyncHTTPTestCase(BaseAsyncHTTPTestCase, TestCaseMixin):
    """HTTP test case bound to this project's tornado Application."""

    def get_app(self):
        """Return the application under test."""
        return Application()

    def get(self, url, **kwargs):
        """Issue a GET request (thin alias for fetch)."""
        return self.fetch(url, **kwargs)

    def post(self, url, **kwargs):
        """Issue a POST request; a dict body is form-encoded first."""
        body = kwargs.get('body')
        if isinstance(body, dict):
            kwargs['body'] = urlencode(body)
        return self.fetch(url, method='POST', **kwargs)
class _CoroutineMockObject(object):
    """Wraps mock.patch(...) so patched methods behave as tornado coroutines.

    NOTE(review): methods are addressed via self.mock_object[method]
    (MagicMock's __getitem__), not getattr -- confirm this matches how
    the patched object is consumed by the code under test.
    """
    def __init__(self, klass):
        # `klass` is the dotted-path string handed straight to mock.patch;
        # the patch is started immediately and never explicitly stopped here.
        self.mock_object = mock.patch(klass).start()
        self.mocked_methods = {}
    def set_method_result(self, method, result):
        """Make `method` a coroutine that always resolves to `result`."""
        @coroutine
        def side_effect(*args, **kwargs):
            raise Return(result)
        self.mocked_methods[method] = True
        self.mock_object[method].side_effect = side_effect
    def mock_method(self, method, target):
        """Make `method` a coroutine that delegates to target(*args, **kwargs)."""
        @coroutine
        def side_effect(*args, **kwargs):
            raise Return(target(*args, **kwargs))
        self.mocked_methods[method] = True
        self.mock_object[method].side_effect = side_effect
    def __getattr__(self, method):
        # Expose the coroutine side effect of a mocked method as an
        # attribute; unmocked names resolve to None rather than raising
        # AttributeError (callers must handle the None case).
        if method in self.mocked_methods:
            return self.mock_object[method].side_effect
        return None
| gpl-2.0 |
luiseduardohdbackup/odoo | addons/product_visible_discount/product_visible_discount.py | 165 | 5466 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class product_pricelist(osv.osv):
    """Extends product.pricelist with a 'visible_discount' flag.

    When enabled (the default), sale order lines priced from this
    pricelist show the undiscounted unit price plus an explicit discount
    percentage instead of a single net price.
    """
    _inherit = 'product.pricelist'
    _columns ={
        'visible_discount': fields.boolean('Visible Discount'),
    }
    _defaults = {
        'visible_discount': True,
    }
class sale_order_line(osv.osv):
    _inherit = "sale.order.line"

    def product_id_change(self, cr, uid, ids, pricelist, product, qty=0,
            uom=False, qty_uos=0, uos=False, name='', partner_id=False,
            lang=False, update_tax=True, date_order=False, packaging=False,
            fiscal_position=False, flag=False, context=None):
        """Extend the standard onchange so that, when the pricelist has
        visible_discount enabled and the user belongs to the
        'discount per SO line' group, the line shows the pre-pricelist
        unit price plus an explicit discount percentage rather than a
        single net price.  Signature and return value are unchanged from
        the parent onchange."""
        def get_real_price_curency(res_dict, product_id, qty, uom, pricelist):
            """Retrieve the price before applying the pricelist.

            Returns (price, currency): the value of the price field the
            matched pricelist rule is based on (list_price by default),
            scaled to `uom`, and the currency that price is expressed in.
            """
            item_obj = self.pool.get('product.pricelist.item')
            price_type_obj = self.pool.get('product.price.type')
            product_obj = self.pool.get('product.product')
            field_name = 'list_price'
            # res_dict maps pricelist id -> (price, rule id); pull the
            # rule that produced the price, if any.
            rule_id = res_dict.get(pricelist) and res_dict[pricelist][1] or False
            currency_id = None
            if rule_id:
                item_base = item_obj.read(cr, uid, [rule_id], ['base'])[0]['base']
                if item_base > 0:
                    # The rule is based on a specific price type; use its
                    # field and currency instead of the defaults.
                    price_type = price_type_obj.browse(cr, uid, item_base)
                    field_name = price_type.field
                    currency_id = price_type.currency_id
            product = product_obj.browse(cr, uid, product_id, context)
            product_read = product_obj.read(cr, uid, [product_id], [field_name], context=context)[0]
            if not currency_id:
                # Fall back to the product company's currency.
                currency_id = product.company_id.currency_id.id
            factor = 1.0
            if uom and uom != product.uom_id.id:
                # the unit price is in a different uom
                factor = self.pool['product.uom']._compute_qty(cr, uid, uom, 1.0, product.uom_id.id)
            return product_read[field_name] * factor, currency_id
        # NOTE(review): get_real_price is defined but not called anywhere
        # in this method; possibly kept for inheriting modules.
        def get_real_price(res_dict, product_id, qty, uom, pricelist):
            return get_real_price_curency(res_dict, product_id, qty, uom, pricelist)[0]
        res=super(sale_order_line, self).product_id_change(cr, uid, ids, pricelist, product, qty,
            uom, qty_uos, uos, name, partner_id,
            lang, update_tax, date_order, packaging=packaging, fiscal_position=fiscal_position, flag=flag, context=context)
        # Rebuild a minimal context for the price lookups below.
        context = {'lang': lang, 'partner_id': partner_id}
        result=res['value']
        pricelist_obj=self.pool.get('product.pricelist')
        product_obj = self.pool.get('product.product')
        # Only rewrite the price/discount when per-line discounts are
        # enabled for this user and we have both a product and pricelist.
        if product and pricelist and self.pool.get('res.users').has_group(cr, uid, 'sale.group_discount_per_so_line'):
            if result.get('price_unit',False):
                price=result['price_unit']
            else:
                # Parent onchange produced no price; nothing to split.
                return res
            uom = result.get('product_uom', uom)
            product = product_obj.browse(cr, uid, product, context)
            pricelist_context = dict(context, uom=uom, date=date_order)
            # price_rule_get returns {pricelist id: (price, rule id)}.
            list_price = pricelist_obj.price_rule_get(cr, uid, [pricelist],
                    product.id, qty or 1.0, partner_id, context=pricelist_context)
            so_pricelist = pricelist_obj.browse(cr, uid, pricelist, context=context)
            new_list_price, currency_id = get_real_price_curency(list_price, product.id, qty, uom, pricelist)
            if so_pricelist.visible_discount and list_price[pricelist][0] != 0 and new_list_price != 0:
                if product.company_id and so_pricelist.currency_id.id != product.company_id.currency_id.id:
                    # new_list_price is in company's currency while price in pricelist currency
                    ctx = context.copy()
                    ctx['date'] = date_order
                    new_list_price = self.pool['res.currency'].compute(cr, uid,
                        currency_id.id, so_pricelist.currency_id.id,
                        new_list_price, context=ctx)
                # Discount is the relative gap between the base price and
                # the pricelist price, as a percentage.
                discount = (new_list_price - price) / new_list_price * 100
                if discount > 0:
                    result['price_unit'] = new_list_price
                    result['discount'] = discount
                else:
                    # Pricelist price is >= base price: no visible discount.
                    result['discount'] = 0.0
            else:
                result['discount'] = 0.0
        else:
            result['discount'] = 0.0
        return res
| agpl-3.0 |
scorphus/politicos | politicos/migrations/versions/585774625ec2_create_mandate_events_table.py | 1 | 1328 | """create mandate events table
Revision ID: 585774625ec2
Revises: 2cbbef3d8e8f
Create Date: 2015-07-06 00:58:44.470013
"""
# revision identifiers, used by Alembic.
revision = '585774625ec2'
down_revision = '2cbbef3d8e8f'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the mandate_events table with its FKs and uniqueness rule."""
    op.create_table(
        'mandate_events',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('date', sa.Date, nullable=False),
        sa.Column('mandate_id', sa.Integer, nullable=False),
        sa.Column('mandate_events_type_id', sa.Integer, nullable=False),
    )
    # Link each event to its mandate row.
    op.create_foreign_key(
        'fk_mandate',
        'mandate_events', 'mandate',
        ['mandate_id'], ['id']
    )
    # Link each event to its event-type row.
    op.create_foreign_key(
        'fk_mandate_events_type',
        'mandate_events', 'mandate_events_type',
        ['mandate_events_type_id'], ['id']
    )
    # At most one event of a given type per mandate per date.
    op.create_unique_constraint(
        'uk_mandate_events',
        'mandate_events',
        ['mandate_id', 'mandate_events_type_id', 'date']
    )
def downgrade():
    """Reverse upgrade(): drop constraints first, then the table itself."""
    op.drop_constraint('fk_mandate', 'mandate_events', type_='foreignkey')
    op.drop_constraint(
        'fk_mandate_events_type', 'mandate_events', type_='foreignkey'
    )
    op.drop_constraint('uk_mandate_events', 'mandate_events', type_='unique')
    op.drop_table('mandate_events')
| agpl-3.0 |
orekyuu/intellij-community | python/lib/Lib/site-packages/django/contrib/gis/gdal/geomtype.py | 404 | 2967 | from django.contrib.gis.gdal.error import OGRException
#### OGRGeomType ####
class OGRGeomType(object):
    "Encapsulates OGR Geometry Types."

    # 0x80000000 as a signed 32-bit int; ORed onto a base type code for the
    # '25D' (Z-coordinate) variants listed in _types below.
    wkb25bit = -2147483648

    # Dictionary of acceptable OGRwkbGeometryType s and their string names.
    _types = {0 : 'Unknown',
              1 : 'Point',
              2 : 'LineString',
              3 : 'Polygon',
              4 : 'MultiPoint',
              5 : 'MultiLineString',
              6 : 'MultiPolygon',
              7 : 'GeometryCollection',
              100 : 'None',
              101 : 'LinearRing',
              1 + wkb25bit: 'Point25D',
              2 + wkb25bit: 'LineString25D',
              3 + wkb25bit: 'Polygon25D',
              4 + wkb25bit: 'MultiPoint25D',
              5 + wkb25bit : 'MultiLineString25D',
              6 + wkb25bit : 'MultiPolygon25D',
              7 + wkb25bit : 'GeometryCollection25D',
              }
    # Reverse type dictionary, keyed by lower-case of the name.
    _str_types = dict([(v.lower(), k) for k, v in _types.items()])

    def __init__(self, type_input):
        "Figures out the correct OGR Type based upon the input."
        if isinstance(type_input, OGRGeomType):
            num = type_input.num
        elif isinstance(type_input, basestring):
            # Names are matched case-insensitively; 'geometry' is accepted
            # as an alias for the 'unknown' OGR type.
            type_input = type_input.lower()
            if type_input == 'geometry': type_input='unknown'
            num = self._str_types.get(type_input, None)
            if num is None:
                raise OGRException('Invalid OGR String Type "%s"' % type_input)
        elif isinstance(type_input, int):
            if not type_input in self._types:
                raise OGRException('Invalid OGR Integer Type: %d' % type_input)
            num = type_input
        else:
            raise TypeError('Invalid OGR input type given.')
        # Setting the OGR geometry type number.
        self.num = num

    def __str__(self):
        "Returns the value of the name property."
        return self.name

    def __eq__(self, other):
        """
        Does an equivalence test on the OGR type with the given
        other OGRGeomType, the short-hand string, or the integer.
        """
        if isinstance(other, OGRGeomType):
            return self.num == other.num
        elif isinstance(other, basestring):
            return self.name.lower() == other.lower()
        elif isinstance(other, int):
            return self.num == other
        else:
            return False

    def __ne__(self, other):
        # Inequality is simply the negation of __eq__ above.
        return not (self == other)

    @property
    def name(self):
        "Returns a short-hand string form of the OGR Geometry type."
        return self._types[self.num]

    @property
    def django(self):
        "Returns the Django GeometryField for this OGR Type."
        # Strip the 25D suffix: Django fields do not distinguish 2D/2.5D.
        s = self.name.replace('25D', '')
        if s in ('LinearRing', 'None'):
            # No Django field corresponds to these OGR types.
            return None
        elif s == 'Unknown':
            s = 'Geometry'
        return s + 'Field'
| apache-2.0 |
tima/ansible | lib/ansible/modules/cloud/docker/docker_secret.py | 39 | 8291 | #!/usr/bin/python
#
# Copyright 2016 Red Hat | Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: docker_secret
short_description: Manage docker secrets.
version_added: "2.4"
description:
- Create and remove Docker secrets in a Swarm environment. Similar to `docker secret create` and `docker secret rm`.
- Adds to the metadata of new secrets 'ansible_key', an encrypted hash representation of the data, which is then used
- in future runs to test if a secret has changed.
- If 'ansible_key is not present, then a secret will not be updated unless the C(force) option is set.
- Updates to secrets are performed by removing the secret and creating it again.
options:
data:
description:
- String. The value of the secret. Required when state is C(present).
required: false
labels:
description:
- "A map of key:value meta data, where both the I(key) and I(value) are expected to be a string."
- If new meta data is provided, or existing meta data is modified, the secret will be updated by removing it and creating it again.
required: false
force:
description:
- Boolean. Use with state C(present) to always remove and recreate an existing secret.
- If I(true), an existing secret will be replaced, even if it has not changed.
default: false
name:
description:
- The name of the secret.
required: true
state:
description:
- Set to C(present), if the secret should exist, and C(absent), if it should not.
required: false
default: present
choices:
- absent
- present
extends_documentation_fragment:
- docker
requirements:
- "docker-py >= 2.1.0"
- "Docker API >= 1.25"
author:
- Chris Houseknecht (@chouseknecht)
'''
EXAMPLES = '''
- name: Create secret foo
docker_secret:
name: foo
data: Hello World!
state: present
- name: Change the secret data
docker_secret:
name: foo
data: Goodnight everyone!
labels:
bar: baz
one: '1'
state: present
- name: Add a new label
docker_secret:
name: foo
data: Goodnight everyone!
labels:
bar: baz
one: '1'
# Adding a new label will cause a remove/create of the secret
two: '2'
state: present
- name: No change
docker_secret:
name: foo
data: Goodnight everyone!
labels:
bar: baz
one: '1'
# Even though 'two' is missing, there is no change to the existing secret
state: present
- name: Update an existing label
docker_secret:
name: foo
data: Goodnight everyone!
labels:
bar: monkey # Changing a label will cause a remove/create of the secret
one: '1'
state: present
- name: Force the removal/creation of the secret
docker_secret:
name: foo
data: Goodnight everyone!
force: yes
state: present
- name: Remove secret foo
docker_secret:
name: foo
state: absent
'''
RETURN = '''
secret_id:
description:
- The ID assigned by Docker to the secret object.
returned: success
type: string
sample: 'hzehrmyjigmcp2gb6nlhmjqcv'
'''
import hashlib
try:
from docker.errors import APIError
except ImportError:
# missing docker-py handled in ansible.module_utils.docker
pass
from ansible.module_utils.docker_common import AnsibleDockerClient, DockerBaseClass
from ansible.module_utils._text import to_native, to_bytes
class SecretManager(DockerBaseClass):
    """Implements create/update/remove for a single Docker Swarm secret.

    Docker never exposes a secret's data after creation and does not allow
    in-place modification, so:
      * idempotency is tracked via an 'ansible_key' label holding a SHA-224
        digest of the data, and
      * "updates" are performed by removing and re-creating the secret.
    Results are written into the shared ``results`` dict.
    """

    def __init__(self, client, results):
        super(SecretManager, self).__init__()

        self.client = client
        self.results = results
        self.check_mode = self.client.check_mode

        parameters = self.client.module.params
        self.name = parameters.get('name')
        self.state = parameters.get('state')
        self.data = parameters.get('data')
        self.labels = parameters.get('labels')
        self.force = parameters.get('force')

        # Digest of the secret data; only computed when state == 'present'.
        self.data_key = None

    def __call__(self):
        # Dispatch on the requested state.
        if self.state == 'present':
            # The digest doubles as the idempotency marker stored in the
            # 'ansible_key' label (the data itself cannot be read back).
            self.data_key = hashlib.sha224(to_bytes(self.data)).hexdigest()
            self.present()
        elif self.state == 'absent':
            self.absent()

    def get_secret(self):
        ''' Find an existing secret. '''
        try:
            # The name filter can match more than one entry, so the exact
            # name is verified in the loop below.
            secrets = self.client.secrets(filters={'name': self.name})
        except APIError as exc:
            self.client.fail("Error accessing secret %s: %s" % (self.name, to_native(exc)))

        for secret in secrets:
            if secret['Spec']['Name'] == self.name:
                return secret
        return None

    def create_secret(self):
        ''' Create a new secret.  Returns the new secret's ID, or None in check mode. '''
        secret_id = None
        # We can't see the data after creation, so adding a label we can use for idempotency check
        labels = {
            'ansible_key': self.data_key
        }
        if self.labels:
            labels.update(self.labels)

        try:
            if not self.check_mode:
                secret_id = self.client.create_secret(self.name, self.data, labels=labels)
        except APIError as exc:
            self.client.fail("Error creating secret: %s" % to_native(exc))

        # Some docker-py versions return {'ID': ...} instead of the bare id.
        if isinstance(secret_id, dict):
            secret_id = secret_id['ID']

        return secret_id

    def present(self):
        ''' Handles state == 'present', creating or updating the secret '''
        secret = self.get_secret()
        if secret:
            self.results['secret_id'] = secret['ID']
            data_changed = False
            attrs = secret.get('Spec', {})
            # Secrets created outside this module lack 'ansible_key' and are
            # never treated as data-changed; C(force) must be used for those.
            if attrs.get('Labels', {}).get('ansible_key'):
                if attrs['Labels']['ansible_key'] != self.data_key:
                    data_changed = True
            labels_changed = False
            if self.labels and attrs.get('Labels'):
                # check if user requested a label change
                for label in attrs['Labels']:
                    if self.labels.get(label) and self.labels[label] != attrs['Labels'][label]:
                        labels_changed = True
            # check if user added a label
            labels_added = False
            if self.labels:
                if attrs.get('Labels'):
                    for label in self.labels:
                        if label not in attrs['Labels']:
                            labels_added = True
                else:
                    labels_added = True
            if data_changed or labels_added or labels_changed or self.force:
                # if something changed or force, delete and re-create the secret
                self.absent()
                secret_id = self.create_secret()
                self.results['changed'] = True
                self.results['secret_id'] = secret_id
        else:
            # No existing secret with this name: create it.
            self.results['changed'] = True
            self.results['secret_id'] = self.create_secret()

    def absent(self):
        ''' Handles state == 'absent', removing the secret '''
        secret = self.get_secret()
        if secret:
            try:
                if not self.check_mode:
                    self.client.remove_secret(secret['ID'])
            except APIError as exc:
                self.client.fail("Error removing secret %s: %s" % (self.name, to_native(exc)))
            self.results['changed'] = True
def main():
    """Module entry point: parse arguments, apply the requested state,
    and report the outcome via exit_json."""
    spec = dict(
        name=dict(type='str', required=True),
        state=dict(type='str', choices=['absent', 'present'], default='present'),
        data=dict(type='str', no_log=True),
        labels=dict(type='dict'),
        force=dict(type='bool', default=False)
    )

    client = AnsibleDockerClient(
        argument_spec=spec,
        supports_check_mode=True,
        # 'data' is only meaningful (and required) when creating the secret.
        required_if=[('state', 'present', ['data'])]
    )

    results = dict(changed=False, secret_id='')
    SecretManager(client, results)()
    client.module.exit_json(**results)
if __name__ == '__main__':
main()
| gpl-3.0 |
chripede/django-js-reverse | django_js_reverse/management/commands/collectstatic_js_reverse.py | 2 | 1127 | # -*- coding: utf-8 -*-
import os
import sys
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.base import ContentFile
from django.core.files.storage import FileSystemStorage
from django.core.management.base import BaseCommand
from django_js_reverse.views import urls_js
class Command(BaseCommand):
    help = 'Creates a static urls-js file for django-js-reverse'

    def handle(self, *args, **options):
        # The output lives under STATIC_ROOT, so a configured collection
        # target is mandatory.
        if not hasattr(settings, 'STATIC_ROOT') or not settings.STATIC_ROOT:
            raise ImproperlyConfigured('The collectstatic_js_reverse command needs settings.STATIC_ROOT to be set.')

        location = os.path.join(settings.STATIC_ROOT, 'django_js_reverse', 'js')
        file = 'reverse.js'
        fs = FileSystemStorage(location=location)
        # Delete any previously collected file first; otherwise save()
        # would store the new content under a mangled (deduplicated) name.
        if fs.exists(file):
            fs.delete(file)

        # Render the reverse.js content through the regular view.
        content = urls_js()
        fs.save(file, ContentFile(content))

        # Only chat on stdout when actually invoked from the command line,
        # keeping programmatic call_command() runs quiet.
        if len(sys.argv) > 1 and sys.argv[1] in ['collectstatic_js_reverse']:
            self.stdout.write('js-reverse file written to %s' % (location))  # pragma: no cover
| mit |
ThQ/luigi | test/contrib/pig_test.py | 34 | 6792 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import subprocess
import tempfile
import luigi
from helpers import unittest
from luigi.contrib.pig import PigJobError, PigJobTask
from mock import patch
class SimpleTestJob(PigJobTask):
    """Minimal Pig job fixture: default binary, no options/properties."""

    def output(self):
        return luigi.LocalTarget('simple-output')

    def pig_script_path(self):
        return "my_simple_pig_script.pig"
class ComplexTestJob(PigJobTask):
    """Pig job fixture exercising every PigJobTask customization hook:
    env vars, properties file, parameters file and CLI options."""

    def output(self):
        return luigi.LocalTarget('complex-output')

    def pig_script_path(self):
        return "my_complex_pig_script.pig"

    def pig_env_vars(self):
        return {'PIG_CLASSPATH': '/your/path'}

    def pig_properties(self):
        # Written to a temporary file passed via -propertyFile.
        return {'pig.additional.jars': '/path/to/your/jar'}

    def pig_parameters(self):
        # Written to a temporary file passed via -param_file.
        return {'YOUR_PARAM_NAME': 'Your param value'}

    def pig_options(self):
        return ['-x', 'local']
class SimplePigTest(unittest.TestCase):
    """Tests for SimpleTestJob, with subprocess.Popen replaced by a fake.

    Bug fixed versus the original: in test_run__fail the saved Popen
    reference was reassigned to the caught exception (``p = e``), so the
    ``finally`` clause installed the *exception object* as
    ``subprocess.Popen``, leaking broken state into later tests.  The
    arglist assertion was also placed after ``job.run()`` inside the
    ``try`` block, making it unreachable on the (expected) failure path.
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    @patch('subprocess.Popen')
    def test_run__success(self, mock):
        arglist_result = []
        original_popen = subprocess.Popen
        subprocess.Popen = _get_fake_Popen(arglist_result, 0)
        try:
            job = SimpleTestJob()
            job.run()
            self.assertEqual([['/usr/share/pig/bin/pig', '-f', 'my_simple_pig_script.pig']], arglist_result)
        finally:
            # Always restore the real (patched) Popen.
            subprocess.Popen = original_popen

    @patch('subprocess.Popen')
    def test_run__fail(self, mock):
        arglist_result = []
        original_popen = subprocess.Popen
        subprocess.Popen = _get_fake_Popen(arglist_result, 1)
        try:
            job = SimpleTestJob()
            job.run()
        except PigJobError as e:
            # The failure path must still have invoked pig exactly once
            # and surfaced the fake's stderr in the exception.
            self.assertEqual([['/usr/share/pig/bin/pig', '-f', 'my_simple_pig_script.pig']], arglist_result)
            self.assertEqual('stderr', e.err)
        else:
            self.fail("Should have thrown PigJobError")
        finally:
            subprocess.Popen = original_popen
class ComplexPigTest(unittest.TestCase):
    """Tests for ComplexTestJob: options, parameter file and property file.

    Bug fixed versus the original: in test_run__fail the saved Popen
    reference was reassigned to the caught exception (``p = e``), so the
    ``finally`` clause installed the exception object as
    ``subprocess.Popen`` instead of restoring the saved callable.
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def _assert_param_and_property_files(self, param_file, prop_file):
        """Verify the temp files the job handed to pig via the CLI."""
        with open(param_file.name) as pparams_file:
            pparams = pparams_file.readlines()
        self.assertEqual(['YOUR_PARAM_NAME=Your param value\n'], pparams)

        with open(prop_file.name) as pprops_file:
            pprops = pprops_file.readlines()
        self.assertEqual(['pig.additional.jars=/path/to/your/jar\n'], pprops)

    @patch('subprocess.Popen')
    def test_run__success(self, mock):
        arglist_result = []
        original_popen = subprocess.Popen
        subprocess.Popen = _get_fake_Popen(arglist_result, 0)

        with tempfile.NamedTemporaryFile(delete=False) as param_file_mock, \
                tempfile.NamedTemporaryFile(delete=False) as prop_file_mock, \
                patch('luigi.contrib.pig.tempfile.NamedTemporaryFile',
                      side_effect=[param_file_mock, prop_file_mock]):
            try:
                job = ComplexTestJob()
                job.run()
                self.assertEqual([['/usr/share/pig/bin/pig', '-x', 'local',
                                   '-param_file', param_file_mock.name,
                                   '-propertyFile', prop_file_mock.name,
                                   '-f', 'my_complex_pig_script.pig']],
                                 arglist_result)
                self._assert_param_and_property_files(param_file_mock, prop_file_mock)
            finally:
                subprocess.Popen = original_popen

    @patch('subprocess.Popen')
    def test_run__fail(self, mock):
        arglist_result = []
        original_popen = subprocess.Popen
        subprocess.Popen = _get_fake_Popen(arglist_result, 1)

        with tempfile.NamedTemporaryFile(delete=False) as param_file_mock, \
                tempfile.NamedTemporaryFile(delete=False) as prop_file_mock, \
                patch('luigi.contrib.pig.tempfile.NamedTemporaryFile',
                      side_effect=[param_file_mock, prop_file_mock]):
            try:
                job = ComplexTestJob()
                job.run()
            except PigJobError as e:
                # pig must have been invoked once with the full option set,
                # and the fake's stderr surfaced in the exception.
                self.assertEqual('stderr', e.err)
                self.assertEqual([['/usr/share/pig/bin/pig', '-x', 'local',
                                   '-param_file', param_file_mock.name,
                                   '-propertyFile', prop_file_mock.name, '-f',
                                   'my_complex_pig_script.pig']],
                                 arglist_result)
                self._assert_param_and_property_files(param_file_mock, prop_file_mock)
            else:
                self.fail("Should have thrown PigJobError")
            finally:
                subprocess.Popen = original_popen
def _get_fake_Popen(arglist_result, return_code, *args, **kwargs):
def Popen_fake(arglist, shell=None, stdout=None, stderr=None, env=None, close_fds=True):
arglist_result.append(arglist)
class P(object):
def wait(self):
pass
def poll(self):
return 0
def communicate(self):
return 'end'
def env(self):
return self.env
p = P()
p.returncode = return_code
p.stderr = tempfile.TemporaryFile()
p.stdout = tempfile.TemporaryFile()
p.stdout.write(b'stdout')
p.stderr.write(b'stderr')
# Reset temp files so the output can be read.
p.stdout.seek(0)
p.stderr.seek(0)
return p
return Popen_fake
| apache-2.0 |
hofschroeer/gnuradio | docs/doxygen/doxyxml/__init__.py | 7 | 2515 | #
# Copyright 2010 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
Python interface to contents of doxygen xml documentation.
Example use:
See the contents of the example folder for the C++ and
doxygen-generated xml used in this example.
>>> # Parse the doxygen docs.
>>> import os
>>> this_dir = os.path.dirname(globals()['__file__'])
>>> xml_path = this_dir + "/example/xml/"
>>> di = DoxyIndex(xml_path)
Get a list of all top-level objects.
>>> print([mem.name() for mem in di.members()])
[u'Aadvark', u'aadvarky_enough', u'main']
Get all functions.
>>> print([mem.name() for mem in di.in_category(DoxyFunction)])
[u'aadvarky_enough', u'main']
Check if an object is present.
>>> di.has_member(u'Aadvark')
True
>>> di.has_member(u'Fish')
False
Get an item by name and check its properties.
>>> aad = di.get_member(u'Aadvark')
>>> print(aad.brief_description)
Models the mammal Aadvark.
>>> print(aad.detailed_description)
Sadly the model is incomplete and cannot capture all aspects of an aadvark yet.
<BLANKLINE>
This line is uninformative and is only to test line breaks in the comments.
>>> [mem.name() for mem in aad.members()]
[u'aadvarkness', u'print', u'Aadvark', u'get_aadvarkness']
>>> aad.get_member(u'print').brief_description
u'Outputs the vital aadvark statistics.'
"""
from __future__ import unicode_literals
from .doxyindex import DoxyIndex, DoxyFunction, DoxyParam, DoxyClass, DoxyFile, DoxyNamespace, DoxyGroup, DoxyFriend, DoxyOther
def _test():
    """Smoke-test DoxyIndex on the bundled example, then run the doctests."""
    import os
    import doctest
    example_xml = os.path.dirname(globals()['__file__']) + "/example/xml/"
    index = DoxyIndex(example_xml)
    # Touch the example class to make sure parsing works before the
    # module doctests rely on it.
    index.get_member('Aadvark').brief_description
    return doctest.testmod()
if __name__ == "__main__":
_test()
| gpl-3.0 |
Crystalnix/bitpop-omaha | run_unit_tests.py | 65 | 3468 | #!/usr/bin/python2.4
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========================================================================
"""Runs a set of unit tests and returns success only if they all succeed.
This script assumes it is being run from the omaha directory.
To run unit tests for Omaha's default set of test directories, just run the file
from the command line.
"""
import dircache
import os
TEST_EXECUTABLE_RHS = '_unittest.exe'
# Build paths that contain tests.
STAGING_PATH = 'scons-out\\dbg-win\\staging'
TESTS_PATH = 'scons-out\\dbg-win\\tests'
def RunTest(test_path):
"""Runs a test and returns its exit code.
Assumes the tests can be run from any directory. In other words, it does not
chdir.
Args:
test_path: Path to test executables.
Returns:
The exit code from the test process.
"""
print '\nRunning %s . . .\n' % test_path
# Put './' in front of the file name to avoid accidentally running a file with
# the same name in some other directory if test_path were just a file name.
return os.system(os.path.join('.', test_path))
def RunTests(test_paths):
"""Runs all tests specified by test_paths.
Args:
test_paths: A list of paths to test executables.
Returns:
0 if all tests are successful.
1 if some tests fail, or if there is an error.
"""
if not test_paths or len(test_paths) < 1:
return 1
print 'Found the following tests to run:'
for test in test_paths:
print '\t%s' % test
# Run all tests and remembers those that failed.
failed_tests = [t for t in test_paths if RunTest(t)]
print '\n\n%s test executables were run.' % len(test_paths)
failed_test_count = len(failed_tests)
if failed_test_count:
# Lists the executables that failed so the user can investigate them.
print 'FAILED!'
print 'The following %s tests failed:\n' % failed_test_count
for test in failed_tests:
print test
return 1
else:
# No, there is none.
if test_paths:
print 'All of them PASSED!'
return 0
def GetTestsInDirs(test_dirs):
  """Returns a list of all unit test executables found in test_dirs.

  Subdirectories are not searched.

  Args:
    test_dirs: A list of directories to search.

  Returns:
    List of paths to all unit tests found.
  """
  tests = []
  for directory in test_dirs:
    # dircache.listdir returns sorted names, keeping results deterministic.
    for entry in dircache.listdir(directory):
      if not entry.endswith(TEST_EXECUTABLE_RHS):
        continue
      candidate = os.path.join(directory, entry)
      if os.path.isfile(candidate):
        tests.append(candidate)
  return tests
# Run a unit test when the module is run directly.
if __name__ == '__main__':
# List of paths that contain unit tests to run.
dirs_containing_tests = [STAGING_PATH, TESTS_PATH]
tests_to_run = GetTestsInDirs(dirs_containing_tests)
RunTests(tests_to_run)
| apache-2.0 |
borysiasty/inasafe | safe/impact_functions/generic/classified_polygon_people/metadata_definitions.py | 1 | 4522 | # coding=utf-8
"""InaSAFE Disaster risk tool by Australian Aid - Classified Polygon on
Land Cover Metadata Definitions.
Contact : ole.moller.nielsen@gmail.com
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Samweli Twesa Mwakisambwe "Samweli" <smwltwesa6@gmail.com>'
__date__ = '8/6/15'
from safe.common.utilities import OrderedDict
from safe.impact_functions.impact_function_metadata import \
ImpactFunctionMetadata
from safe.definitions import (
area_name_field,
area_id_field,
exposure_population,
hazard_all,
hazard_category_single_event,
hazard_category_multiple_event,
generic_vector_hazard_classes,
flood_vector_hazard_classes,
layer_mode_classified,
layer_mode_continuous,
layer_geometry_polygon,
count_exposure_unit
)
from safe.defaults import (
default_minimum_needs,
default_gender_postprocessor,
age_postprocessor,
minimum_needs_selector)
from safe.utilities.i18n import tr
class ClassifiedPolygonHazardPolygonPeopleFunctionMetadata(
        ImpactFunctionMetadata):
    """Metadata provider for the classified-polygon-hazard on
    polygon-people impact function.  All information is returned as one
    static dictionary consumed by the InaSAFE IF registry."""

    @staticmethod
    def as_dict():
        """Return metadata as a dictionary.

        This is a static method. You can use it to get the metadata in
        dictionary format for an impact function.

        :returns: A dictionary representing all the metadata for the
            concrete impact function.
        :rtype: dict
        """
        dict_meta = {
            'id': 'ClassifiedPolygonHazardPolygonPeopleFunction',
            'name': tr('Classified polygon hazard on polygon people'),
            'impact': tr('Be affected'),
            'title': tr('Be affected'),
            'function_type': 'qgis2.0',
            'author': 'Samweli Twesa Mwakisambwe(smwltwesa6@gmail.com)',
            'date_implemented': '06/08/2015',
            'overview': tr(
                'To assess the impact of each hazard zone on polygon people.'),
            'detailed_description': '',
            'hazard_input': tr(
                'The hazard layer must be a polygon layer. This layer '
                'must have an attribute representing the hazard '
                'zone that can be specified in the impact function options.'),
            'exposure_input': tr(
                'Vector polygon layer where each '
                'polygon represents a type of area where people lives.'),
            'output': tr(
                'A vector layer of areas polygons with each tagged '
                'according to the hazard zone in which it falls.'),
            'actions': tr(
                'Provide details about how big area fall within '
                'each hazard zone.'),
            'limitations': [],
            'citations': [],
            'legend_title': '',
            'legend_units': '',
            'legend_notes': '',
            'map_title': tr('Affected People'),
            'layer_name': tr('People affected'),
            # Constraints the IF registry uses to decide whether this
            # function can run on a given hazard/exposure layer pair.
            'layer_requirements': {
                'hazard': {
                    'layer_mode': layer_mode_classified,
                    'layer_geometries': [layer_geometry_polygon],
                    'hazard_categories': [
                        hazard_category_single_event,
                        hazard_category_multiple_event
                    ],
                    'hazard_types': hazard_all,
                    'continuous_hazard_units': [],
                    'vector_hazard_classifications': [
                        generic_vector_hazard_classes,
                        flood_vector_hazard_classes],
                    'raster_hazard_classifications': [],
                    'additional_keywords': []
                },
                'exposure': {
                    'layer_mode': layer_mode_continuous,
                    'layer_geometries': [layer_geometry_polygon],
                    'exposure_types': [exposure_population],
                    'exposure_units': [count_exposure_unit],
                    'exposure_class_fields': [],
                    'additional_keywords': [
                        area_id_field,
                        area_name_field
                    ]
                }
            },
            'parameters': OrderedDict([
                ('minimum needs', default_minimum_needs())])
        }
        return dict_meta
| gpl-3.0 |
kotejante/python-moodle | examples/get_user.py | 1 | 1498 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Moodle Webservice
# Copyright (c) 2011 Zikzakmedia S.L. (http://zikzakmedia.com) All Rights Reserved.
# Raimon Esteve <resteve@zikzakmedia.com>
# Jesus Martín <jmartin@zikzakmedia.com>
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from config import *
from moodle_ws_client import moodle
# Instantiate the Moodle webservice client; `server` comes from config.
mdl = moodle.MDL()

# xmlrpc Connection
print mdl.conn_xmlrpc(server)

"""
Get user
"""
# Look a user up by username; criteria are key/value dicts.
criteria = [{'key':'username','value':'usuario'}]
print mdl.get_users(server, criteria)

# Enrol a user in a course
# NOTE(review): roleid/userid/courseid are site-specific example values --
# verify against the target Moodle instance before reuse.
enrols =[{
    'roleid': 1,
    'userid': 18,
    'courseid': 18,
    }]
print mdl.enrol_users(server, enrols)
| agpl-3.0 |
luhanhan/horizon | openstack_dashboard/dashboards/project/data_processing/clusters/tests.py | 26 | 2651 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
INDEX_URL = reverse('horizon:project:data_processing.clusters:index')
DETAILS_URL = reverse(
'horizon:project:data_processing.clusters:details', args=['id'])
class DataProcessingClusterTests(test.TestCase):
    """View tests for the Sahara clusters panel (index, launch form, delete).

    Uses mox record/replay: API calls are recorded on the stubs, then
    ReplayAll() switches to replay mode before the view is exercised.
    """

    @test.create_stubs({api.sahara: ('cluster_list',)})
    def test_index(self):
        # The index view is expected to request the cluster list exactly once.
        api.sahara.cluster_list(IsA(http.HttpRequest), {}) \
            .AndReturn(self.clusters.list())
        self.mox.ReplayAll()

        res = self.client.get(INDEX_URL)

        self.assertTemplateUsed(
            res, 'project/data_processing.clusters/clusters.html')
        self.assertContains(res, 'Clusters')
        self.assertContains(res, 'Name')

    @test.create_stubs({api.sahara: ('cluster_template_list', 'image_list')})
    def test_launch_cluster_get_nodata(self):
        # With no templates and no images registered, the configure form
        # must render its empty-state messages instead of failing.
        api.sahara.cluster_template_list(IsA(http.HttpRequest)) \
            .AndReturn([])
        api.sahara.image_list(IsA(http.HttpRequest)) \
            .AndReturn([])
        self.mox.ReplayAll()
        url = reverse(
            'horizon:project:data_processing.clusters:configure-cluster')
        res = self.client.get("%s?plugin_name=shoes&hadoop_version=1.1" % url)
        self.assertContains(res, "No Images Available")
        self.assertContains(res, "No Templates Available")

    @test.create_stubs({api.sahara: ('cluster_list',
                                     'cluster_delete')})
    def test_delete(self):
        cluster = self.clusters.first()
        # Deleting re-fetches the list (for the table) and then issues
        # one delete for the selected cluster.
        api.sahara.cluster_list(IsA(http.HttpRequest), {}) \
            .AndReturn(self.clusters.list())
        api.sahara.cluster_delete(IsA(http.HttpRequest), cluster.id)
        self.mox.ReplayAll()

        form_data = {'action': 'clusters__delete__%s' % cluster.id}
        res = self.client.post(INDEX_URL, form_data)

        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
        self.assertMessageCount(success=1)
| apache-2.0 |
jcanizales/grpc | src/python/grpcio_tests/tests/unit/framework/common/__init__.py | 1496 | 1530 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| bsd-3-clause |
xombiemp/CouchPotatoServer | libs/dateutil/zoneinfo/__init__.py | 144 | 2773 | # -*- coding: utf-8 -*-
"""
Copyright (c) 2003-2005 Gustavo Niemeyer <gustavo@niemeyer.net>
This module offers extensions to the standard Python
datetime module.
"""
from dateutil.tz import tzfile
from tarfile import TarFile
import os
__author__ = "Tomi Pieviläinen <tomi.pievilainen@iki.fi>"
__license__ = "Simplified BSD"
__all__ = ["setcachesize", "gettz", "rebuild"]
CACHE = []
CACHESIZE = 10
class tzfile(tzfile):
    # Subclass of dateutil.tz.tzfile (deliberately shadows the imported
    # name) whose only addition is pickle support: unpickling goes through
    # gettz() so the cached instance is reused instead of serialising the
    # parsed timezone data.
    def __reduce__(self):
        return (gettz, (self._filename,))
def getzoneinfofile():
    """Locate the bundled zoneinfo tarball next to this module.

    Returns the path of the lexicographically greatest file whose name
    starts with 'zoneinfo' and contains '.tar.', or None if none exists.
    """
    module_dir = os.path.join(os.path.dirname(__file__))
    for entry in sorted(os.listdir(module_dir), reverse=True):
        if entry.startswith("zoneinfo") and ".tar." in entry:
            return os.path.join(os.path.dirname(__file__), entry)
    return None
ZONEINFOFILE = getzoneinfofile()
del getzoneinfofile
def setcachesize(size):
    """Set the maximum number of entries kept in the tzfile cache.

    Immediately discards any cached entries beyond the new limit.
    """
    global CACHESIZE, CACHE
    CACHESIZE = size
    del CACHE[size:]
def gettz(name):
    """Return a tzfile instance for *name* from the bundled zoneinfo
    tarball, or None if the name is unknown or no tarball is available.

    Results (including negative lookups, cached as None) are kept in a
    small most-recently-used list.
    """
    tzinfo = None
    if ZONEINFOFILE:
        for cachedname, tzinfo in CACHE:
            if cachedname == name:
                break
        else:
            # Cache miss (for/else): parse the zone out of the tarball.
            tf = TarFile.open(ZONEINFOFILE)
            try:
                zonefile = tf.extractfile(name)
            except KeyError:
                tzinfo = None
            else:
                tzinfo = tzfile(zonefile)
            tf.close()
            # Insert at the front; trim to the configured cache size.
            CACHE.insert(0, (name, tzinfo))
            del CACHE[CACHESIZE:]
    return tzinfo
def rebuild(filename, tag=None, format="gz"):
    """Rebuild the bundled zoneinfo tarball from an IANA tzdata archive.

    Extracts the zone source files from *filename*, compiles them with the
    external ``zic`` tool (must be on PATH), replaces any existing
    zoneinfo*.tar.* next to this module, and writes the new tarball.

    NOTE(review): with the default tag=None the target name becomes
    'zoneinfoNone.tar.<format>' because None is interpolated verbatim --
    presumably callers always pass a tag; confirm before relying on the
    default.
    """
    import tempfile, shutil
    tmpdir = tempfile.mkdtemp()
    zonedir = os.path.join(tmpdir, "zoneinfo")
    moduledir = os.path.dirname(__file__)
    if tag: tag = "-"+tag
    targetname = "zoneinfo%s.tar.%s" % (tag, format)
    try:
        tf = TarFile.open(filename)
        # The "backwards" zone file contains links to other files, so must be
        # processed as last
        for name in sorted(tf.getnames(),
                           key=lambda k: k != "backward" and k or "z"):
            if not (name.endswith(".sh") or
                    name.endswith(".tab") or
                    name == "leapseconds"):
                tf.extract(name, tmpdir)
                filepath = os.path.join(tmpdir, name)
                # Compile each zone source into binary tzfiles under zonedir.
                os.system("zic -d %s %s" % (zonedir, filepath))
        tf.close()
        target = os.path.join(moduledir, targetname)
        # Remove any previously bundled tarball(s) before writing the new one.
        for entry in os.listdir(moduledir):
            if entry.startswith("zoneinfo") and ".tar." in entry:
                os.unlink(os.path.join(moduledir, entry))
        tf = TarFile.open(target, "w:%s" % format)
        for entry in os.listdir(zonedir):
            entrypath = os.path.join(zonedir, entry)
            tf.add(entrypath, entry)
        tf.close()
    finally:
        # Always clean up the scratch directory, even if zic/tar fails.
        shutil.rmtree(tmpdir)
| gpl-3.0 |
artemh/asuswrt-merlin | release/src/router/libxml2/python/tests/xpathret.py | 87 | 1312 | #!/usr/bin/python -u
import sys
import libxml2
#memory debug specific
libxml2.debugMemory(1)
#
# A document hosting the nodes returned from the extension function
#
mydoc = libxml2.newDoc("1.0")
def foo(ctx, str):
    # XPath extension function: wraps the given text in a new <p> element
    # and returns it as a one-element node set.  Nodes handed back to
    # libxml2 must outlive the XPath evaluation, so they are anchored in
    # the module-level document `mydoc` rather than created free-standing.
    # (The parameter named `str` shadows the builtin, but cannot be
    # renamed here without touching the registered callback signature.)
    global mydoc
    #
    # test returning a node set works as expected
    #
    parent = mydoc.newDocNode(None, 'p', None)
    mydoc.addChild(parent)
    node = mydoc.newDocText(str)
    parent.addChild(node)
    return [parent]
# Parse the document the XPath query will run against.
doc = libxml2.parseFile("tst.xml")
ctxt = doc.xpathNewContext()
# Register foo() as the XPath extension function foo() (no namespace).
libxml2.registerXPathFunction(ctxt._o, "foo", None, foo)
res = ctxt.xpathEval("foo('hello')")
# The extension must return a node set: a list with exactly one <p>
# element whose single text child holds the argument string.
if type(res) != type([]):
    print "Failed to return a nodeset"
    sys.exit(1)
if len(res) != 1:
    print "Unexpected nodeset size"
    sys.exit(1)
node = res[0]
if node.name != 'p':
    print "Unexpected nodeset element result"
    sys.exit(1)
node = node.children
if node.type != 'text':
    print "Unexpected nodeset element children type"
    sys.exit(1)
if node.content != 'hello':
    print "Unexpected nodeset element children content"
    sys.exit(1)
# Free both documents and the context before the leak check below.
doc.freeDoc()
mydoc.freeDoc()
ctxt.xpathFreeContext()
#memory debug specific
libxml2.cleanupParser()
if libxml2.debugMemory(1) == 0:
    print "OK"
else:
    print "Memory leak %d bytes" % (libxml2.debugMemory(1))
    libxml2.dumpMemory()
kushalbhola/MyStuff | Practice/PythonApplication/env/Lib/site-packages/pandas/tests/indexes/multi/test_contains.py | 2 | 3306 | import numpy as np
import pytest
from pandas.compat import PYPY
import pandas as pd
from pandas import MultiIndex
import pandas.util.testing as tm
def test_contains_top_level():
    """A level value is a member of the MultiIndex but not of its
    tuple-based engine."""
    index = MultiIndex.from_product([["A", "B"], [1, 2]])
    assert "A" in index
    assert "A" not in index._engine
def test_contains_with_nat():
    """Containment still works when a datetime level carries a missing
    value (code -1 -> NaT)."""
    mi = MultiIndex(
        levels=[["C"], pd.date_range("2012-01-01", periods=5)],
        codes=[[0] * 6, [-1, 0, 1, 2, 3, 4]],
        names=[None, "B"],
    )
    assert ("C", pd.Timestamp("2012-01-01")) in mi
    # Every materialised tuple, including the NaT one, must be a member.
    for value in mi.values:
        assert value in mi
def test_contains(idx):
assert ("foo", "two") in idx
assert ("bar", "two") not in idx
assert None not in idx
@pytest.mark.skipif(not PYPY, reason="tuples cmp recursively on PyPy")
def test_isin_nan_pypy():
idx = MultiIndex.from_arrays([["foo", "bar"], [1.0, np.nan]])
tm.assert_numpy_array_equal(idx.isin([("bar", np.nan)]), np.array([False, True]))
tm.assert_numpy_array_equal(
idx.isin([("bar", float("nan"))]), np.array([False, True])
)
def test_isin():
values = [("foo", 2), ("bar", 3), ("quux", 4)]
idx = MultiIndex.from_arrays([["qux", "baz", "foo", "bar"], np.arange(4)])
result = idx.isin(values)
expected = np.array([False, False, True, True])
tm.assert_numpy_array_equal(result, expected)
# empty, return dtype bool
idx = MultiIndex.from_arrays([[], []])
result = idx.isin(values)
assert len(result) == 0
assert result.dtype == np.bool_
@pytest.mark.skipif(PYPY, reason="tuples cmp recursively on PyPy")
def test_isin_nan_not_pypy():
idx = MultiIndex.from_arrays([["foo", "bar"], [1.0, np.nan]])
tm.assert_numpy_array_equal(idx.isin([("bar", np.nan)]), np.array([False, False]))
tm.assert_numpy_array_equal(
idx.isin([("bar", float("nan"))]), np.array([False, False])
)
def test_isin_level_kwarg():
idx = MultiIndex.from_arrays([["qux", "baz", "foo", "bar"], np.arange(4)])
vals_0 = ["foo", "bar", "quux"]
vals_1 = [2, 3, 10]
expected = np.array([False, False, True, True])
tm.assert_numpy_array_equal(expected, idx.isin(vals_0, level=0))
tm.assert_numpy_array_equal(expected, idx.isin(vals_0, level=-2))
tm.assert_numpy_array_equal(expected, idx.isin(vals_1, level=1))
tm.assert_numpy_array_equal(expected, idx.isin(vals_1, level=-1))
msg = "Too many levels: Index has only 2 levels, not 6"
with pytest.raises(IndexError, match=msg):
idx.isin(vals_0, level=5)
msg = "Too many levels: Index has only 2 levels, -5 is not a valid level number"
with pytest.raises(IndexError, match=msg):
idx.isin(vals_0, level=-5)
with pytest.raises(KeyError, match=r"'Level 1\.0 not found'"):
idx.isin(vals_0, level=1.0)
with pytest.raises(KeyError, match=r"'Level -1\.0 not found'"):
idx.isin(vals_1, level=-1.0)
with pytest.raises(KeyError, match="'Level A not found'"):
idx.isin(vals_1, level="A")
idx.names = ["A", "B"]
tm.assert_numpy_array_equal(expected, idx.isin(vals_0, level="A"))
tm.assert_numpy_array_equal(expected, idx.isin(vals_1, level="B"))
with pytest.raises(KeyError, match="'Level C not found'"):
idx.isin(vals_1, level="C")
| apache-2.0 |
slonik-az/cython | tests/run/withstat_py.py | 29 | 4281 | import sys
def typename(t):
    """Return ``<type 'X'>`` for *t*, normalising Python < 2.5 names.

    Interpreters older than 2.5 report old-style classes as 'classobj'
    and their instances as 'instance'; map those back to the names newer
    Pythons use so the doctests compare equal on every interpreter.
    """
    name = type(t).__name__
    legacy = sys.version_info < (2, 5)
    if legacy and name == 'classobj' and issubclass(t, MyException):
        name = 'type'
    elif legacy and name == 'instance' and isinstance(t, MyException):
        name = 'MyException'
    return "<type '%s'>" % name
class MyException(Exception):
    # Dedicated exception type so the doctests can assert exactly which
    # exception class the with-statement machinery propagated.
    pass
class ContextManager(object):
    """Test double that logs ``__enter__``/``__exit__`` calls via print().

    *value* is what ``__enter__`` hands to the ``as`` target; *exit_ret*
    is returned from ``__exit__`` (a truthy value suppresses the
    in-flight exception, per the context-manager protocol).
    """
    def __init__(self, value, exit_ret = None):
        self.value = value
        self.exit_ret = exit_ret
    def __exit__(self, a, b, tb):
        # Log the types of (exc_type, exc_value, traceback) so the
        # doctests can assert exactly what the with-statement passed in.
        print("exit %s %s %s" % (typename(a), typename(b), typename(tb)))
        return self.exit_ret
    def __enter__(self):
        print("enter")
        return self.value
def no_as():
"""
>>> no_as()
enter
hello
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
"""
with ContextManager("value"):
print("hello")
def basic():
"""
>>> basic()
enter
value
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
"""
with ContextManager("value") as x:
print(x)
def with_pass():
"""
>>> with_pass()
enter
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
"""
with ContextManager("value") as x:
pass
def with_return():
"""
>>> print(with_return())
enter
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
value
"""
with ContextManager("value") as x:
return x
def with_break():
"""
>>> print(with_break())
enter
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
a
"""
for c in list("abc"):
with ContextManager("value") as x:
break
print("FAILED")
return c
def with_continue():
"""
>>> print(with_continue())
enter
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
enter
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
enter
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
c
"""
for c in list("abc"):
with ContextManager("value") as x:
continue
print("FAILED")
return c
def with_exception(exit_ret):
"""
>>> with_exception(None)
enter
value
exit <type 'type'> <type 'MyException'> <type 'traceback'>
outer except
>>> with_exception(True)
enter
value
exit <type 'type'> <type 'MyException'> <type 'traceback'>
"""
try:
with ContextManager("value", exit_ret=exit_ret) as value:
print(value)
raise MyException()
except:
print("outer except")
def with_real_lock():
"""
>>> with_real_lock()
about to acquire lock
holding lock
lock no longer held
"""
from threading import Lock
lock = Lock()
print("about to acquire lock")
with lock:
print("holding lock")
print("lock no longer held")
def functions_in_with():
"""
>>> f = functions_in_with()
enter
exit <type 'type'> <type 'MyException'> <type 'traceback'>
outer except
>>> f(1)[0]
1
>>> print(f(1)[1])
value
"""
try:
with ContextManager("value") as value:
def f(x): return x, value
make = lambda x:x()
raise make(MyException)
except:
print("outer except")
return f
def multitarget():
"""
>>> multitarget()
enter
1 2 3 4 5
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
"""
with ContextManager((1, 2, (3, (4, 5)))) as (a, b, (c, (d, e))):
print('%s %s %s %s %s' % (a, b, c, d, e))
def tupletarget():
"""
>>> tupletarget()
enter
(1, 2, (3, (4, 5)))
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
"""
with ContextManager((1, 2, (3, (4, 5)))) as t:
print(t)
class GetManager(object):
    """Factory whose method returns a context manager, used to test
    ``with expr.method(...) as x`` (manager obtained from an expression)."""
    def get(self, *args):
        return ContextManager(*args)
def manager_from_expression():
"""
>>> manager_from_expression()
enter
1
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
enter
2
exit <type 'NoneType'> <type 'NoneType'> <type 'NoneType'>
"""
with GetManager().get(1) as x:
print(x)
g = GetManager()
with g.get(2) as x:
print(x)
| apache-2.0 |
darkdukey/sdkbox-facebook-sample-v2 | tools/cocos2d-console/console/cocos2d.py | 4 | 2601 | #!/usr/bin/python
# ----------------------------------------------------------------------------
# cocos2d-console: command line tool manager for cocos2d
#
# Author: Ricardo Quesada
# Copyright 2013 (C) Zynga, Inc
#
# License: MIT
# ----------------------------------------------------------------------------
'''
Command line tool manager for cocos2d
'''
__docformat__ = 'restructuredtext'
# python
import sys
import re
import ConfigParser
import os
COCOS2D_CONSOLE_VERSION = '0.1'
#
# Plugins should be a sublass of CCJSPlugin
#
class CCPlugin(object):
    """Base class for cocos2d console plugins.

    Concrete plugins override :meth:`brief_description` (a one-line help
    string shown by ``help()``) and :meth:`run` (the plugin entry point,
    invoked with the remaining command-line arguments).
    """

    # Bug fix: the original declared ``brief_description(self)`` under
    # @staticmethod.  ``help()`` invokes it with zero arguments
    # (``classes[key].brief_description()``), which raised TypeError
    # because the bogus ``self`` parameter was left unbound.
    @staticmethod
    def brief_description():
        pass

    # Constructor
    def __init__(self):
        pass

    def run(self, argv):
        """Execute the plugin with the command-line arguments *argv*."""
        pass
# get_class from: http://stackoverflow.com/a/452981
def get_class(kls):
    """Resolve a dotted name such as ``"package.module.Class"`` to an object.

    A bare name (no dots) is looked up in this module's own namespace;
    otherwise the leading package is imported and each remaining
    component is resolved with ``getattr``.
    (Adapted from http://stackoverflow.com/a/452981.)
    """
    parts = kls.split('.')
    if len(parts) == 1:
        return getattr(sys.modules[__name__], parts[0])
    obj = __import__(".".join(parts[:-1]))
    for attribute in parts[1:]:
        obj = getattr(obj, attribute)
    return obj
def parse_plugins():
    """Discover plugin classes declared in ``cocos2d.ini`` files.

    Reads the global ``cocos2d.ini`` that ships next to this script,
    then lets the per-user ``~/.cocos2d-js/cocos2d.ini`` override it.
    Each ``[plugin "name"]`` section maps the plugin name to the class
    named by the section's option value.

    :return: dict mapping plugin name -> plugin class.
    """
    classes = {}
    cp = ConfigParser.ConfigParser()
    # read the global config file shipped next to the console script
    cocos2d_path = os.path.dirname(os.path.abspath(sys.argv[0]))
    cp.read(os.path.join(cocos2d_path, "cocos2d.ini"))
    # override it with the per-user config.  Bug fix: ConfigParser.read()
    # does not expand "~" itself, so the literal "~/..." path was never
    # found and per-user overrides were silently ignored.
    cp.read(os.path.expanduser("~/.cocos2d-js/cocos2d.ini"))
    for s in cp.sections():
        if s.startswith('plugin '):
            # raw string so the \s / \w regex escapes are explicit
            pluginname = re.match(r'plugin\s+"?(\w+)"?', s)
            if pluginname:
                key = pluginname.group(1)
                for o in cp.options(s):
                    classname = cp.get(s, o)
                    classes[key] = get_class(classname)
    return classes
def help():
    """Print usage, each plugin's brief description, and exit(-1).

    NOTE(review): shadows the ``help`` builtin; presumably intentional
    for a CLI entry point, but worth confirming.
    """
    print "\n%s %s - cocos2d console: A command line tool for cocos2d" % (sys.argv[0], COCOS2D_CONSOLE_VERSION)
    print "\nAvailable commands:"
    # one line of help text per discovered plugin
    classes = parse_plugins()
    for key in classes.keys():
        print "\t%s" % classes[key].brief_description()
        print "\t"
    print "\nExample:"
    print "\t%s new --help" % sys.argv[0]
    print "\t%s jscompile --help" % sys.argv[0]
    print "\t%s luacompile --help" % sys.argv[0]
    # non-zero exit: help is shown when the invocation was incomplete
    sys.exit(-1)
if __name__ == "__main__":
if len(sys.argv) == 1 or sys.argv[1] == '-h':
help()
command = sys.argv[1]
argv = sys.argv[2:]
plugins = parse_plugins()
if command in plugins:
plugin = plugins[command]
plugin().run(argv)
else:
print "Error: argument '%s' not found" % command
print "Try with %s -h" % sys.argv[0]
| mit |
mendersoftware/meta-mender-qemu | scripts/list-all-org-repos.py | 1 | 1955 | #!/usr/bin/python3
import argparse
import json
import re
import subprocess
import sys
parser = argparse.ArgumentParser()
parser.add_argument(
"--token",
help="Github Personal Access token, get it from https://github.com/settings/tokens.",
)
parser.add_argument(
"--org",
default="mendersoftware",
help="Organization to get repositories for. Defaults to mendersoftware",
)
args = parser.parse_args()
def process_response(body):
    """Print the ``ssh_url`` of every repository in one raw HTTP response.

    *body* is the full ``curl -i`` output: status line and headers,
    a blank line, then a JSON array of repository objects.
    """
    # The JSON payload starts right after the first blank line
    # (headers are separated from the body by "\r\n\r\n").
    payload_start = body.find("\r\n\r\n") + 4
    payload = body[payload_start:]
    for repo in json.loads(payload):
        print(repo["ssh_url"])
base_curl_args = [
"curl",
"-si",
"-H",
"Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"--compressed",
]
if args.token:
base_curl_args += ["-H", "Authorization: bearer %s" % args.token]
else:
sys.stderr.write(
"Warning: Running without token, private repositories will not be listed.\n"
)
url = "https://api.github.com/orgs/%s/repos" % args.org
output = None
try:
while True:
output = subprocess.check_output(base_curl_args + [url]).decode()
process_response(output)
# Example header (typically the one you are requesting is not present):
# Link: <https://api.github.com/organizations/15040539/repos?page=1>; rel="prev", <https://api.github.com/organizations/15040539/repos?page=3>; rel="next", <https://api.github.com/organizations/15040539/repos?page=3>; rel="last", <https://api.github.com/organizations/15040539/repos?page=1>; rel="first"
link_header = re.search(
r'^link:.*<([^>]*)>\s*;\s*rel="next"\s*,\s*<([^>]*)>\s*;\s*rel="last"',
output,
flags=re.MULTILINE | re.IGNORECASE,
)
if link_header is None or url == link_header.group(2):
break
url = link_header.group(1)
except:
print("Got exception, last response was:")
print(output)
raise
| apache-2.0 |
subho007/androguard | androguard/session.py | 14 | 4216 | import hashlib
import collections
from androguard.core import androconf
from androguard.core.bytecodes.apk import *
from androguard.core.bytecodes.dvm import *
from androguard.core.analysis.analysis import *
from androguard.decompiler.decompiler import *
from androguard.misc import save_session, load_session
class Session(object):
    """In-memory Androguard analysis session.

    Tracks every file fed to :meth:`add` and the APK / DEX analysis
    objects derived from it, keyed by the SHA-256 digest of the raw
    bytes.  State can be persisted with :meth:`save` / :meth:`load`.
    """
    def __init__(self):
        self.setupObjects()
    def save(self, filename):
        """Serialize the whole session state to *filename*."""
        save_session([self.analyzed_files,
                      self.analyzed_digest,
                      self.analyzed_apk,
                      self.analyzed_dex], filename)
    def load(self, filename):
        """Restore session state previously written by :meth:`save`."""
        self.analyzed_files, self.analyzed_digest, self.analyzed_apk, self.analyzed_dex = load_session(filename)
    def setupObjects(self):
        # analyzed_files: filename -> [digest, ...] (insertion-ordered)
        # analyzed_digest: digest -> filename (reverse lookup)
        # analyzed_apk: digest -> APK object
        # analyzed_dex: digest -> (DalvikVMFormat, VMAnalysis) pair
        self.analyzed_files = collections.OrderedDict()
        self.analyzed_digest = {}
        self.analyzed_apk = {}
        self.analyzed_dex = {}
    def reset(self):
        """Discard all analysis results and start from an empty session."""
        self.setupObjects()
    def isOpen(self):
        """Return True once at least one file has been analyzed."""
        return self.analyzed_digest != {}
    def addAPK(self, filename, data):
        """Analyze raw APK bytes and register the result.

        :return: tuple ``(sha256 hex digest, APK object)``.
        """
        digest = hashlib.sha256(data).hexdigest()
        androconf.debug("add APK:%s" % digest)
        apk = APK(data, True)
        self.analyzed_apk[digest] = apk
        # assumes self.analyzed_files[filename] was initialised by add()
        self.analyzed_files[filename].append(digest)
        self.analyzed_digest[digest] = filename
        androconf.debug("added APK:%s" % digest)
        return (digest, apk)
    def addDEX(self, filename, data):
        """Analyze raw DEX bytes, build the cross-references, and register.

        :return: tuple ``(sha256 hex digest, DalvikVMFormat, VMAnalysis)``.
        """
        digest = hashlib.sha256(data).hexdigest()
        androconf.debug("add DEX:%s" % digest)
        d = DalvikVMFormat(data)
        androconf.debug("VMAnalysis ...")
        dx = newVMAnalysis(d)
        dx.create_xref()
        d.set_decompiler(DecompilerDAD(d, dx))
        androconf.debug("added DEX:%s" % digest)
        self.analyzed_dex[digest] = (d, dx)
        self.analyzed_files[filename].append(digest)
        self.analyzed_digest[digest] = filename
        return (digest, d, dx)
    def add(self, filename, raw_data):
        """Detect the file type of *raw_data* and analyze it.

        APKs also get their embedded classes.dex analyzed.

        :return: True if the data was recognised and analyzed, else False.
        """
        ret = is_android_raw(raw_data)
        if ret:
            self.analyzed_files[filename] = []
            digest = hashlib.sha256(raw_data).hexdigest()
            if ret == "APK":
                apk_digest, apk = self.addAPK(filename, raw_data)
                self.addDEX(filename, apk.get_dex())
            elif ret == "DEX":
                self.addDEX(filename, raw_data)
            else:
                return False
            return True
        return False
    def get_classes(self):
        """Yield ``(index, filename, digest, classes)`` for every DEX."""
        idx = 0
        for filename in self.analyzed_files:
            for digest in self.analyzed_files[filename]:
                if digest in self.analyzed_dex:
                    d, _ = self.analyzed_dex[digest]
                    yield idx, filename, digest, d.get_classes()
                # idx counts every digest, analyzed or not, so indices
                # stay stable across calls
                idx += 1
    def get_analysis(self, current_class):
        """Return the VMAnalysis containing *current_class*, or None."""
        for digest in self.analyzed_dex:
            d, dx = self.analyzed_dex[digest]
            if dx.is_class_present(current_class.get_name()):
                return dx
        return None
    def get_format(self, current_class):
        """Return the DalvikVMFormat containing *current_class*, or None."""
        for digest in self.analyzed_dex:
            d, dx = self.analyzed_dex[digest]
            if dx.is_class_present(current_class.get_name()):
                return d
        return None
    def get_filename_by_class(self, current_class):
        """Return the source filename that defined *current_class*, or None."""
        for digest in self.analyzed_dex:
            d, dx = self.analyzed_dex[digest]
            if dx.is_class_present(current_class.get_name()):
                return self.analyzed_digest[digest]
        return None
    def get_digest_by_class(self, current_class):
        """Return the DEX digest that defined *current_class*, or None."""
        for digest in self.analyzed_dex:
            d, dx = self.analyzed_dex[digest]
            if dx.is_class_present(current_class.get_name()):
                return digest
        return None
    def get_strings(self):
        """Yield ``(digest, filename, strings_analysis)`` for every DEX."""
        for digest in self.analyzed_dex:
            d, dx = self.analyzed_dex[digest]
            yield digest, self.analyzed_digest[digest], dx.get_strings_analysis()
    def get_nb_strings(self):
        """Return the total number of analyzed strings across all DEX files."""
        nb = 0
        for digest in self.analyzed_dex:
            d, dx = self.analyzed_dex[digest]
            nb += len(dx.get_strings_analysis())
        return nb
| apache-2.0 |
vmthunder/nova | nova/api/openstack/compute/contrib/server_password.py | 17 | 2930 | # Copyright (c) 2012 Nebula, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The server password extension."""
import webob
from nova.api.metadata import password
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import compute
from nova import exception
authorize = extensions.extension_authorizer('compute', 'server_password')
class ServerPasswordTemplate(xmlutil.TemplateBuilder):
    """XML serialization template: a single ``<password>`` text element."""
    def construct(self):
        root = xmlutil.TemplateElement('password', selector='password')
        # Python 2 codebase: ``unicode`` declares the element's text type.
        root.text = unicode
        return xmlutil.MasterTemplate(root, 1)
class ServerPasswordController(object):
    """The Server Password API controller for the OpenStack API."""
    def __init__(self):
        self.compute_api = compute.API()
    def _get_instance(self, context, server_id):
        # Translate a missing instance into a 404 at the API boundary.
        try:
            return self.compute_api.get(context, server_id, want_objects=True)
        except exception.InstanceNotFound as exp:
            raise webob.exc.HTTPNotFound(explanation=exp.format_message())
    @wsgi.serializers(xml=ServerPasswordTemplate)
    def index(self, req, server_id):
        """GET: return the server's stored password (empty string if none)."""
        context = req.environ['nova.context']
        authorize(context)
        instance = self._get_instance(context, server_id)
        passw = password.extract_password(instance)
        return {'password': passw or ''}
    @wsgi.response(204)
    def delete(self, req, server_id):
        """DELETE: clear the stored password; responds 204 No Content."""
        context = req.environ['nova.context']
        authorize(context)
        instance = self._get_instance(context, server_id)
        # convert_password(context, None) yields metadata entries that
        # overwrite (clear) the stored password chunks.
        meta = password.convert_password(context, None)
        instance.system_metadata.update(meta)
        instance.save()
class Server_password(extensions.ExtensionDescriptor):
    """Server password support."""
    name = "ServerPassword"
    alias = "os-server-password"
    namespace = ("http://docs.openstack.org/compute/ext/"
                 "server-password/api/v2")
    updated = "2012-11-29T00:00:00Z"
    def get_resources(self):
        """Register /servers/{id}/os-server-password (GET + DELETE)."""
        resources = []
        res = extensions.ResourceExtension(
            'os-server-password',
            controller=ServerPasswordController(),
            collection_actions={'delete': 'DELETE'},
            parent=dict(member_name='server', collection_name='servers'))
        resources.append(res)
        return resources
| apache-2.0 |
M4rtinK/anaconda | tests/nosetests/regex_tests/username_test.py | 3 | 3827 | #!/usr/bin/python3
# vim:set fileencoding=utf-8
#
# Copyright (C) 2010-2013 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
import unittest
from regexcheck import regex_match
from pyanaconda.core.regexes import GECOS_VALID, NAME_VALID, GROUPLIST_SIMPLE_VALID
class UsernameRegexTestCase(unittest.TestCase):
def gecos_test(self):
"""Test a list of possible Full Name values."""
# These are valid full names
good_tests = [
'',
'George',
'George Burdell',
'George P. Burdell',
'Ğeorgé P. Burdełl',
'Burdell, George',
'g/p/b'
]
# These are invalid full names
bad_tests = ['George:Burdell']
if not regex_match(GECOS_VALID, good_tests, bad_tests):
self.fail()
def username_test(self):
"""Test a list of possible username values."""
good_tests = [
'gburdell',
'GBurdell',
'gburdell$',
'g_burdell',
'_burdell',
'gggggggggggggggggggggggggburdell', # 32 characters
'ggggggggggggggggggggggggburdell$',
'_',
'r',
'ro',
'roo',
'roota',
]
bad_tests = [
'',
'-gburdell', # can't start with a hyphen
'gburdełl', # invalid character
'g:burdell',
'g burdell',
'g,burdell',
'ggggggggggggggggggggggggggburdell', # 33 characters
'gggggggggggggggggggggggggburdell$',
' gburdell',
':gburdell',
'$',
'-'
]
# The group name checks for the same thing as the user name
if not regex_match(NAME_VALID, good_tests, bad_tests):
self.fail()
def grouplist_simple_test(self):
good_tests = [
'',
'gburdell',
' gburdell',
' \tgburdell',
'gburdell ',
'gburdell \t',
' gburdell ',
'gburdell,wheel',
'gburdell, wheel',
' gburdell, wheel',
'gburdell, wheel ',
' gburdell, wheel ',
'gburdell, wheel',
'gburdell,wheel, mock'
]
bad_tests = [
',',
'-invalid',
'gburdell, -invalid',
'gburdell,',
'gburdell, ',
',gburdell',
' ,gburdell',
',gburdell,'
'gburdell, wheel,'
]
if not regex_match(GROUPLIST_SIMPLE_VALID, good_tests, bad_tests):
self.fail()
| gpl-2.0 |
asnorkin/sentiment_analysis | site/lib/python2.7/site-packages/numpy/polynomial/tests/test_legendre.py | 58 | 18006 | """Tests for legendre module.
"""
from __future__ import division, absolute_import, print_function
import numpy as np
import numpy.polynomial.legendre as leg
from numpy.polynomial.polynomial import polyval
from numpy.testing import (
TestCase, assert_almost_equal, assert_raises,
assert_equal, assert_, run_module_suite)
L0 = np.array([1])
L1 = np.array([0, 1])
L2 = np.array([-1, 0, 3])/2
L3 = np.array([0, -3, 0, 5])/2
L4 = np.array([3, 0, -30, 0, 35])/8
L5 = np.array([0, 15, 0, -70, 0, 63])/8
L6 = np.array([-5, 0, 105, 0, -315, 0, 231])/16
L7 = np.array([0, -35, 0, 315, 0, -693, 0, 429])/16
L8 = np.array([35, 0, -1260, 0, 6930, 0, -12012, 0, 6435])/128
L9 = np.array([0, 315, 0, -4620, 0, 18018, 0, -25740, 0, 12155])/128
Llist = [L0, L1, L2, L3, L4, L5, L6, L7, L8, L9]
def trim(x):
    """Drop trailing Legendre coefficients of *x* smaller than 1e-6."""
    return leg.legtrim(x, tol=1e-6)
class TestConstants(TestCase):
    """Sanity checks for the module-level Legendre constants."""
    def test_legdomain(self):
        # natural domain of the Legendre polynomials
        assert_equal(leg.legdomain, [-1, 1])
    def test_legzero(self):
        # the zero polynomial in the Legendre basis
        assert_equal(leg.legzero, [0])
    def test_legone(self):
        # the constant 1 in the Legendre basis
        assert_equal(leg.legone, [1])
    def test_legx(self):
        # the identity polynomial x in the Legendre basis
        assert_equal(leg.legx, [0, 1])
class TestArithmetic(TestCase):
x = np.linspace(-1, 1, 100)
def test_legadd(self):
for i in range(5):
for j in range(5):
msg = "At i=%d, j=%d" % (i, j)
tgt = np.zeros(max(i, j) + 1)
tgt[i] += 1
tgt[j] += 1
res = leg.legadd([0]*i + [1], [0]*j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_legsub(self):
for i in range(5):
for j in range(5):
msg = "At i=%d, j=%d" % (i, j)
tgt = np.zeros(max(i, j) + 1)
tgt[i] += 1
tgt[j] -= 1
res = leg.legsub([0]*i + [1], [0]*j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_legmulx(self):
assert_equal(leg.legmulx([0]), [0])
assert_equal(leg.legmulx([1]), [0, 1])
for i in range(1, 5):
tmp = 2*i + 1
ser = [0]*i + [1]
tgt = [0]*(i - 1) + [i/tmp, 0, (i + 1)/tmp]
assert_equal(leg.legmulx(ser), tgt)
def test_legmul(self):
# check values of result
for i in range(5):
pol1 = [0]*i + [1]
val1 = leg.legval(self.x, pol1)
for j in range(5):
msg = "At i=%d, j=%d" % (i, j)
pol2 = [0]*j + [1]
val2 = leg.legval(self.x, pol2)
pol3 = leg.legmul(pol1, pol2)
val3 = leg.legval(self.x, pol3)
assert_(len(pol3) == i + j + 1, msg)
assert_almost_equal(val3, val1*val2, err_msg=msg)
def test_legdiv(self):
for i in range(5):
for j in range(5):
msg = "At i=%d, j=%d" % (i, j)
ci = [0]*i + [1]
cj = [0]*j + [1]
tgt = leg.legadd(ci, cj)
quo, rem = leg.legdiv(tgt, ci)
res = leg.legadd(leg.legmul(quo, ci), rem)
assert_equal(trim(res), trim(tgt), err_msg=msg)
class TestEvaluation(TestCase):
# coefficients of 1 + 2*x + 3*x**2
c1d = np.array([2., 2., 2.])
c2d = np.einsum('i,j->ij', c1d, c1d)
c3d = np.einsum('i,j,k->ijk', c1d, c1d, c1d)
# some random values in [-1, 1)
x = np.random.random((3, 5))*2 - 1
y = polyval(x, [1., 2., 3.])
def test_legval(self):
#check empty input
assert_equal(leg.legval([], [1]).size, 0)
#check normal input)
x = np.linspace(-1, 1)
y = [polyval(x, c) for c in Llist]
for i in range(10):
msg = "At i=%d" % i
tgt = y[i]
res = leg.legval(x, [0]*i + [1])
assert_almost_equal(res, tgt, err_msg=msg)
#check that shape is preserved
for i in range(3):
dims = [2]*i
x = np.zeros(dims)
assert_equal(leg.legval(x, [1]).shape, dims)
assert_equal(leg.legval(x, [1, 0]).shape, dims)
assert_equal(leg.legval(x, [1, 0, 0]).shape, dims)
def test_legval2d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
#test exceptions
assert_raises(ValueError, leg.legval2d, x1, x2[:2], self.c2d)
#test values
tgt = y1*y2
res = leg.legval2d(x1, x2, self.c2d)
assert_almost_equal(res, tgt)
#test shape
z = np.ones((2, 3))
res = leg.legval2d(z, z, self.c2d)
assert_(res.shape == (2, 3))
def test_legval3d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
#test exceptions
assert_raises(ValueError, leg.legval3d, x1, x2, x3[:2], self.c3d)
#test values
tgt = y1*y2*y3
res = leg.legval3d(x1, x2, x3, self.c3d)
assert_almost_equal(res, tgt)
#test shape
z = np.ones((2, 3))
res = leg.legval3d(z, z, z, self.c3d)
assert_(res.shape == (2, 3))
def test_leggrid2d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
#test values
tgt = np.einsum('i,j->ij', y1, y2)
res = leg.leggrid2d(x1, x2, self.c2d)
assert_almost_equal(res, tgt)
#test shape
z = np.ones((2, 3))
res = leg.leggrid2d(z, z, self.c2d)
assert_(res.shape == (2, 3)*2)
def test_leggrid3d(self):
x1, x2, x3 = self.x
y1, y2, y3 = self.y
#test values
tgt = np.einsum('i,j,k->ijk', y1, y2, y3)
res = leg.leggrid3d(x1, x2, x3, self.c3d)
assert_almost_equal(res, tgt)
#test shape
z = np.ones((2, 3))
res = leg.leggrid3d(z, z, z, self.c3d)
assert_(res.shape == (2, 3)*3)
class TestIntegral(TestCase):
def test_legint(self):
# check exceptions
assert_raises(ValueError, leg.legint, [0], .5)
assert_raises(ValueError, leg.legint, [0], -1)
assert_raises(ValueError, leg.legint, [0], 1, [0, 0])
# test integration of zero polynomial
for i in range(2, 5):
k = [0]*(i - 2) + [1]
res = leg.legint([0], m=i, k=k)
assert_almost_equal(res, [0, 1])
# check single integration with integration constant
for i in range(5):
scl = i + 1
pol = [0]*i + [1]
tgt = [i] + [0]*i + [1/scl]
legpol = leg.poly2leg(pol)
legint = leg.legint(legpol, m=1, k=[i])
res = leg.leg2poly(legint)
assert_almost_equal(trim(res), trim(tgt))
# check single integration with integration constant and lbnd
for i in range(5):
scl = i + 1
pol = [0]*i + [1]
legpol = leg.poly2leg(pol)
legint = leg.legint(legpol, m=1, k=[i], lbnd=-1)
assert_almost_equal(leg.legval(-1, legint), i)
# check single integration with integration constant and scaling
for i in range(5):
scl = i + 1
pol = [0]*i + [1]
tgt = [i] + [0]*i + [2/scl]
legpol = leg.poly2leg(pol)
legint = leg.legint(legpol, m=1, k=[i], scl=2)
res = leg.leg2poly(legint)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with default k
for i in range(5):
for j in range(2, 5):
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j):
tgt = leg.legint(tgt, m=1)
res = leg.legint(pol, m=j)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with defined k
for i in range(5):
for j in range(2, 5):
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j):
tgt = leg.legint(tgt, m=1, k=[k])
res = leg.legint(pol, m=j, k=list(range(j)))
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with lbnd
for i in range(5):
for j in range(2, 5):
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j):
tgt = leg.legint(tgt, m=1, k=[k], lbnd=-1)
res = leg.legint(pol, m=j, k=list(range(j)), lbnd=-1)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with scaling
for i in range(5):
for j in range(2, 5):
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j):
tgt = leg.legint(tgt, m=1, k=[k], scl=2)
res = leg.legint(pol, m=j, k=list(range(j)), scl=2)
assert_almost_equal(trim(res), trim(tgt))
def test_legint_axis(self):
# check that axis keyword works
c2d = np.random.random((3, 4))
tgt = np.vstack([leg.legint(c) for c in c2d.T]).T
res = leg.legint(c2d, axis=0)
assert_almost_equal(res, tgt)
tgt = np.vstack([leg.legint(c) for c in c2d])
res = leg.legint(c2d, axis=1)
assert_almost_equal(res, tgt)
tgt = np.vstack([leg.legint(c, k=3) for c in c2d])
res = leg.legint(c2d, k=3, axis=1)
assert_almost_equal(res, tgt)
class TestDerivative(TestCase):
def test_legder(self):
# check exceptions
assert_raises(ValueError, leg.legder, [0], .5)
assert_raises(ValueError, leg.legder, [0], -1)
# check that zeroth derivative does nothing
for i in range(5):
tgt = [0]*i + [1]
res = leg.legder(tgt, m=0)
assert_equal(trim(res), trim(tgt))
# check that derivation is the inverse of integration
for i in range(5):
for j in range(2, 5):
tgt = [0]*i + [1]
res = leg.legder(leg.legint(tgt, m=j), m=j)
assert_almost_equal(trim(res), trim(tgt))
# check derivation with scaling
for i in range(5):
for j in range(2, 5):
tgt = [0]*i + [1]
res = leg.legder(leg.legint(tgt, m=j, scl=2), m=j, scl=.5)
assert_almost_equal(trim(res), trim(tgt))
def test_legder_axis(self):
# check that axis keyword works
c2d = np.random.random((3, 4))
tgt = np.vstack([leg.legder(c) for c in c2d.T]).T
res = leg.legder(c2d, axis=0)
assert_almost_equal(res, tgt)
tgt = np.vstack([leg.legder(c) for c in c2d])
res = leg.legder(c2d, axis=1)
assert_almost_equal(res, tgt)
class TestVander(TestCase):
# some random values in [-1, 1)
x = np.random.random((3, 5))*2 - 1
def test_legvander(self):
# check for 1d x
x = np.arange(3)
v = leg.legvander(x, 3)
assert_(v.shape == (3, 4))
for i in range(4):
coef = [0]*i + [1]
assert_almost_equal(v[..., i], leg.legval(x, coef))
# check for 2d x
x = np.array([[1, 2], [3, 4], [5, 6]])
v = leg.legvander(x, 3)
assert_(v.shape == (3, 2, 4))
for i in range(4):
coef = [0]*i + [1]
assert_almost_equal(v[..., i], leg.legval(x, coef))
def test_legvander2d(self):
# also tests polyval2d for non-square coefficient array
x1, x2, x3 = self.x
c = np.random.random((2, 3))
van = leg.legvander2d(x1, x2, [1, 2])
tgt = leg.legval2d(x1, x2, c)
res = np.dot(van, c.flat)
assert_almost_equal(res, tgt)
# check shape
van = leg.legvander2d([x1], [x2], [1, 2])
assert_(van.shape == (1, 5, 6))
def test_legvander3d(self):
# also tests polyval3d for non-square coefficient array
x1, x2, x3 = self.x
c = np.random.random((2, 3, 4))
van = leg.legvander3d(x1, x2, x3, [1, 2, 3])
tgt = leg.legval3d(x1, x2, x3, c)
res = np.dot(van, c.flat)
assert_almost_equal(res, tgt)
# check shape
van = leg.legvander3d([x1], [x2], [x3], [1, 2, 3])
assert_(van.shape == (1, 5, 24))
class TestFitting(TestCase):
def test_legfit(self):
def f(x):
return x*(x - 1)*(x - 2)
def f2(x):
return x**4 + x**2 + 1
# Test exceptions
assert_raises(ValueError, leg.legfit, [1], [1], -1)
assert_raises(TypeError, leg.legfit, [[1]], [1], 0)
assert_raises(TypeError, leg.legfit, [], [1], 0)
assert_raises(TypeError, leg.legfit, [1], [[[1]]], 0)
assert_raises(TypeError, leg.legfit, [1, 2], [1], 0)
assert_raises(TypeError, leg.legfit, [1], [1, 2], 0)
assert_raises(TypeError, leg.legfit, [1], [1], 0, w=[[1]])
assert_raises(TypeError, leg.legfit, [1], [1], 0, w=[1, 1])
assert_raises(ValueError, leg.legfit, [1], [1], [-1,])
assert_raises(ValueError, leg.legfit, [1], [1], [2, -1, 6])
assert_raises(TypeError, leg.legfit, [1], [1], [])
# Test fit
x = np.linspace(0, 2)
y = f(x)
#
coef3 = leg.legfit(x, y, 3)
assert_equal(len(coef3), 4)
assert_almost_equal(leg.legval(x, coef3), y)
coef3 = leg.legfit(x, y, [0, 1, 2, 3])
assert_equal(len(coef3), 4)
assert_almost_equal(leg.legval(x, coef3), y)
#
coef4 = leg.legfit(x, y, 4)
assert_equal(len(coef4), 5)
assert_almost_equal(leg.legval(x, coef4), y)
coef4 = leg.legfit(x, y, [0, 1, 2, 3, 4])
assert_equal(len(coef4), 5)
assert_almost_equal(leg.legval(x, coef4), y)
# check things still work if deg is not in strict increasing
coef4 = leg.legfit(x, y, [2, 3, 4, 1, 0])
assert_equal(len(coef4), 5)
assert_almost_equal(leg.legval(x, coef4), y)
#
coef2d = leg.legfit(x, np.array([y, y]).T, 3)
assert_almost_equal(coef2d, np.array([coef3, coef3]).T)
coef2d = leg.legfit(x, np.array([y, y]).T, [0, 1, 2, 3])
assert_almost_equal(coef2d, np.array([coef3, coef3]).T)
# test weighting
w = np.zeros_like(x)
yw = y.copy()
w[1::2] = 1
y[0::2] = 0
wcoef3 = leg.legfit(x, yw, 3, w=w)
assert_almost_equal(wcoef3, coef3)
wcoef3 = leg.legfit(x, yw, [0, 1, 2, 3], w=w)
assert_almost_equal(wcoef3, coef3)
#
wcoef2d = leg.legfit(x, np.array([yw, yw]).T, 3, w=w)
assert_almost_equal(wcoef2d, np.array([coef3, coef3]).T)
wcoef2d = leg.legfit(x, np.array([yw, yw]).T, [0, 1, 2, 3], w=w)
assert_almost_equal(wcoef2d, np.array([coef3, coef3]).T)
# test scaling with complex values x points whose square
# is zero when summed.
x = [1, 1j, -1, -1j]
assert_almost_equal(leg.legfit(x, x, 1), [0, 1])
assert_almost_equal(leg.legfit(x, x, [0, 1]), [0, 1])
# test fitting only even Legendre polynomials
x = np.linspace(-1, 1)
y = f2(x)
coef1 = leg.legfit(x, y, 4)
assert_almost_equal(leg.legval(x, coef1), y)
coef2 = leg.legfit(x, y, [0, 2, 4])
assert_almost_equal(leg.legval(x, coef2), y)
assert_almost_equal(coef1, coef2)
class TestCompanion(TestCase):
def test_raises(self):
assert_raises(ValueError, leg.legcompanion, [])
assert_raises(ValueError, leg.legcompanion, [1])
def test_dimensions(self):
for i in range(1, 5):
coef = [0]*i + [1]
assert_(leg.legcompanion(coef).shape == (i, i))
def test_linear_root(self):
assert_(leg.legcompanion([1, 2])[0, 0] == -.5)
class TestGauss(TestCase):
def test_100(self):
x, w = leg.leggauss(100)
# test orthogonality. Note that the results need to be normalized,
# otherwise the huge values that can arise from fast growing
# functions like Laguerre can be very confusing.
v = leg.legvander(x, 99)
vv = np.dot(v.T * w, v)
vd = 1/np.sqrt(vv.diagonal())
vv = vd[:, None] * vv * vd
assert_almost_equal(vv, np.eye(100))
# check that the integral of 1 is correct
tgt = 2.0
assert_almost_equal(w.sum(), tgt)
class TestMisc(TestCase):
def test_legfromroots(self):
res = leg.legfromroots([])
assert_almost_equal(trim(res), [1])
for i in range(1, 5):
roots = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
pol = leg.legfromroots(roots)
res = leg.legval(roots, pol)
tgt = 0
assert_(len(pol) == i + 1)
assert_almost_equal(leg.leg2poly(pol)[-1], 1)
assert_almost_equal(res, tgt)
def test_legroots(self):
assert_almost_equal(leg.legroots([1]), [])
assert_almost_equal(leg.legroots([1, 2]), [-.5])
for i in range(2, 5):
tgt = np.linspace(-1, 1, i)
res = leg.legroots(leg.legfromroots(tgt))
assert_almost_equal(trim(res), trim(tgt))
def test_legtrim(self):
coef = [2, -1, 1, 0]
# Test exceptions
assert_raises(ValueError, leg.legtrim, coef, -1)
# Test results
assert_equal(leg.legtrim(coef), coef[:-1])
assert_equal(leg.legtrim(coef, 1), coef[:-3])
assert_equal(leg.legtrim(coef, 2), [0])
def test_legline(self):
assert_equal(leg.legline(3, 4), [3, 4])
def test_leg2poly(self):
for i in range(10):
assert_almost_equal(leg.leg2poly([0]*i + [1]), Llist[i])
def test_poly2leg(self):
for i in range(10):
assert_almost_equal(leg.poly2leg(Llist[i]), [0]*i + [1])
def test_weight(self):
x = np.linspace(-1, 1, 11)
tgt = 1.
res = leg.legweight(x)
assert_almost_equal(res, tgt)
if __name__ == "__main__":
    # Allow running this test module directly via numpy's legacy
    # (nose-based) test runner.
    run_module_suite()
| mit |
darjeeling/django | tests/m2m_recursive/models.py | 90 | 1029 | """
Many-to-many relationships between the same two tables
In this example, a ``Person`` can have many friends, who are also ``Person``
objects. Friendship is a symmetrical relationship - if I am your friend, you
are my friend. Here, ``friends`` is an example of a symmetrical
``ManyToManyField``.
A ``Person`` can also have many idols - but while I may idolize you, you may
not think the same of me. Here, ``idols`` is an example of a non-symmetrical
``ManyToManyField``. Only recursive ``ManyToManyField`` fields may be
non-symmetrical, and they are symmetrical by default.
This test validates that the many-to-many table is created using a mangled name
if there is a name clash, and tests that symmetry is preserved where
appropriate.
"""
from django.db import models
class Person(models.Model):
    """A person who can have (symmetrical) friends and (one-way) idols.

    Both M2M fields are self-referential; see the module docstring for the
    symmetry semantics being exercised.
    """
    # NOTE: field declaration order is kept as-is — Django assigns each field
    # a creation counter, so reordering would change generated forms/migrations.
    name = models.CharField(max_length=20)
    # Symmetrical (default for 'self'): if A is B's friend, B is A's friend.
    friends = models.ManyToManyField('self')
    # Non-symmetrical: idolizing is one-way; the reverse accessor is 'stalkers'.
    idols = models.ManyToManyField('self', symmetrical=False, related_name='stalkers')

    def __str__(self):
        return self.name
| bsd-3-clause |
blacklin/kbengine | kbe/src/lib/python/Lib/lib2to3/fixes/fix_sys_exc.py | 203 | 1034 | """Fixer for sys.exc_{type, value, traceback}
sys.exc_type -> sys.exc_info()[0]
sys.exc_value -> sys.exc_info()[1]
sys.exc_traceback -> sys.exc_info()[2]
"""
# By Jeff Balogh and Benjamin Peterson
# Local imports
from .. import fixer_base
from ..fixer_util import Attr, Call, Name, Number, Subscript, Node, syms
class FixSysExc(fixer_base.BaseFix):
    """Rewrite sys.exc_{type,value,traceback} as sys.exc_info()[0|1|2]."""
    # This order matches the ordering of sys.exc_info().
    exc_info = ["exc_type", "exc_value", "exc_traceback"]

    BM_compatible = True
    # Matches e.g. `sys.exc_type`; the '.' and the attribute leaf are captured
    # so their whitespace prefixes can be preserved in the replacement.
    PATTERN = """
              power< 'sys' trailer< dot='.' attribute=(%s) > >
              """ % '|'.join("'%s'" % e for e in exc_info)

    def transform(self, node, results):
        """Build the replacement node `sys.exc_info()[<index>]`.

        The attribute's position in ``exc_info`` determines the tuple index.
        """
        sys_attr = results["attribute"][0]
        index = Number(self.exc_info.index(sys_attr.value))

        call = Call(Name("exc_info"), prefix=sys_attr.prefix)
        attr = Attr(Name("sys"), call)
        # Carry over the whitespace/comment prefix that preceded the '.' so
        # the rewritten code keeps the original formatting.
        attr[1].children[0].prefix = results["dot"].prefix
        attr.append(Subscript(index))
        return Node(syms.power, attr, prefix=node.prefix)
| lgpl-3.0 |
kurtrwall/wagtail | wagtail/wagtailsearch/tests/test_elasticsearch_backend.py | 2 | 41987 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import datetime
import json
import os
import time
import unittest
import warnings

import mock
from django.core import management
from django.db.models import Q
from django.test import TestCase
from django.utils.six import StringIO
from elasticsearch import NotFoundError
from elasticsearch.serializer import JSONSerializer

from wagtail.tests.search import models
from wagtail.utils.deprecation import RemovedInWagtail18Warning
from wagtail.wagtailsearch.backends import get_search_backend
from wagtail.wagtailsearch.backends.elasticsearch import ElasticsearchSearchBackend

from .test_backends import BackendTests
class TestElasticsearchSearchBackend(BackendTests, TestCase):
    """Integration tests for the Elasticsearch backend.

    Runs the shared ``BackendTests`` suite plus Elasticsearch-specific cases
    (partial/edgengram matching, ascii folding, analyzers, custom ordering).
    NOTE(review): these talk to a live Elasticsearch server configured for the
    test run — they are not pure unit tests.
    """
    # Dotted path used by the BackendTests mixin to instantiate the backend.
    backend_path = 'wagtail.wagtailsearch.backends.elasticsearch'

    def test_search_with_spaces_only(self):
        # Search for some space characters and hope it doesn't crash
        results = self.backend.search(" ", models.SearchTest)

        # Queries are lazily evaluated, force it to run
        list(results)

        # Didn't crash, yay!

    def test_filter_on_non_filterindex_field(self):
        # id is not listed in the search_fields for SearchTest; this should raise a FieldError
        from wagtail.wagtailsearch.backends.base import FieldError

        with self.assertRaises(FieldError):
            list(self.backend.search("Hello", models.SearchTest, filters=dict(id=42)))

    def test_filter_with_unsupported_lookup_type(self):
        # iregex is not a lookup this backend knows how to translate.
        from wagtail.wagtailsearch.backends.base import FilterError

        with self.assertRaises(FilterError):
            list(self.backend.search("Hello", models.SearchTest, filters=dict(title__iregex='h(ea)llo')))

    def test_partial_search(self):
        """A prefix of an indexed word ("HelloW") should match via edgengrams."""
        # Reset the index
        self.backend.reset_index()
        self.backend.add_type(models.SearchTest)
        self.backend.add_type(models.SearchTestChild)

        # Add some test data
        obj = models.SearchTest()
        obj.title = "HelloWorld"
        obj.live = True
        obj.save()
        self.backend.add(obj)

        # Refresh the index
        self.backend.refresh_index()

        # Search and check
        results = self.backend.search("HelloW", models.SearchTest.objects.all())

        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].id, obj.id)

    def test_child_partial_search(self):
        """Partial matching must also work on fields defined by a subclass."""
        # Reset the index
        self.backend.reset_index()
        self.backend.add_type(models.SearchTest)
        self.backend.add_type(models.SearchTestChild)

        obj = models.SearchTestChild()
        obj.title = "WorldHello"
        obj.subtitle = "HelloWorld"
        obj.live = True
        obj.save()
        self.backend.add(obj)

        # Refresh the index
        self.backend.refresh_index()

        # Search and check
        results = self.backend.search("HelloW", models.SearchTest.objects.all())

        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].id, obj.id)

    def test_ascii_folding(self):
        """Accented text should be findable with a plain-ASCII query."""
        # Reset the index
        self.backend.reset_index()
        self.backend.add_type(models.SearchTest)
        self.backend.add_type(models.SearchTestChild)

        # Add some test data
        obj = models.SearchTest()
        obj.title = "Ĥéllø"
        obj.live = True
        obj.save()
        self.backend.add(obj)

        # Refresh the index
        self.backend.refresh_index()

        # Search and check
        results = self.backend.search("Hello", models.SearchTest.objects.all())

        self.assertEqual(len(results), 1)
        self.assertEqual(results[0].id, obj.id)

    def test_query_analyser(self):
        """
        This is testing that fields that use edgengram_analyzer as their index analyser do not
        have it also as their query analyser
        """
        # Reset the index
        self.backend.reset_index()
        self.backend.add_type(models.SearchTest)
        self.backend.add_type(models.SearchTestChild)

        # Add some test data
        obj = models.SearchTest()
        obj.title = "Hello"
        obj.live = True
        obj.save()
        self.backend.add(obj)

        # Refresh the index
        self.backend.refresh_index()

        # Test search for "Hello"
        results = self.backend.search("Hello", models.SearchTest.objects.all())

        # Should find the result
        self.assertEqual(len(results), 1)

        # Test search for "Horse"
        results = self.backend.search("Horse", models.SearchTest.objects.all())

        # Even though they both start with the letter "H". This should not be considered a match
        self.assertEqual(len(results), 0)

    def test_search_with_hyphen(self):
        """
        This tests that punctuation characters are treated the same
        way in both indexing and querying.

        See: https://github.com/torchbox/wagtail/issues/937
        """
        # Reset the index
        self.backend.reset_index()
        self.backend.add_type(models.SearchTest)
        self.backend.add_type(models.SearchTestChild)

        # Add some test data
        obj = models.SearchTest()
        obj.title = "Hello-World"
        obj.live = True
        obj.save()
        self.backend.add(obj)

        # Refresh the index
        self.backend.refresh_index()

        # Test search for "Hello-World"
        results = self.backend.search("Hello-World", models.SearchTest.objects.all())

        # Should find the result
        self.assertEqual(len(results), 1)

    def test_custom_ordering(self):
        # Reset the index
        self.backend.reset_index()
        self.backend.add_type(models.SearchTest)

        # Add some test data
        # a is more relevant, but b is more recent
        a = models.SearchTest()
        a.title = "Hello Hello World"
        a.live = True
        a.published_date = datetime.date(2015, 10, 11)
        a.save()
        self.backend.add(a)

        b = models.SearchTest()
        b.title = "Hello World"
        b.live = True
        b.published_date = datetime.date(2015, 10, 12)
        b.save()
        self.backend.add(b)

        # Refresh the index
        self.backend.refresh_index()

        # Do a search ordered by relevence
        results = self.backend.search("Hello", models.SearchTest.objects.all())
        self.assertEqual(list(results), [a, b])

        # Do a search ordered by published date
        results = self.backend.search(
            "Hello", models.SearchTest.objects.order_by('-published_date'), order_by_relevance=False
        )
        self.assertEqual(list(results), [b, a])

    def test_and_operator_with_single_field(self):
        # Testing for bug #1859
        # Reset the index
        self.backend.reset_index()
        self.backend.add_type(models.SearchTest)

        a = models.SearchTest()
        a.title = "Hello World"
        a.live = True
        a.published_date = datetime.date(2015, 10, 12)
        a.save()
        self.backend.add(a)

        # Refresh the index
        self.backend.refresh_index()

        # Run query with "and" operator and single field
        results = self.backend.search("Hello World", models.SearchTest, operator='and', fields=['title'])
        self.assertEqual(list(results), [a])

    def test_update_index_command_schema_only(self):
        # Reset the index, this should clear out the index
        self.backend.reset_index()

        # Give Elasticsearch some time to catch up...
        time.sleep(1)

        results = self.backend.search(None, models.SearchTest)
        self.assertEqual(set(results), set())

        # Run update_index command
        with self.ignore_deprecation_warnings():
            # ignore any DeprecationWarnings thrown by models with old-style indexed_fields definitions
            management.call_command(
                'update_index', backend_name=self.backend_name, schema_only=True, interactive=False, stdout=StringIO()
            )

        # Unlike the test_update_index_command test. This should not give any results
        results = self.backend.search(None, models.SearchTest)
        self.assertEqual(set(results), set())
class TestElasticsearchSearchQuery(TestCase):
    """Unit tests for the JSON query bodies and sort specs the backend builds.

    No Elasticsearch server is needed: each test constructs a query object and
    compares ``get_query()``/``get_sort()`` output against the expected
    structure.
    """

    def assertDictEqual(self, a, b):
        # Compare via a canonical (sorted-keys) JSON dump so dict key ordering
        # cannot cause false failures. Also works for lists (see the
        # test_custom_ordering* tests, which compare sort specs).
        default = JSONSerializer().default
        self.assertEqual(
            json.dumps(a, sort_keys=True, default=default), json.dumps(b, sort_keys=True, default=default)
        )

    # The query class under test, taken straight from the backend.
    query_class = ElasticsearchSearchBackend.query_class

    def test_simple(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.all(), "Hello")

        # Check it
        expected_result = {'filtered': {
            'filter': {'prefix': {'content_type': 'searchtests_searchtest'}},
            'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}
        }}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_none_query_string(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.all(), None)

        # Check it
        expected_result = {'filtered': {
            'filter': {'prefix': {'content_type': 'searchtests_searchtest'}},
            'query': {'match_all': {}}
        }}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_and_operator(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.all(), "Hello", operator='and')

        # Check it
        expected_result = {'filtered': {
            'filter': {'prefix': {'content_type': 'searchtests_searchtest'}},
            'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials'], 'operator': 'and'}}
        }}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_filter(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.filter(title="Test"), "Hello")

        # Check it
        expected_result = {'filtered': {'filter': {'and': [
            {'prefix': {'content_type': 'searchtests_searchtest'}},
            {'term': {'title_filter': 'Test'}}
        ]}, 'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}}}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_and_filter(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.filter(title="Test", live=True), "Hello")

        # Check it
        expected_result = {'filtered': {'filter': {'and': [
            {'prefix': {'content_type': 'searchtests_searchtest'}},
            {'and': [{'term': {'live_filter': True}}, {'term': {'title_filter': 'Test'}}]}
        ]}, 'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}}}

        # Make sure field filters are sorted (as they can be in any order which may cause false positives)
        query = query.get_query()
        field_filters = query['filtered']['filter']['and'][1]['and']
        field_filters[:] = sorted(field_filters, key=lambda f: list(f['term'].keys())[0])

        self.assertDictEqual(query, expected_result)

    def test_or_filter(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.filter(Q(title="Test") | Q(live=True)), "Hello")

        # Make sure field filters are sorted (as they can be in any order which may cause false positives)
        query = query.get_query()
        field_filters = query['filtered']['filter']['and'][1]['or']
        field_filters[:] = sorted(field_filters, key=lambda f: list(f['term'].keys())[0])

        # Check it
        expected_result = {'filtered': {'filter': {'and': [
            {'prefix': {'content_type': 'searchtests_searchtest'}},
            {'or': [{'term': {'live_filter': True}}, {'term': {'title_filter': 'Test'}}]}
        ]}, 'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}}}
        self.assertDictEqual(query, expected_result)

    def test_negated_filter(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.exclude(live=True), "Hello")

        # Check it
        expected_result = {'filtered': {'filter': {'and': [
            {'prefix': {'content_type': 'searchtests_searchtest'}},
            {'not': {'term': {'live_filter': True}}}
        ]}, 'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}}}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_fields(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.all(), "Hello", fields=['title'])

        # Check it
        expected_result = {'filtered': {
            'filter': {'prefix': {'content_type': 'searchtests_searchtest'}},
            'query': {'match': {'title': 'Hello'}}
        }}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_fields_with_and_operator(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.all(), "Hello", fields=['title'], operator='and')

        # Check it
        expected_result = {'filtered': {
            'filter': {'prefix': {'content_type': 'searchtests_searchtest'}},
            'query': {'match': {'title': {'query': 'Hello', 'operator': 'and'}}}
        }}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_multiple_fields(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.all(), "Hello", fields=['title', 'content'])

        # Check it
        expected_result = {'filtered': {
            'filter': {'prefix': {'content_type': 'searchtests_searchtest'}},
            'query': {'multi_match': {'fields': ['title', 'content'], 'query': 'Hello'}}
        }}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_multiple_fields_with_and_operator(self):
        # Create a query
        query = self.query_class(
            models.SearchTest.objects.all(), "Hello", fields=['title', 'content'], operator='and'
        )

        # Check it
        expected_result = {'filtered': {
            'filter': {'prefix': {'content_type': 'searchtests_searchtest'}},
            'query': {'multi_match': {'fields': ['title', 'content'], 'query': 'Hello', 'operator': 'and'}}
        }}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_exact_lookup(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.filter(title__exact="Test"), "Hello")

        # Check it
        expected_result = {'filtered': {'filter': {'and': [
            {'prefix': {'content_type': 'searchtests_searchtest'}},
            {'term': {'title_filter': 'Test'}}
        ]}, 'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}}}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_none_lookup(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.filter(title=None), "Hello")

        # Check it
        expected_result = {'filtered': {'filter': {'and': [
            {'prefix': {'content_type': 'searchtests_searchtest'}},
            {'missing': {'field': 'title_filter'}}
        ]}, 'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}}}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_isnull_true_lookup(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.filter(title__isnull=True), "Hello")

        # Check it
        expected_result = {'filtered': {'filter': {'and': [
            {'prefix': {'content_type': 'searchtests_searchtest'}},
            {'missing': {'field': 'title_filter'}}
        ]}, 'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}}}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_isnull_false_lookup(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.filter(title__isnull=False), "Hello")

        # Check it
        expected_result = {'filtered': {'filter': {'and': [
            {'prefix': {'content_type': 'searchtests_searchtest'}},
            {'not': {'missing': {'field': 'title_filter'}}}
        ]}, 'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}}}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_startswith_lookup(self):
        # Create a query
        query = self.query_class(models.SearchTest.objects.filter(title__startswith="Test"), "Hello")

        # Check it
        expected_result = {'filtered': {'filter': {'and': [
            {'prefix': {'content_type': 'searchtests_searchtest'}},
            {'prefix': {'title_filter': 'Test'}}
        ]}, 'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}}}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_gt_lookup(self):
        # This also tests conversion of python dates to strings

        # Create a query
        query = self.query_class(
            models.SearchTest.objects.filter(published_date__gt=datetime.datetime(2014, 4, 29)), "Hello"
        )

        # Check it
        expected_result = {'filtered': {'filter': {'and': [
            {'prefix': {'content_type': 'searchtests_searchtest'}},
            {'range': {'published_date_filter': {'gt': '2014-04-29'}}}
        ]}, 'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}}}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_lt_lookup(self):
        # Create a query
        query = self.query_class(
            models.SearchTest.objects.filter(published_date__lt=datetime.datetime(2014, 4, 29)), "Hello"
        )

        # Check it
        expected_result = {'filtered': {'filter': {'and': [
            {'prefix': {'content_type': 'searchtests_searchtest'}},
            {'range': {'published_date_filter': {'lt': '2014-04-29'}}}
        ]}, 'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}}}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_gte_lookup(self):
        # Create a query
        query = self.query_class(
            models.SearchTest.objects.filter(published_date__gte=datetime.datetime(2014, 4, 29)), "Hello"
        )

        # Check it
        expected_result = {'filtered': {'filter': {'and': [
            {'prefix': {'content_type': 'searchtests_searchtest'}},
            {'range': {'published_date_filter': {'gte': '2014-04-29'}}}
        ]}, 'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}}}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_lte_lookup(self):
        # Create a query
        query = self.query_class(
            models.SearchTest.objects.filter(published_date__lte=datetime.datetime(2014, 4, 29)), "Hello"
        )

        # Check it
        expected_result = {'filtered': {'filter': {'and': [
            {'prefix': {'content_type': 'searchtests_searchtest'}},
            {'range': {'published_date_filter': {'lte': '2014-04-29'}}}
        ]}, 'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}}}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_range_lookup(self):
        start_date = datetime.datetime(2014, 4, 29)
        end_date = datetime.datetime(2014, 8, 19)

        # Create a query
        query = self.query_class(
            models.SearchTest.objects.filter(published_date__range=(start_date, end_date)), "Hello"
        )

        # Check it
        expected_result = {'filtered': {'filter': {'and': [
            {'prefix': {'content_type': 'searchtests_searchtest'}},
            {'range': {'published_date_filter': {'gte': '2014-04-29', 'lte': '2014-08-19'}}}
        ]}, 'query': {'multi_match': {'query': 'Hello', 'fields': ['_all', '_partials']}}}}
        self.assertDictEqual(query.get_query(), expected_result)

    def test_custom_ordering(self):
        # Create a query
        query = self.query_class(
            models.SearchTest.objects.order_by('published_date'), "Hello", order_by_relevance=False
        )

        # Check it
        expected_result = [{'published_date_filter': 'asc'}]
        self.assertDictEqual(query.get_sort(), expected_result)

    def test_custom_ordering_reversed(self):
        # Create a query
        query = self.query_class(
            models.SearchTest.objects.order_by('-published_date'), "Hello", order_by_relevance=False
        )

        # Check it
        expected_result = [{'published_date_filter': 'desc'}]
        self.assertDictEqual(query.get_sort(), expected_result)

    def test_custom_ordering_multiple(self):
        # Create a query
        query = self.query_class(
            models.SearchTest.objects.order_by('published_date', 'live'), "Hello", order_by_relevance=False
        )

        # Check it
        expected_result = [{'published_date_filter': 'asc'}, {'live_filter': 'asc'}]
        self.assertDictEqual(query.get_sort(), expected_result)
class TestElasticsearchSearchResults(TestCase):
    """Tests result handling (slicing, pagination, ordering) with a mocked
    ``Elasticsearch.search`` call — no live server needed."""

    def assertDictEqual(self, a, b):
        # BUG FIX: previously this compared the serialized `a` against the
        # *function object* ``json.dumps`` (the second argument's call was
        # missing), so the assertion could never pass. Now compares canonical
        # (sorted-keys) JSON dumps of both values, matching the helper used by
        # the sibling test classes in this module.
        default = JSONSerializer().default
        self.assertEqual(
            json.dumps(a, sort_keys=True, default=default), json.dumps(b, sort_keys=True, default=default)
        )

    def setUp(self):
        # Three SearchTest rows whose pks are returned by the fake responses.
        self.objects = []

        for i in range(3):
            self.objects.append(models.SearchTest.objects.create(title=str(i)))

    def get_results(self):
        """Build a results object around a mocked query ('QUERY', no sort)."""
        backend = ElasticsearchSearchBackend({})
        query = mock.MagicMock()
        query.queryset = models.SearchTest.objects.all()
        query.get_query.return_value = 'QUERY'
        query.get_sort.return_value = None
        return backend.results_class(backend, query)

    def construct_search_response(self, results):
        """Fake an Elasticsearch response containing the given list of pks."""
        return {
            '_shards': {'failed': 0, 'successful': 5, 'total': 5},
            'hits': {
                'hits': [
                    {
                        '_id': 'searchtests_searchtest:' + str(result),
                        '_index': 'wagtail',
                        '_score': 1,
                        '_type': 'searchtests_searchtest',
                        'fields': {
                            'pk': [str(result)],
                        }
                    }
                    for result in results
                ],
                'max_score': 1,
                'total': len(results)
            },
            'timed_out': False,
            'took': 2
        }

    @mock.patch('elasticsearch.Elasticsearch.search')
    def test_basic_search(self, search):
        search.return_value = self.construct_search_response([])
        results = self.get_results()

        list(results)  # Performs search

        search.assert_any_call(
            from_=0,
            body={'query': 'QUERY'},
            _source=False,
            fields='pk',
            index='wagtail'
        )

    @mock.patch('elasticsearch.Elasticsearch.search')
    def test_get_single_item(self, search):
        # Need to return something to prevent index error
        search.return_value = self.construct_search_response([self.objects[0].id])
        results = self.get_results()

        results[10]  # Performs search

        search.assert_any_call(
            from_=10,
            body={'query': 'QUERY'},
            _source=False,
            fields='pk',
            index='wagtail',
            size=1
        )

    @mock.patch('elasticsearch.Elasticsearch.search')
    def test_slice_results(self, search):
        search.return_value = self.construct_search_response([])
        results = self.get_results()[1:4]

        list(results)  # Performs search

        search.assert_any_call(
            from_=1,
            body={'query': 'QUERY'},
            _source=False,
            fields='pk',
            index='wagtail',
            size=3
        )

    @mock.patch('elasticsearch.Elasticsearch.search')
    def test_slice_results_multiple_times(self, search):
        # Composed slices must resolve to a single from_/size window.
        search.return_value = self.construct_search_response([])
        results = self.get_results()[10:][:10]

        list(results)  # Performs search

        search.assert_any_call(
            from_=10,
            body={'query': 'QUERY'},
            _source=False,
            fields='pk',
            index='wagtail',
            size=10
        )

    @mock.patch('elasticsearch.Elasticsearch.search')
    def test_slice_results_and_get_item(self, search):
        # Need to return something to prevent index error
        search.return_value = self.construct_search_response([self.objects[0].id])
        results = self.get_results()[10:]

        results[10]  # Performs search

        search.assert_any_call(
            from_=20,
            body={'query': 'QUERY'},
            _source=False,
            fields='pk',
            index='wagtail',
            size=1
        )

    @mock.patch('elasticsearch.Elasticsearch.search')
    def test_result_returned(self, search):
        search.return_value = self.construct_search_response([self.objects[0].id])
        results = self.get_results()

        self.assertEqual(results[0], self.objects[0])

    @mock.patch('elasticsearch.Elasticsearch.search')
    def test_len_1(self, search):
        search.return_value = self.construct_search_response([self.objects[0].id])
        results = self.get_results()

        self.assertEqual(len(results), 1)

    @mock.patch('elasticsearch.Elasticsearch.search')
    def test_len_2(self, search):
        search.return_value = self.construct_search_response([self.objects[0].id, self.objects[1].id])
        results = self.get_results()

        self.assertEqual(len(results), 2)

    @mock.patch('elasticsearch.Elasticsearch.search')
    def test_duplicate_results(self, search):  # Duplicates will not be removed
        search.return_value = self.construct_search_response([self.objects[0].id, self.objects[0].id])
        results = list(self.get_results())  # Must cast to list so we only create one query

        self.assertEqual(len(results), 2)
        self.assertEqual(results[0], self.objects[0])
        self.assertEqual(results[1], self.objects[0])

    @mock.patch('elasticsearch.Elasticsearch.search')
    def test_result_order(self, search):
        search.return_value = self.construct_search_response(
            [self.objects[0].id, self.objects[1].id, self.objects[2].id]
        )
        results = list(self.get_results())  # Must cast to list so we only create one query

        self.assertEqual(results[0], self.objects[0])
        self.assertEqual(results[1], self.objects[1])
        self.assertEqual(results[2], self.objects[2])

    @mock.patch('elasticsearch.Elasticsearch.search')
    def test_result_order_2(self, search):
        search.return_value = self.construct_search_response(
            [self.objects[2].id, self.objects[1].id, self.objects[0].id]
        )
        results = list(self.get_results())  # Must cast to list so we only create one query

        self.assertEqual(results[0], self.objects[2])
        self.assertEqual(results[1], self.objects[1])
        self.assertEqual(results[2], self.objects[0])
class TestElasticsearchMapping(TestCase):
    """Tests mapping/document generation for a directly-indexed model."""

    def assertDictEqual(self, a, b):
        # Canonical (sorted-keys) JSON comparison — same helper as the
        # sibling test classes in this module.
        default = JSONSerializer().default
        self.assertEqual(
            json.dumps(a, sort_keys=True, default=default), json.dumps(b, sort_keys=True, default=default)
        )

    def setUp(self):
        # Create ES mapping
        self.es_mapping = ElasticsearchSearchBackend.mapping_class(models.SearchTest)

        # Create ES document
        self.obj = models.SearchTest(title="Hello")
        self.obj.save()
        self.obj.tags.add("a tag")

    def test_get_document_type(self):
        self.assertEqual(self.es_mapping.get_document_type(), 'searchtests_searchtest')

    def test_get_mapping(self):
        # Build mapping
        mapping = self.es_mapping.get_mapping()

        # Check
        expected_result = {
            'searchtests_searchtest': {
                'properties': {
                    'pk': {'index': 'not_analyzed', 'type': 'string', 'store': 'yes', 'include_in_all': False},
                    'content_type': {'index': 'not_analyzed', 'type': 'string', 'include_in_all': False},
                    '_partials': {'index_analyzer': 'edgengram_analyzer', 'include_in_all': False, 'type': 'string'},
                    'live_filter': {'index': 'not_analyzed', 'type': 'boolean', 'include_in_all': False},
                    'published_date_filter': {'index': 'not_analyzed', 'type': 'date', 'include_in_all': False},
                    'title': {'type': 'string', 'include_in_all': True, 'index_analyzer': 'edgengram_analyzer'},
                    'title_filter': {'index': 'not_analyzed', 'type': 'string', 'include_in_all': False},
                    'content': {'type': 'string', 'include_in_all': True},
                    'callable_indexed_field': {'type': 'string', 'include_in_all': True},
                    'tags': {
                        'type': 'nested',
                        'properties': {
                            'name': {'type': 'string', 'include_in_all': True, 'index_analyzer': 'edgengram_analyzer'},
                            'slug_filter': {'index': 'not_analyzed', 'type': 'string', 'include_in_all': False},
                        }
                    },
                }
            }
        }

        self.assertDictEqual(mapping, expected_result)

    def test_get_document_id(self):
        self.assertEqual(self.es_mapping.get_document_id(self.obj), 'searchtests_searchtest:' + str(self.obj.pk))

    def test_get_document(self):
        # Get document
        document = self.es_mapping.get_document(self.obj)

        # Sort partials
        # (their generation order is not guaranteed, so normalise before comparing)
        if '_partials' in document:
            document['_partials'].sort()

        # Check
        expected_result = {
            'pk': str(self.obj.pk),
            'content_type': 'searchtests_searchtest',
            '_partials': ['Hello', 'a tag'],
            'live_filter': False,
            'published_date_filter': None,
            'title': 'Hello',
            'title_filter': 'Hello',
            'callable_indexed_field': 'Callable',
            'content': '',
            'tags': [
                {
                    'name': 'a tag',
                    'slug_filter': 'a-tag',
                }
            ],
        }

        self.assertDictEqual(document, expected_result)
class TestElasticsearchMappingInheritance(TestCase):
    """Tests mapping/document generation for a model that inherits from an
    indexed parent (fields merged, content type chained, id shared)."""

    def assertDictEqual(self, a, b):
        # Canonical (sorted-keys) JSON comparison — same helper as the
        # sibling test classes in this module.
        default = JSONSerializer().default
        self.assertEqual(
            json.dumps(a, sort_keys=True, default=default), json.dumps(b, sort_keys=True, default=default)
        )

    def setUp(self):
        # Create ES mapping
        self.es_mapping = ElasticsearchSearchBackend.mapping_class(models.SearchTestChild)

        # Create ES document
        self.obj = models.SearchTestChild(title="Hello", subtitle="World", page_id=1)
        self.obj.save()
        self.obj.tags.add("a tag")

    def test_get_document_type(self):
        self.assertEqual(self.es_mapping.get_document_type(), 'searchtests_searchtest_searchtests_searchtestchild')

    def test_get_mapping(self):
        # Build mapping
        mapping = self.es_mapping.get_mapping()

        # Check
        expected_result = {
            'searchtests_searchtest_searchtests_searchtestchild': {
                'properties': {
                    # New
                    'extra_content': {'type': 'string', 'include_in_all': True},
                    'subtitle': {'type': 'string', 'include_in_all': True, 'index_analyzer': 'edgengram_analyzer'},
                    'page': {
                        'type': 'nested',
                        'properties': {
                            'title': {'type': 'string', 'include_in_all': True, 'index_analyzer': 'edgengram_analyzer'},
                            'search_description': {'type': 'string', 'include_in_all': True},
                            'live_filter': {'index': 'not_analyzed', 'type': 'boolean', 'include_in_all': False},
                        }
                    },

                    # Inherited
                    'pk': {'index': 'not_analyzed', 'type': 'string', 'store': 'yes', 'include_in_all': False},
                    'content_type': {'index': 'not_analyzed', 'type': 'string', 'include_in_all': False},
                    '_partials': {'index_analyzer': 'edgengram_analyzer', 'include_in_all': False, 'type': 'string'},
                    'live_filter': {'index': 'not_analyzed', 'type': 'boolean', 'include_in_all': False},
                    'published_date_filter': {'index': 'not_analyzed', 'type': 'date', 'include_in_all': False},
                    'title': {'type': 'string', 'include_in_all': True, 'index_analyzer': 'edgengram_analyzer'},
                    'title_filter': {'index': 'not_analyzed', 'type': 'string', 'include_in_all': False},
                    'content': {'type': 'string', 'include_in_all': True},
                    'callable_indexed_field': {'type': 'string', 'include_in_all': True},
                    'tags': {
                        'type': 'nested',
                        'properties': {
                            'name': {'type': 'string', 'include_in_all': True, 'index_analyzer': 'edgengram_analyzer'},
                            'slug_filter': {'index': 'not_analyzed', 'type': 'string', 'include_in_all': False},
                        }
                    },
                }
            }
        }

        self.assertDictEqual(mapping, expected_result)

    def test_get_document_id(self):
        # This must be tests_searchtest instead of 'tests_searchtest_tests_searchtestchild'
        # as it uses the contents base content type name.
        # This prevents the same object being accidentally indexed twice.
        self.assertEqual(self.es_mapping.get_document_id(self.obj), 'searchtests_searchtest:' + str(self.obj.pk))

    def test_get_document(self):
        # Build document
        document = self.es_mapping.get_document(self.obj)

        # Sort partials
        # (their generation order is not guaranteed, so normalise before comparing)
        if '_partials' in document:
            document['_partials'].sort()

        # Check
        expected_result = {
            # New
            'extra_content': '',
            'subtitle': 'World',
            'page': {
                'title': 'Root',
                'search_description': '',
                'live_filter': True,
            },

            # Changed
            'content_type': 'searchtests_searchtest_searchtests_searchtestchild',

            # Inherited
            'pk': str(self.obj.pk),
            '_partials': ['Hello', 'Root', 'World', 'a tag'],
            'live_filter': False,
            'published_date_filter': None,
            'title': 'Hello',
            'title_filter': 'Hello',
            'callable_indexed_field': 'Callable',
            'content': '',
            'tags': [
                {
                    'name': 'a tag',
                    'slug_filter': 'a-tag',
                }
            ],
        }

        self.assertDictEqual(document, expected_result)
class TestBackendConfiguration(TestCase):
    """Checks that connection settings are parsed into ``backend.hosts``."""

    def test_default_settings(self):
        # No params at all -> a single local, non-SSL node on the default port.
        backend = ElasticsearchSearchBackend(params={})

        self.assertEqual(len(backend.hosts), 1)
        host = backend.hosts[0]
        self.assertEqual(host['host'], 'localhost')
        self.assertEqual(host['port'], 9200)
        self.assertEqual(host['use_ssl'], False)

    def test_hosts(self):
        # The HOSTS setting should be passed through to es_hosts unchanged.
        backend = ElasticsearchSearchBackend(params={
            'HOSTS': [
                {
                    'host': '127.0.0.1',
                    'port': 9300,
                    'use_ssl': True,
                    'verify_certs': True,
                }
            ]
        })

        self.assertEqual(len(backend.hosts), 1)
        host = backend.hosts[0]
        self.assertEqual(host['host'], '127.0.0.1')
        self.assertEqual(host['port'], 9300)
        self.assertEqual(host['use_ssl'], True)

    def test_urls(self):
        # Backwards compatibility with the old URLS setting: each URL must be
        # decomposed into host/port/ssl (plus auth and path prefix when given).
        backend = ElasticsearchSearchBackend(params={
            'URLS': [
                'http://localhost:12345',
                'https://127.0.0.1:54321',
                'http://username:password@elasticsearch.mysite.com',
                'https://elasticsearch.mysite.com/hello',
            ],
        })

        self.assertEqual(len(backend.hosts), 4)

        expected = [
            dict(host='localhost', port=12345, use_ssl=False),
            dict(host='127.0.0.1', port=54321, use_ssl=True),
            dict(host='elasticsearch.mysite.com', port=80, use_ssl=False,
                 http_auth=('username', 'password')),
            dict(host='elasticsearch.mysite.com', port=443, use_ssl=True,
                 url_prefix='/hello'),
        ]
        for parsed, wanted in zip(backend.hosts, expected):
            for key, value in wanted.items():
                self.assertEqual(parsed[key], value)
@unittest.skipUnless(os.environ.get('ELASTICSEARCH_URL', False), "ELASTICSEARCH_URL not set")
class TestRebuilder(TestCase):
    """Tests for the plain (delete-and-recreate) index rebuilder.

    Requires a live Elasticsearch instance (ELASTICSEARCH_URL).
    """

    def assertDictEqual(self, a, b):
        # Compare via JSON so that Elasticsearch-serialisable values (dates
        # etc.) are normalised identically on both sides.
        default = JSONSerializer().default
        self.assertEqual(
            json.dumps(a, sort_keys=True, default=default), json.dumps(b, sort_keys=True, default=default)
        )

    def setUp(self):
        self.backend = get_search_backend('elasticsearch')
        self.es = self.backend.es
        self.rebuilder = self.backend.get_rebuilder()
        self.backend.reset_index()

    def test_start_creates_index(self):
        # First, make sure the index is deleted.
        # BUG FIX: this previously did ``except self.NotFoundError``, but
        # TestCase has no ``NotFoundError`` attribute, so whenever the index
        # was actually missing the except clause itself raised
        # AttributeError. Check for existence explicitly instead.
        if self.es.indices.exists(self.backend.index_name):
            self.es.indices.delete(self.backend.index_name)
        self.assertFalse(self.es.indices.exists(self.backend.index_name))

        # Run start
        self.rebuilder.start()

        # Check the index exists
        self.assertTrue(self.es.indices.exists(self.backend.index_name))

    def test_start_deletes_existing_index(self):
        # Put an alias into the index so we can check it was deleted
        self.es.indices.put_alias(name='this_index_should_be_deleted', index=self.backend.index_name)
        self.assertTrue(
            self.es.indices.exists_alias(name='this_index_should_be_deleted', index=self.backend.index_name)
        )

        # Run start
        self.rebuilder.start()

        # The alias should be gone (proving the index was deleted and recreated)
        self.assertFalse(
            self.es.indices.exists_alias(name='this_index_should_be_deleted', index=self.backend.index_name)
        )
@unittest.skipUnless(os.environ.get('ELASTICSEARCH_URL', False), "ELASTICSEARCH_URL not set")
class TestAtomicRebuilder(TestCase):
    """Tests for the atomic (alias-swapping) rebuilder: it builds a brand-new
    index behind the scenes and only flips the alias over in ``finish()``,
    so searches keep working against the old index during a rebuild.
    Requires a live Elasticsearch instance (ELASTICSEARCH_URL)."""
    def setUp(self):
        self.backend = get_search_backend('elasticsearch')
        # Force the atomic rebuilder instead of the backend's default one.
        self.backend.rebuilder_class = self.backend.atomic_rebuilder_class
        self.es = self.backend.es
        self.rebuilder = self.backend.get_rebuilder()
        self.backend.reset_index()
    def test_start_creates_new_index(self):
        # Rebuilder should make up a new index name that doesn't currently exist
        self.assertFalse(self.es.indices.exists(self.rebuilder.index.name))
        # Run start
        self.rebuilder.start()
        # Check the index exists
        self.assertTrue(self.es.indices.exists(self.rebuilder.index.name))
    def test_start_doesnt_delete_current_index(self):
        # Get current index name (the single index behind the alias)
        current_index_name = list(self.es.indices.get_alias(name=self.rebuilder.alias.name).keys())[0]
        # Run start
        self.rebuilder.start()
        # The index should still exist
        self.assertTrue(self.es.indices.exists(current_index_name))
        # And the alias should still point to it
        self.assertTrue(self.es.indices.exists_alias(name=self.rebuilder.alias.name, index=current_index_name))
    def test_finish_updates_alias(self):
        # Run start
        self.rebuilder.start()
        # Check that the alias doesn't point to new index
        self.assertFalse(
            self.es.indices.exists_alias(name=self.rebuilder.alias.name, index=self.rebuilder.index.name)
        )
        # Run finish
        self.rebuilder.finish()
        # Check that the alias now points to the new index
        self.assertTrue(self.es.indices.exists_alias(name=self.rebuilder.alias.name, index=self.rebuilder.index.name))
    def test_finish_deletes_old_index(self):
        # Get current index name (the single index behind the alias)
        current_index_name = list(self.es.indices.get_alias(name=self.rebuilder.alias.name).keys())[0]
        # Run start
        self.rebuilder.start()
        # Index should still exist (finish() is what retires it)
        self.assertTrue(self.es.indices.exists(current_index_name))
        # Run finish
        self.rebuilder.finish()
        # Index should be gone
        self.assertFalse(self.es.indices.exists(current_index_name))
class TestOldNameDeprecationWarning(TestCase):
    """Instantiating the legacy ``ElasticSearch`` alias must emit exactly one
    RemovedInWagtail18Warning."""

    def test_old_name_deprecation(self):
        from wagtail.wagtailsearch.backends.elasticsearch import ElasticSearch

        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter('always')
            ElasticSearch({})

        self.assertEqual(len(caught), 1)
        self.assertIs(caught[0].category, RemovedInWagtail18Warning)
| bsd-3-clause |
siosio/intellij-community | python/helpers/py3only/docutils/core.py | 44 | 29431 | # $Id: core.py 7466 2012-06-25 14:56:51Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
Calling the ``publish_*`` convenience functions (or instantiating a
`Publisher` object) with component names will result in default
behavior. For custom behavior (setting component options), create
custom component objects first, and pass *them* to
``publish_*``/`Publisher`. See `The Docutils Publisher`_.
.. _The Docutils Publisher: http://docutils.sf.net/docs/api/publisher.html
"""
__docformat__ = 'reStructuredText'
import pprint
import sys
import docutils.readers.doctree
from docutils import __version__, __version_details__, SettingsSpec
from docutils import frontend, io, utils, readers, writers
from docutils.frontend import OptionParser
from docutils.utils.error_reporting import ErrorOutput, ErrorString
class Publisher:

    """
    A facade encapsulating the high-level logic of a Docutils system:
    read a source, parse it into a document tree, apply transforms,
    and write the result to a destination.
    """

    def __init__(self, reader=None, parser=None, writer=None,
                 source=None, source_class=io.FileInput,
                 destination=None, destination_class=io.FileOutput,
                 settings=None):
        """
        Initial setup.  If any of `reader`, `parser`, or `writer` are not
        specified, the corresponding ``set_...`` method should be called with
        a component name (`set_reader` sets the parser as well).
        """

        self.document = None
        """The document tree (`docutils.nodes` objects)."""

        self.reader = reader
        """A `docutils.readers.Reader` instance."""

        self.parser = parser
        """A `docutils.parsers.Parser` instance."""

        self.writer = writer
        """A `docutils.writers.Writer` instance."""

        # Guard against the common mistake of passing component *names*
        # (strings) where component instances are expected.
        for component in 'reader', 'parser', 'writer':
            assert not isinstance(getattr(self, component), str), (
                'passed string "%s" as "%s" parameter; pass an instance, '
                'or use the "%s_name" parameter instead (in '
                'docutils.core.publish_* convenience functions).'
                % (getattr(self, component), component, component))

        self.source = source
        """The source of input data, a `docutils.io.Input` instance."""

        self.source_class = source_class
        """The class for dynamically created source objects."""

        self.destination = destination
        """The destination for docutils output, a `docutils.io.Output`
        instance."""

        self.destination_class = destination_class
        """The class for dynamically created destination objects."""

        self.settings = settings
        """An object containing Docutils settings as instance attributes.
        Set by `self.process_command_line()` or `self.get_settings()`."""

        self._stderr = ErrorOutput()

    def set_reader(self, reader_name, parser, parser_name):
        """Set `self.reader` by name (also adopts the reader's parser)."""
        reader_class = readers.get_reader_class(reader_name)
        self.reader = reader_class(parser, parser_name)
        self.parser = self.reader.parser

    def set_writer(self, writer_name):
        """Set `self.writer` by name."""
        writer_class = writers.get_writer_class(writer_name)
        self.writer = writer_class()

    def set_components(self, reader_name, parser_name, writer_name):
        """Instantiate any of reader/parser/writer not supplied explicitly."""
        if self.reader is None:
            self.set_reader(reader_name, self.parser, parser_name)
        if self.parser is None:
            if self.reader.parser is None:
                self.reader.set_parser(parser_name)
            self.parser = self.reader.parser
        if self.writer is None:
            self.set_writer(writer_name)

    def setup_option_parser(self, usage=None, description=None,
                            settings_spec=None, config_section=None,
                            **defaults):
        """Return an `OptionParser` fed by all components' settings specs."""
        if config_section:
            if not settings_spec:
                settings_spec = SettingsSpec()
            settings_spec.config_section = config_section
            parts = config_section.split()
            if len(parts) > 1 and parts[-1] == 'application':
                settings_spec.config_section_dependencies = ['applications']
        #@@@ Add self.source & self.destination to components in future?
        option_parser = OptionParser(
            components=(self.parser, self.reader, self.writer, settings_spec),
            defaults=defaults, read_config_files=True,
            usage=usage, description=description)
        return option_parser

    def get_settings(self, usage=None, description=None,
                     settings_spec=None, config_section=None, **defaults):
        """
        Set and return default settings (overrides in `defaults` dict).

        Set components first (`self.set_reader` & `self.set_writer`).
        Explicitly setting `self.settings` disables command line option
        processing from `self.publish()`.
        """
        option_parser = self.setup_option_parser(
            usage, description, settings_spec, config_section, **defaults)
        self.settings = option_parser.get_default_values()
        return self.settings

    def process_programmatic_settings(self, settings_spec,
                                      settings_overrides,
                                      config_section):
        """Initialise `self.settings` for programmatic (non-CLI) use."""
        if self.settings is None:
            defaults = (settings_overrides or {}).copy()
            # Propagate exceptions by default when used programmatically:
            defaults.setdefault('traceback', True)
            self.get_settings(settings_spec=settings_spec,
                              config_section=config_section,
                              **defaults)

    def process_command_line(self, argv=None, usage=None, description=None,
                             settings_spec=None, config_section=None,
                             **defaults):
        """
        Pass an empty list to `argv` to avoid reading `sys.argv` (the
        default).

        Set components first (`self.set_reader` & `self.set_writer`).
        """
        option_parser = self.setup_option_parser(
            usage, description, settings_spec, config_section, **defaults)
        if argv is None:
            argv = sys.argv[1:]
        # Note: `sys.argv` is already text under Python 3 (the only Python
        # this module's syntax supports); the former Python-2 decode step
        # here was dead code and has been removed.
        self.settings = option_parser.parse_args(argv)

    def set_io(self, source_path=None, destination_path=None):
        """Create source/destination objects if not already supplied."""
        if self.source is None:
            self.set_source(source_path=source_path)
        if self.destination is None:
            self.set_destination(destination_path=destination_path)

    def set_source(self, source=None, source_path=None):
        if source_path is None:
            source_path = self.settings._source
        else:
            self.settings._source = source_path
        # The previous implementation wrapped this call in a try/except
        # TypeError whose handler repeated the *identical* call -- a no-op
        # left over from the removal of the `handle_io_errors` argument.
        # A single call is equivalent.
        self.source = self.source_class(
            source=source, source_path=source_path,
            encoding=self.settings.input_encoding)

    def set_destination(self, destination=None, destination_path=None):
        if destination_path is None:
            destination_path = self.settings._destination
        else:
            self.settings._destination = destination_path
        self.destination = self.destination_class(
            destination=destination, destination_path=destination_path,
            encoding=self.settings.output_encoding,
            error_handler=self.settings.output_encoding_error_handler)

    def apply_transforms(self):
        """Populate the transformer from all components, then run it."""
        self.document.transformer.populate_from_components(
            (self.source, self.reader, self.reader.parser, self.writer,
             self.destination))
        self.document.transformer.apply_transforms()

    def publish(self, argv=None, usage=None, description=None,
                settings_spec=None, settings_overrides=None,
                config_section=None, enable_exit_status=False):
        """
        Process command line options and arguments (if `self.settings` not
        already set), run `self.reader` and then `self.writer`.  Return
        `self.writer`'s output.
        """
        exit_now = None     # renamed from `exit` to avoid shadowing the builtin
        try:
            if self.settings is None:
                self.process_command_line(
                    argv, usage, description, settings_spec, config_section,
                    **(settings_overrides or {}))
            self.set_io()
            self.document = self.reader.read(self.source, self.parser,
                                             self.settings)
            self.apply_transforms()
            output = self.writer.write(self.document, self.destination)
            self.writer.assemble_parts()
        except SystemExit as error:
            # Preserve the requested exit status; defer exiting until the
            # debugging dumps below have run.
            exit_now = True
            exit_status = error.code
        except Exception as error:
            if not self.settings:       # exception too early to report nicely
                raise
            if self.settings.traceback: # Propagate exceptions?
                self.debugging_dumps()
                raise
            self.report_Exception(error)
            exit_now = True
            exit_status = 1
        self.debugging_dumps()
        if (enable_exit_status and self.document
            and (self.document.reporter.max_level
                 >= self.settings.exit_status_level)):
            sys.exit(self.document.reporter.max_level + 10)
        elif exit_now:
            sys.exit(exit_status)
        return output

    def debugging_dumps(self):
        """Write any requested internal dumps (settings, tree, transforms,
        pseudo-XML) to stderr, per the ``dump_*`` settings."""
        if not self.document:
            return
        if self.settings.dump_settings:
            print('\n::: Runtime settings:', file=self._stderr)
            print(pprint.pformat(self.settings.__dict__), file=self._stderr)
        if self.settings.dump_internals:
            print('\n::: Document internals:', file=self._stderr)
            print(pprint.pformat(self.document.__dict__), file=self._stderr)
        if self.settings.dump_transforms:
            print('\n::: Transforms applied:', file=self._stderr)
            print((' (priority, transform class, '
                   'pending node details, keyword args)'), file=self._stderr)
            print(pprint.pformat(
                [(priority, '%s.%s' % (xclass.__module__, xclass.__name__),
                  pending and pending.details, kwargs)
                 for priority, xclass, pending, kwargs
                 in self.document.transformer.applied]), file=self._stderr)
        if self.settings.dump_pseudo_xml:
            print('\n::: Pseudo-XML:', file=self._stderr)
            print(self.document.pformat().encode(
                'raw_unicode_escape'), file=self._stderr)

    def report_Exception(self, error):
        """Print a user-oriented report for `error`, dispatching on its type."""
        if isinstance(error, utils.SystemMessage):
            self.report_SystemMessage(error)
        elif isinstance(error, UnicodeEncodeError):
            self.report_UnicodeError(error)
        elif isinstance(error, io.InputError):
            self._stderr.write('Unable to open source file for reading:\n'
                               '  %s\n' % ErrorString(error))
        elif isinstance(error, io.OutputError):
            self._stderr.write(
                'Unable to open destination file for writing:\n'
                '  %s\n' % ErrorString(error))
        else:
            print('%s' % ErrorString(error), file=self._stderr)
            print(("""\
Exiting due to error.  Use "--traceback" to diagnose.
Please report errors to <docutils-users@lists.sf.net>.
Include "--traceback" output, Docutils version (%s [%s]),
Python version (%s), your OS type & version, and the
command line used.""" % (__version__, __version_details__,
                         sys.version.split()[0])), file=self._stderr)

    def report_SystemMessage(self, error):
        print(('Exiting due to level-%s (%s) system message.'
               % (error.level,
                  utils.Reporter.levels[error.level])), file=self._stderr)

    def report_UnicodeError(self, error):
        # Show the offending characters both XML-escaped and backslash-escaped
        # so the user can locate them in the output regardless of format.
        data = error.object[error.start:error.end]
        self._stderr.write(
            '%s\n'
            '\n'
            'The specified output encoding (%s) cannot\n'
            'handle all of the output.\n'
            'Try setting "--output-encoding-error-handler" to\n'
            '\n'
            '* "xmlcharrefreplace" (for HTML & XML output);\n'
            '  the output will contain "%s" and should be usable.\n'
            '* "backslashreplace" (for other output formats);\n'
            '  look for "%s" in the output.\n'
            '* "replace"; look for "?" in the output.\n'
            '\n'
            '"--output-encoding-error-handler" is currently set to "%s".\n'
            '\n'
            'Exiting due to error.  Use "--traceback" to diagnose.\n'
            'If the advice above doesn\'t eliminate the error,\n'
            'please report it to <docutils-users@lists.sf.net>.\n'
            'Include "--traceback" output, Docutils version (%s),\n'
            'Python version (%s), your OS type & version, and the\n'
            'command line used.\n'
            % (ErrorString(error),
               self.settings.output_encoding,
               data.encode('ascii', 'xmlcharrefreplace'),
               data.encode('ascii', 'backslashreplace'),
               self.settings.output_encoding_error_handler,
               __version__, sys.version.split()[0]))
# Shared defaults for the command-line front ends below
# (`publish_cmdline` and `publish_cmdline_to_binary`).
# ``%prog`` is expanded to the program name by the option parser.
default_usage = '%prog [options] [<source> [<destination>]]'
default_description = ('Reads from <source> (default is stdin) and writes to '
                       '<destination> (default is stdout). See '
                       '<http://docutils.sf.net/docs/user/config.html> for '
                       'the full reference.')
def publish_cmdline(reader=None, reader_name='standalone',
                    parser=None, parser_name='restructuredtext',
                    writer=None, writer_name='pseudoxml',
                    settings=None, settings_spec=None,
                    settings_overrides=None, config_section=None,
                    enable_exit_status=True, argv=None,
                    usage=default_usage, description=default_description):
    """
    Set up & run a `Publisher` for command-line-based file I/O (input and
    output file paths taken automatically from the command line).  Return the
    encoded string output as well.

    Parameters: see `publish_programmatically` for the remainder.

    - `argv`: Command-line argument list to use instead of ``sys.argv[1:]``.
    - `usage`: Usage string, output if there's a problem parsing the command
      line.
    - `description`: Program description, output for the "--help" option
      (along with command-line option descriptions).
    """
    publisher = Publisher(reader, parser, writer, settings=settings)
    publisher.set_components(reader_name, parser_name, writer_name)
    return publisher.publish(
        argv, usage, description, settings_spec, settings_overrides,
        config_section=config_section,
        enable_exit_status=enable_exit_status)
def publish_file(source=None, source_path=None,
                 destination=None, destination_path=None,
                 reader=None, reader_name='standalone',
                 parser=None, parser_name='restructuredtext',
                 writer=None, writer_name='pseudoxml',
                 settings=None, settings_spec=None, settings_overrides=None,
                 config_section=None, enable_exit_status=False):
    """
    Set up & run a `Publisher` for programmatic use with file-like I/O.
    Return the encoded string output as well.

    Parameters: see `publish_programmatically`.
    """
    component_args = dict(
        reader=reader, reader_name=reader_name,
        parser=parser, parser_name=parser_name,
        writer=writer, writer_name=writer_name,
        settings=settings, settings_spec=settings_spec,
        settings_overrides=settings_overrides,
        config_section=config_section,
        enable_exit_status=enable_exit_status)
    output, _publisher = publish_programmatically(
        source_class=io.FileInput, source=source, source_path=source_path,
        destination_class=io.FileOutput,
        destination=destination, destination_path=destination_path,
        **component_args)
    return output
def publish_string(source, source_path=None, destination_path=None,
                   reader=None, reader_name='standalone',
                   parser=None, parser_name='restructuredtext',
                   writer=None, writer_name='pseudoxml',
                   settings=None, settings_spec=None,
                   settings_overrides=None, config_section=None,
                   enable_exit_status=False):
    """
    Set up & run a `Publisher` for programmatic use with string I/O.  Return
    the encoded string or Unicode string output.

    For encoded string output, be sure to set the 'output_encoding' setting
    to the desired encoding; set it to 'unicode' for unencoded Unicode string
    output::

        publish_string(..., settings_overrides={'output_encoding': 'unicode'})

    Similarly for Unicode string input (`source`)::

        publish_string(..., settings_overrides={'input_encoding': 'unicode'})

    Parameters: see `publish_programmatically`.
    """
    component_args = dict(
        reader=reader, reader_name=reader_name,
        parser=parser, parser_name=parser_name,
        writer=writer, writer_name=writer_name,
        settings=settings, settings_spec=settings_spec,
        settings_overrides=settings_overrides,
        config_section=config_section,
        enable_exit_status=enable_exit_status)
    output, _publisher = publish_programmatically(
        source_class=io.StringInput, source=source, source_path=source_path,
        destination_class=io.StringOutput,
        destination=None, destination_path=destination_path,
        **component_args)
    return output
def publish_parts(source, source_path=None, source_class=io.StringInput,
                  destination_path=None,
                  reader=None, reader_name='standalone',
                  parser=None, parser_name='restructuredtext',
                  writer=None, writer_name='pseudoxml',
                  settings=None, settings_spec=None,
                  settings_overrides=None, config_section=None,
                  enable_exit_status=False):
    """
    Set up & run a `Publisher`, and return a dictionary of document parts.

    Dictionary keys are the names of parts, and values are Unicode strings;
    encoding is up to the client.  For programmatic use with string I/O.

    For encoded string input, be sure to set the 'input_encoding' setting to
    the desired encoding; set it to 'unicode' for unencoded Unicode string
    input::

        publish_parts(..., settings_overrides={'input_encoding': 'unicode'})

    Parameters: see `publish_programmatically`.
    """
    _output, publisher = publish_programmatically(
        source=source, source_path=source_path, source_class=source_class,
        destination_class=io.StringOutput,
        destination=None, destination_path=destination_path,
        reader=reader, reader_name=reader_name,
        parser=parser, parser_name=parser_name,
        writer=writer, writer_name=writer_name,
        settings=settings, settings_spec=settings_spec,
        settings_overrides=settings_overrides,
        config_section=config_section,
        enable_exit_status=enable_exit_status)
    # The interesting result here is the writer's parts dict, not the
    # serialized output.
    return publisher.writer.parts
def publish_doctree(source, source_path=None,
                    source_class=io.StringInput,
                    reader=None, reader_name='standalone',
                    parser=None, parser_name='restructuredtext',
                    settings=None, settings_spec=None,
                    settings_overrides=None, config_section=None,
                    enable_exit_status=False):
    """
    Set up & run a `Publisher` for programmatic use with string I/O.
    Return the document tree.

    For encoded string input, be sure to set the 'input_encoding' setting to
    the desired encoding.  Set it to 'unicode' for unencoded Unicode string
    input.  Here's one way::

        publish_doctree(..., settings_overrides={'input_encoding': 'unicode'})

    Parameters: see `publish_programmatically`.
    """
    pub = Publisher(reader=reader, parser=parser, writer=None,
                    settings=settings,
                    source_class=source_class,
                    destination_class=io.NullOutput)
    pub.set_components(reader_name, parser_name, 'null')
    pub.process_programmatic_settings(
        settings_spec, settings_overrides, config_section)
    pub.set_source(source, source_path)
    pub.set_destination(None, None)
    # The writer is 'null', so publish() is run purely for its side effects
    # (reading, transforming, error/exit handling); the previously assigned
    # `output` local was unused and has been removed.
    pub.publish(enable_exit_status=enable_exit_status)
    return pub.document
def publish_from_doctree(document, destination_path=None,
                         writer=None, writer_name='pseudoxml',
                         settings=None, settings_spec=None,
                         settings_overrides=None, config_section=None,
                         enable_exit_status=False):
    """
    Set up & run a `Publisher` that renders an existing document tree,
    for programmatic use with string I/O.  Return the encoded string output.

    Note that document.settings is overridden; to keep the settings of the
    original `document`, pass ``settings=document.settings``.  New
    document.transformer and document.reporter objects are generated as well.

    For encoded string output, be sure to set the 'output_encoding' setting
    to the desired encoding; set it to 'unicode' for unencoded Unicode string
    output::

        publish_from_doctree(
            ..., settings_overrides={'output_encoding': 'unicode'})

    Parameters: `document` is a `docutils.nodes.document` object, an existing
    document tree.

    Other parameters: see `publish_programmatically`.
    """
    # A doctree reader with a no-op parser: the "parsing" is already done.
    doctree_reader = docutils.readers.doctree.Reader(parser_name='null')
    publisher = Publisher(doctree_reader, None, writer,
                          source=io.DocTreeInput(document),
                          destination_class=io.StringOutput,
                          settings=settings)
    if not writer and writer_name:
        publisher.set_writer(writer_name)
    publisher.process_programmatic_settings(
        settings_spec, settings_overrides, config_section)
    publisher.set_destination(None, destination_path)
    return publisher.publish(enable_exit_status=enable_exit_status)
def publish_cmdline_to_binary(reader=None, reader_name='standalone',
                              parser=None, parser_name='restructuredtext',
                              writer=None, writer_name='pseudoxml',
                              settings=None, settings_spec=None,
                              settings_overrides=None, config_section=None,
                              enable_exit_status=True, argv=None,
                              usage=default_usage, description=default_description,
                              destination=None, destination_class=io.BinaryFileOutput
                              ):
    """
    Set up & run a `Publisher` for command-line-based file I/O (input and
    output file paths taken automatically from the command line).  Return the
    encoded string output as well.

    Identical to `publish_cmdline` except that it writes through
    `io.BinaryFileOutput` instead of `io.FileOutput`.

    Parameters: see `publish_programmatically` for the remainder.

    - `argv`: Command-line argument list to use instead of ``sys.argv[1:]``.
    - `usage`: Usage string, output if there's a problem parsing the command
      line.
    - `description`: Program description, output for the "--help" option
      (along with command-line option descriptions).
    """
    publisher = Publisher(reader, parser, writer, settings=settings,
                          destination_class=destination_class)
    publisher.set_components(reader_name, parser_name, writer_name)
    return publisher.publish(
        argv, usage, description, settings_spec, settings_overrides,
        config_section=config_section,
        enable_exit_status=enable_exit_status)
def publish_programmatically(source_class, source, source_path,
                             destination_class, destination, destination_path,
                             reader, reader_name,
                             parser, parser_name,
                             writer, writer_name,
                             settings, settings_spec,
                             settings_overrides, config_section,
                             enable_exit_status):
    """
    Set up & run a `Publisher` for custom programmatic use.  Return the
    encoded string output and the Publisher object.
    Applications should not need to call this function directly.  If it does
    seem to be necessary to call this function directly, please write to the
    Docutils-develop mailing list
    <http://docutils.sf.net/docs/user/mailing-lists.html#docutils-develop>.
    Parameters:
    * `source_class` **required**: The class for dynamically created source
      objects.  Typically `io.FileInput` or `io.StringInput`.
    * `source`: Type depends on `source_class`:
      - If `source_class` is `io.FileInput`: Either a file-like object
        (must have 'read' and 'close' methods), or ``None``
        (`source_path` is opened).  If neither `source` nor
        `source_path` are supplied, `sys.stdin` is used.
      - If `source_class` is `io.StringInput` **required**: The input
        string, either an encoded 8-bit string (set the
        'input_encoding' setting to the correct encoding) or a Unicode
        string (set the 'input_encoding' setting to 'unicode').
    * `source_path`: Type depends on `source_class`:
      - `io.FileInput`: Path to the input file, opened if no `source`
        supplied.
      - `io.StringInput`: Optional.  Path to the file or object that produced
        `source`.  Only used for diagnostic output.
    * `destination_class` **required**: The class for dynamically created
      destination objects.  Typically `io.FileOutput` or `io.StringOutput`.
    * `destination`: Type depends on `destination_class`:
      - `io.FileOutput`: Either a file-like object (must have 'write' and
        'close' methods), or ``None`` (`destination_path` is opened).  If
        neither `destination` nor `destination_path` are supplied,
        `sys.stdout` is used.
      - `io.StringOutput`: Not used; pass ``None``.
    * `destination_path`: Type depends on `destination_class`:
      - `io.FileOutput`: Path to the output file.  Opened if no `destination`
        supplied.
      - `io.StringOutput`: Path to the file or object which will receive the
        output; optional.  Used for determining relative paths (stylesheets,
        source links, etc.).
    * `reader`: A `docutils.readers.Reader` object.
    * `reader_name`: Name or alias of the Reader class to be instantiated if
      no `reader` supplied.
    * `parser`: A `docutils.parsers.Parser` object.
    * `parser_name`: Name or alias of the Parser class to be instantiated if
      no `parser` supplied.
    * `writer`: A `docutils.writers.Writer` object.
    * `writer_name`: Name or alias of the Writer class to be instantiated if
      no `writer` supplied.
    * `settings`: A runtime settings (`docutils.frontend.Values`) object, for
      dotted-attribute access to runtime settings.  It's the end result of the
      `SettingsSpec`, config file, and option processing.  If `settings` is
      passed, it's assumed to be complete and no further setting/config/option
      processing is done.
    * `settings_spec`: A `docutils.SettingsSpec` subclass or object.  Provides
      extra application-specific settings definitions independently of
      components.  In other words, the application becomes a component, and
      its settings data is processed along with that of the other components.
      Used only if no `settings` specified.
    * `settings_overrides`: A dictionary containing application-specific
      settings defaults that override the defaults of other components.
      Used only if no `settings` specified.
    * `config_section`: A string, the name of the configuration file section
      for this application.  Overrides the ``config_section`` attribute
      defined by `settings_spec`.  Used only if no `settings` specified.
    * `enable_exit_status`: Boolean; enable exit status at end of processing?
    """
    # Assemble the publisher from the supplied (or named) components.
    pub = Publisher(reader, parser, writer, settings=settings,
                    source_class=source_class,
                    destination_class=destination_class)
    pub.set_components(reader_name, parser_name, writer_name)
    # Resolve settings (no-op if a complete `settings` object was passed).
    pub.process_programmatic_settings(
        settings_spec, settings_overrides, config_section)
    # Wire up I/O, then run the full read/transform/write pipeline.
    pub.set_source(source, source_path)
    pub.set_destination(destination, destination_path)
    output = pub.publish(enable_exit_status=enable_exit_status)
    return output, pub
| apache-2.0 |
suyashphadtare/vestasi-update-erp | erpnext/patches/v4_0/global_defaults_to_system_settings.py | 39 | 1360 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from collections import Counter
from frappe.core.doctype.user.user import STANDARD_USERS
def execute():
    """Migrate settings from Global Defaults into the System Settings single.

    - Copies time zone / formatting / session fields across, but only where
      the System Settings field is still empty.
    - Derives the system language from the most common language among real
      (non-standard) users.
    - Re-saves both singles so validations and defaults are applied.
    """
    frappe.reload_doc("core", "doctype", "system_settings")
    system_settings = frappe.get_doc("System Settings")

    # set values from global_defaults
    global_defaults = frappe.db.get_value("Global Defaults", None,
        ["time_zone", "date_format", "number_format", "float_precision", "session_expiry"], as_dict=True)

    if global_defaults:
        for key, val in global_defaults.items():
            if not system_settings.get(key):
                system_settings.set(key, val)

    # language
    if not system_settings.get("language"):
        # find most common language among non-standard users
        lang = frappe.db.sql_list("""select language from `tabUser`
            where ifnull(language, '')!='' and language not like "Loading%%" and name not in ({standard_users})""".format(
            standard_users=", ".join(["%s"]*len(STANDARD_USERS))), tuple(STANDARD_USERS))
        lang = Counter(lang).most_common(1)
        # `most_common(1)` returns [(language, count)]; fall back to "english"
        # when no user has a language set.  (This replaces the fragile
        # ``cond and a or b`` idiom, which misfires if ``a`` is falsy.)
        system_settings.language = lang[0][0] if lang else "english"

    system_settings.ignore_mandatory = True
    system_settings.save()

    global_defaults = frappe.get_doc("Global Defaults")
    global_defaults.ignore_mandatory = True
    global_defaults.save()
| agpl-3.0 |
vityagi/azure-linux-extensions | DSC/test/env.py | 8 | 1260 | #!/usr/bin/env python
#
#CustomScript extension
#
# Copyright 2014 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import json

# Make the installer directory (the repo root, one level above this test
# directory) importable before the extension code is loaded.
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(root)

manifestFile = os.path.join(root, 'HandlerManifest.json')
if os.path.exists(manifestFile):
    # Read the handler manifest to discover the extension's name/version.
    # Use a context manager so the file handle is closed even if
    # json.load() raises (the old code leaked it in that case).
    with open(manifestFile) as jsonData:
        manifest = json.load(jsonData)
    extName = "{0}-{1}".format(manifest[0]["name"], manifest[0]["version"])
    print("Start test: %s" % extName)
    # Switch into the extension's waagent installation directory if present,
    # so relative paths in the tests resolve against the deployed files.
    extDir = os.path.join("/var/lib/waagent", extName)
    if os.path.isdir(extDir):
        os.chdir(extDir)
        print("Switching to dir: %s" % os.getcwd())
| apache-2.0 |
Nikea/VisTrails | examples/vtk_examples/Annotation/cubeAxes.py | 15 | 3797 | #!/usr/bin/env python
# This example demonstrates the use of vtkCubeAxesActor2D to indicate
# the position in space that the camera is currently viewing. The
# vtkCubeAxesActor2D draws axes on the bounding box of the data set
# and labels the axes with x-y-z coordinates.
import vtk
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Create a vtkBYUReader and read in a data set.
fohe = vtk.vtkBYUReader()
fohe.SetGeometryFileName(VTK_DATA_ROOT + "/Data/teapot.g")

# Create a vtkPolyDataNormals filter to calculate the normals of the
# data set.
normals = vtk.vtkPolyDataNormals()
normals.SetInputConnection(fohe.GetOutputPort())

# Set up the associated mapper and actor.  A vtkLODActor automatically
# falls back to lower-detail representations during interaction.
foheMapper = vtk.vtkPolyDataMapper()
foheMapper.SetInputConnection(normals.GetOutputPort())
foheActor = vtk.vtkLODActor()
foheActor.SetMapper(foheMapper)

# Create a vtkOutlineFilter to draw the bounding box of the data set.
# Also create the associated mapper and actor.
outline = vtk.vtkOutlineFilter()
outline.SetInputConnection(normals.GetOutputPort())
mapOutline = vtk.vtkPolyDataMapper()
mapOutline.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(mapOutline)
outlineActor.GetProperty().SetColor(0, 0, 0)

# Create a vtkCamera, and set the camera parameters.
camera = vtk.vtkCamera()
camera.SetClippingRange(1.60187, 20.0842)
camera.SetFocalPoint(0.21406, 1.5, 0)
camera.SetPosition(8.3761, 4.94858, 4.12505)
camera.SetViewUp(0.180325, 0.549245, -0.815974)

# Create a vtkLight, and set the light parameters.
light = vtk.vtkLight()
light.SetFocalPoint(0.21406, 1.5, 0)
light.SetPosition(8.3761, 4.94858, 4.12505)

# Create the Renderers.  Assign them the appropriate viewport
# coordinates, active camera, and light.  The two renderers split the
# window into left/right halves that share one camera and one light.
ren = vtk.vtkRenderer()
ren.SetViewport(0, 0, 0.5, 1.0)
ren.SetActiveCamera(camera)
ren.AddLight(light)
ren2 = vtk.vtkRenderer()
ren2.SetViewport(0.5, 0, 1.0, 1.0)
ren2.SetActiveCamera(camera)
ren2.AddLight(light)

# Create the RenderWindow and RenderWindowInteractor.
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
renWin.AddRenderer(ren2)
renWin.SetWindowName("VTK - Cube Axes")
renWin.SetSize(600, 300)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)

# Add the actors to the renderer, and set the background.
ren.AddViewProp(foheActor)
ren.AddViewProp(outlineActor)
ren2.AddViewProp(foheActor)
ren2.AddViewProp(outlineActor)
ren.SetBackground(0.1, 0.2, 0.4)
ren2.SetBackground(0.1, 0.2, 0.4)

# Create a text property shared by both cube-axes actors.
tprop = vtk.vtkTextProperty()
tprop.SetColor(1, 1, 1)
tprop.ShadowOn()

# Create a vtkCubeAxesActor2D.  Use the outer edges of the bounding box to
# draw the axes.  Add the actor to the left renderer.
axes = vtk.vtkCubeAxesActor2D()
axes.SetInput(normals.GetOutput())
axes.SetCamera(ren.GetActiveCamera())
axes.SetLabelFormat("%6.4g")
axes.SetFlyModeToOuterEdges()
axes.SetFontFactor(0.8)
axes.SetAxisTitleTextProperty(tprop)
axes.SetAxisLabelTextProperty(tprop)
ren.AddViewProp(axes)

# Create a vtkCubeAxesActor2D.  Use the closest vertex to the camera to
# determine where to draw the axes.  Add the actor to the right renderer.
axes2 = vtk.vtkCubeAxesActor2D()
axes2.SetViewProp(foheActor)
axes2.SetCamera(ren2.GetActiveCamera())
axes2.SetLabelFormat("%6.4g")
axes2.SetFlyModeToClosestTriad()
axes2.SetFontFactor(0.8)
axes2.ScalingOff()
axes2.SetAxisTitleTextProperty(tprop)
axes2.SetAxisLabelTextProperty(tprop)
ren2.AddViewProp(axes2)
def CheckAbort(obj, event):
    """Abort-check observer; ``obj`` is the render window firing the event."""
    # Cancel an in-progress render as soon as any user event is queued so
    # the interface stays responsive during slow renders.
    if obj.GetEventPending():
        obj.SetAbortRender(1)
# Register the abort-check observer, then start the interactive event loop.
renWin.AddObserver("AbortCheckEvent", CheckAbort)
iren.Initialize()
renWin.Render()
iren.Start()
| bsd-3-clause |
sasshka/x264 | tools/digress/errors.py | 144 | 1065 | """
Digress errors.
"""
class DigressError(Exception):
    """Base class for every error raised by Digress."""

class NoSuchTestError(DigressError):
    """The requested test does not exist."""

class DisabledTestError(DigressError):
    """The test is disabled."""

class SkippedTestError(DigressError):
    """The test is marked as skipped."""

class DisabledCaseError(DigressError):
    """The case is marked as disabled."""

class SkippedCaseError(DigressError):
    """The case is marked as skipped."""

class FailedTestError(DigressError):
    """The test failed."""

class ComparisonError(DigressError):
    """The comparison failed."""

class IncomparableError(DigressError):
    """The values cannot be compared."""

class AlreadyRunError(DigressError):
    """The test or case has already been run."""

class SCMError(DigressError):
    """An error occurred in the source-control backend."""

    def __init__(self, message):
        # Collapse (possibly multi-line) SCM output onto a single line so it
        # reads cleanly in log output.
        self.message = " ".join(message.split("\n"))

    def __str__(self):
        return self.message
| gpl-2.0 |
Ensighten/ansible | test/units/playbook/test_playbook.py | 290 | 2230 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.playbook import Playbook
from ansible.vars import VariableManager
from units.mock.loader import DictDataLoader
class TestPlaybook(unittest.TestCase):
    """Unit tests for loading Playbook objects from YAML sources."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_empty_playbook(self):
        # An empty loader should still produce a (playless) Playbook.
        loader = DictDataLoader({})
        playbook = Playbook(loader=loader)

    def test_basic_playbook(self):
        loader = DictDataLoader({
            "test_file.yml": """
            - hosts: all
            """,
        })
        playbook = Playbook.load("test_file.yml", loader=loader)
        plays = playbook.get_plays()

    def test_bad_playbook_files(self):
        loader = DictDataLoader({
            # represents a playbook which is not a list of plays
            "bad_list.yml": """
            foo: bar
            """,
            # represents a playbook where a play entry is mis-formatted
            "bad_entry.yml": """
            -
            - "This should be a mapping..."
            """,
        })
        vm = VariableManager()
        # Both malformed files must be rejected with a parser error.
        for bad_file in ("bad_list.yml", "bad_entry.yml"):
            with self.assertRaises(AnsibleParserError):
                Playbook.load(bad_file, vm, loader)
| gpl-3.0 |
stdweird/aquilon | lib/python2.6/aquilon/aqdb/shell.py | 2 | 3101 | #!/usr/bin/env python2.6
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
# Make the in-tree aquilon libraries importable when run from a checkout
# (this file lives two levels below the library root).
_DIR = os.path.dirname(os.path.realpath(__file__))
_LIBDIR = os.path.join(_DIR, '..', '..')
if _LIBDIR not in sys.path:
    sys.path.insert(0, _LIBDIR)
import aquilon.aqdb.depends # pylint: disable=W0611
import argparse
from ipaddr import IPv4Address, IPv4Network # pylint: disable=W0611
import ms.modulecmd
from IPython.config.loader import Config as IPyConfig
from IPython.frontend.terminal.embed import InteractiveShellEmbed
from aquilon.config import Config # pylint: disable=W0611
# pylint: disable=W0614
from aquilon.aqdb.model import * # pylint: disable=W0401
from aquilon.aqdb.dsdb import * # pylint: disable=W0401
from aquilon.aqdb.db_factory import DbFactory
# Module-level database handles, shared by main() and by users of the
# interactive shell (both "session" and the short alias "s" are exposed).
db = DbFactory()
Base.metadata.bind = db.engine
session = s = db.Session()

# Banner printed when the embedded IPython shell starts.
_banner = '<<<Welcome to the Aquilon shell (courtesy of IPython). Ctrl-D to quit>>>\n'
def main():
    """Parse command-line options and launch an IPython shell bound to aqdb.

    Side effects: may turn on SQLAlchemy statement echoing (-v) and blocks
    in the interactive shell until the user exits.
    """
    parser = argparse.ArgumentParser(
        description='An ipython shell, useful for testing and exploring aqdb')
    parser.add_argument('-v', action='count', dest='verbose',
                        help='increase verbosity by adding more (-vv), etc.')
    opts = parser.parse_args()

    # argparse leaves 'verbose' as None when -v is absent; test truthiness
    # instead of relying on a None >= 1 comparison.
    if opts.verbose:
        db.engine.echo = True

    if db.engine.url.drivername == 'sqlite':
        # For sqlite the file path is the most useful prompt.
        prompt = str(db.engine.url).split('///')[1]
    else:
        # couldn't use the underlying dbapi connection.current_schema
        # from the engine as it too is ''
        user = db.engine.url.username or os.environ.get("USER")
        host = db.engine.url.host or 'LOCALHOST'
        prompt = '%s@%s' % (user, host)
        if db.engine.url.database:
            # Bug fix: the database name was never interpolated, leaving a
            # literal "/%s" in the prompt.
            prompt += '/%s' % db.engine.url.database
    prompt += '> '

    # Configure the embedded IPython shell: custom prompt, pretty-printing,
    # and no extra blank lines between inputs/outputs.
    ipycfg = IPyConfig()
    ipycfg.PromptManager.in_template = prompt
    ipycfg.PlaintextFormatter.pprint = True
    ipycfg.InteractiveShell.separate_in = ''
    ipycfg.InteractiveShell.separate_out = ''
    ipycfg.InteractiveShell.separate_out2 = ''
    ipycfg.InteractiveShell.colors = 'Linux'
    ipshell = InteractiveShellEmbed(config=ipycfg, banner1=_banner)
    ipshell()
def graph_schema(db=db, file_name="/tmp/aqdb_schema"):
    """Render the aqdb schema as <file_name>.png and <file_name>.dot."""
    # Imported lazily so the shell starts even without graphviz support.
    import aquilon.aqdb.utils.schema2dot as s2d
    for extension, writer in (("png", s2d.write_schema_png),
                              ("dot", s2d.write_schema_dot)):
        writer(db.meta, "%s.%s" % (file_name, extension))
if __name__ == '__main__':
    # Script entry point: parse arguments and start the embedded shell.
    main()
| apache-2.0 |
pscedu/slash2 | utils/ssh.py | 2 | 4852 | import paramiko, getpass, logging
import os, re
from time import sleep
# Module-wide logger shared by the SSH helpers below.
log = logging.getLogger("slash2")
class SSH(object):
    """Helpful SSH abstractions for executing remote applications."""

    def __init__(self, user, host, password=None, port=22):
        """Initialize SSH object.

        Args:
          user: username.
          host: hostname of connection.
          password: user's password. If None, stdin will be prompted for pass.
            If the user is using auth_keys, an empty string will work.
          port: port of destination's sshd.
        Raises: SSHException."""
        self.user = user
        self.host = host
        self.ssh = paramiko.SSHClient()
        # Auto-accept unknown host keys.  NOTE(review): convenient for lab
        # use, but this forgoes host-key verification.
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        #Get password from stdin
        if password is None:
            password = getpass.getpass("{0}'s password: ".format(user))
        #Initialize connection
        try:
            self.ssh.connect(host, username=user, password=password, port=port)
        except Exception:
            # Normalize any connection failure into a single exception type.
            raise paramiko.SSHException

    def list_screen_socks(self):
        """Return a list of open screen sockets."""
        socks = []
        # "screen -ls" lines look like "\t<pid>.<sockname>\t(Detached)";
        # capture only the socket name after "<pid>.".
        sock_re = re.compile(r"\s+\d+\.(.+?)\s+.*?")
        sock_ls = self.run("screen -ls", quiet=True)["out"]
        for sock in sock_ls:
            sock_match = sock_re.match(sock)
            if sock_match:
                socks.append(sock_match.group(1))
        return socks

    def run_screen(self, cmd, sock_name, timeout=None):
        """Remotely execute a command in a screen session. If timeout is reached, screen will be renamed and kept open.

        Args:
          cmd: command string to be executed.
          sock_name: screen session's socket name.
          timeout: maximum execution time.
        Returns:
          False if a session with this socket name already exists,
          True once the detached screen session has been launched."""
        socks = self.list_screen_socks()
        #Sock already exists!
        exists = any([s.startswith(sock_name) for s in socks])
        if exists:
            return False

        log.debug("Launching screen: {0} at {1}".format(sock_name, self.host))

        #Sanitize newlines: turn the (possibly multi-line) command into a
        #single ";"-separated command string.
        cmd = cmd.strip("\t\n ;")
        sane_cmd = ""
        for line in cmd.splitlines():
            sane_cmd += line.strip() + ";"
        cmd = sane_cmd

        #Debug -- log the cmds being run
        [log.debug(c) for c in cmd.split(";")]

        if timeout:
            # Wrap each individual command with GNU timeout so a hung step
            # is SIGKILLed (exit status 137) instead of blocking forever.
            timed_cmd = ""
            for line in cmd.split(";"):
                if len(line) > 0:
                    timed_cmd += "timeout --signal=9 {0} {1}; ".format(timeout, line)
            cmd = timed_cmd

        #Add return code catch ("ck") after each command
        cmd = cmd.replace(";", "; ck; ")

        #Wrap the command with a bash condition to rename and keep the screen session open:
        #on failure the session is renamed "<sock>-timed" (timeout) or "<sock>-error"
        #so wait_for_screen() can tell the outcomes apart.
        shell_script = "ck(){{ c=$?; echo $c; if [[ $c != 0 ]]; then screen -S {0} -X zombie kr; if [[ $c == 137 ]]; then screen -S {0} -X sessionname {0}-timed; else screen -S {0} -X sessionname {0}-error; fi; exit; fi; }}".format(sock_name)
        cmd = "screen -S {0} -d -L -m $SHELL -c '{2}; {1}'"\
            .format(sock_name, cmd, shell_script)

        chan = self.ssh.get_transport().open_session()
        chan.exec_command(cmd)
        print cmd
        return True

    def wait_for_screen(self, sock_name, sleep_duration=3):
        """Blocks until a screen sock is removed or timesout.

        Args:
          sock_name: socket to be looking for.
          sleep_duration: time to sleep inbetween checks.
        Returns:
          dict {
            timedout: true/false
            finished: true/false
            errored: error code
          }"""
        result = {
            "timedout": False,
            "finished": False,
            "errored": False
        }
        #initial rest
        sleep(sleep_duration)
        while True:
            alive = False
            for sock in self.list_screen_socks():
                if sock == sock_name:
                    # Session is still running under its original name.
                    alive = True
                    break
                if sock == sock_name + "-timed":
                    #Screen timed out (renamed by the "ck" shell hook)
                    result["timedout"] = True
                    return result
                elif sock == sock_name + "-error":
                    # Command exited non-zero (renamed by the "ck" hook).
                    result["errored"] = True
                    return result
            #If it is still running, sleep for a second
            if alive: sleep(sleep_duration)
            else:
                result["finished"] = True
                return result

    def run(self, cmd, timeout=None, quiet=False):
        """Remotely execute a command.

        Args:
          cmd: command string to be executed.
          timeout: maximum execution time.
          quiet: suppress debug logging of the individual commands.
        Returns:
          dict {
            out: stdout.
            err: stderr.
            exit: exit code of the cmd.
              timeout returns 137.
          }"""
        #Sanitize newlines
        cmd = cmd.replace("\n", ";")

        #Debug -- log the cmds being run
        if not quiet:
            [log.debug(c) for c in cmd.split(";")]

        if timeout:
            # SIGKILL the remote command after the timeout; exit code 137.
            cmd = "timeout --signal=9 {0} {1}".format(timeout, cmd)

        chan = self.ssh.get_transport().open_session()
        chan.exec_command(cmd)
        result = {
            "out" : list(chan.makefile("rb")),
            "err" : list(chan.makefile_stderr("rb")),
            "exit": chan.recv_exit_status()
        }
        return result
| gpl-2.0 |
marctc/django | tests/utils_tests/test_crypto.py | 447 | 4581 | from __future__ import unicode_literals
import binascii
import hashlib
import unittest
from django.utils.crypto import constant_time_compare, pbkdf2
class TestUtilsCryptoMisc(unittest.TestCase):
    """Checks for miscellaneous crypto helpers."""

    def test_constant_time_compare(self):
        # It's hard to test for constant time, just test the result.
        equal_pairs = [(b'spam', b'spam'), ('spam', 'spam')]
        unequal_pairs = [(b'spam', b'eggs'), ('spam', 'eggs')]
        for left, right in equal_pairs:
            self.assertTrue(constant_time_compare(left, right))
        for left, right in unequal_pairs:
            self.assertFalse(constant_time_compare(left, right))
class TestUtilsCryptoPBKDF2(unittest.TestCase):
    """Known-answer tests for pbkdf2(); results are hex-encoded derived keys."""

    # Test vectors from the PBKDF2 RFC draft:
    # http://tools.ietf.org/html/draft-josefsson-pbkdf2-test-vectors-06
    rfc_vectors = [
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": "0c60c80f961f0e71f3a9b524af6012062fe037a6",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 2,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 4096,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": "4b007901b765489abead49d926f721d065a429c1",
        },
        # # this takes way too long :(
        # {
        #     "args": {
        #         "password": "password",
        #         "salt": "salt",
        #         "iterations": 16777216,
        #         "dklen": 20,
        #         "digest": hashlib.sha1,
        #     },
        #     "result": "eefe3d61cd4da4e4e9945b3d6ba2158c2634e984",
        # },
        {
            "args": {
                "password": "passwordPASSWORDpassword",
                "salt": "saltSALTsaltSALTsaltSALTsaltSALTsalt",
                "iterations": 4096,
                "dklen": 25,
                "digest": hashlib.sha1,
            },
            "result": "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038",
        },
        {
            "args": {
                "password": "pass\0word",
                "salt": "sa\0lt",
                "iterations": 4096,
                "dklen": 16,
                "digest": hashlib.sha1,
            },
            "result": "56fa6aa75548099dcc37d7f03425e0c3",
        },
    ]

    # Additional vectors guarding against regressions (non-sha1 digests,
    # dklen of 0 meaning "digest size", and leading-zero output bytes).
    regression_vectors = [
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha256,
            },
            "result": "120fb6cffcf8b32c43e7225256c4f837a86548c9",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha512,
            },
            "result": "867f70cf1ade02cff3752599a3a53dc4af34c7a6",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1000,
                "dklen": 0,
                "digest": hashlib.sha512,
            },
            "result": ("afe6c5530785b6cc6b1c6453384731bd5ee432ee"
                       "549fd42fb6695779ad8a1c5bf59de69c48f774ef"
                       "c4007d5298f9033c0241d5ab69305e7b64eceeb8d"
                       "834cfec"),
        },
        # Check leading zeros are not stripped (#17481)
        {
            "args": {
                "password": b'\xba',
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": '0053d3b91a7f1e54effebd6d68771e8a6e0b2c5b',
        },
    ]

    def test_public_vectors(self):
        # Every RFC vector must produce the published derived key.
        for vector in self.rfc_vectors:
            result = pbkdf2(**vector['args'])
            self.assertEqual(binascii.hexlify(result).decode('ascii'),
                             vector['result'])

    def test_regression_vectors(self):
        # Every regression vector must keep producing its recorded key.
        for vector in self.regression_vectors:
            result = pbkdf2(**vector['args'])
            self.assertEqual(binascii.hexlify(result).decode('ascii'),
                             vector['result'])
| bsd-3-clause |
ChawalitK/odoo | doc/_extensions/odoo/switcher.py | 262 | 1500 | from docutils import nodes, utils
from docutils.parsers.rst import Directive
from pygments.lexers import get_lexer_by_name
def setup(app):
    """Register the ``switcher``/``case`` directive pair with Sphinx."""
    for name, directive in (('switcher', SwitcherDirective),
                            ('case', CaseDirective)):
        app.add_directive(name, directive)
class SwitcherDirective(Directive):
    """reST directive rendering its children as a tabbed content switcher.

    Each child must be either a literal code block (tab titled with the
    lexer's language name) or a compound node carrying a name (tab titled
    with that name).
    """
    has_content = True

    def run(self):
        self.assert_has_content()
        # Parse the nested reST into a compound node so the resulting
        # children can be inspected for tab titles.
        body = nodes.compound('\n'.join(self.content), classes=['tabs'])
        self.state.nested_parse(self.content, self.content_offset, body)
        titles = []
        for child in body.children:
            if isinstance(child, nodes.literal_block):
                # Code block: use the human-readable lexer name as the title.
                titles.append(get_lexer_by_name(child['language']).name)
            else:
                assert child['names'], ("A switcher case must be either a "\
                    "code block or a compound with a name")
                titles.append(' '.join(child['names']))
        # Emit one bullet-list item per tab, followed by the tab bodies.
        tabs = nodes.bullet_list('', *[
            nodes.list_item('', nodes.Text(title))
            for title in titles
        ])
        node = nodes.compound('', tabs, body, classes=['content-switcher'])
        return [node]
class CaseDirective(Directive):
    """One labelled branch inside a ``switcher`` directive."""

    required_arguments = 1
    final_argument_whitespace = True
    has_content = True

    def run(self):
        self.assert_has_content()
        # The single argument becomes the node's name, which the enclosing
        # switcher uses as the tab title.
        branch = nodes.compound('\n'.join(self.content),
                                names=[self.arguments[0]])
        self.state.nested_parse(self.content, self.content_offset, branch)
        return [branch]
| gpl-3.0 |
dsajkl/123 | scripts/metrics/publish.py | 69 | 1310 | """
Publish Build Stats.
"""
import os
import subprocess
from dogapi import dog_http_api
from coverage_metrics import CoverageMetrics
class PublishStats(object):
"""
Publish stats to DataDog.
"""
def __init__(self, api_key):
dog_http_api.api_key = api_key
@staticmethod
def report_metrics(metrics):
"""
Send metrics to DataDog.
Arguments:
metrics (dict): data to publish
"""
for key, value in metrics.iteritems():
print u"Sending {} ==> {}%".format(key, value)
dog_http_api.metric(key, value)
def main(api_key):
    """
    Collect unit-test coverage metrics and publish them to DataDog.

    Arguments:
        api_key (str): DataDog API key used to authenticate the upload.
    """
    dir_path = os.path.dirname(os.path.relpath(__file__))
    # Locate every coverage.xml produced by the unit-test runs.  The pattern
    # must NOT be shell-quoted: the argument list bypasses the shell, so the
    # previous '"coverage.xml"' (with embedded quotes) matched nothing.
    unit_reports_cmd = ['find', 'reports', '-name', 'coverage.xml']
    unit_report_paths = subprocess.check_output(unit_reports_cmd)
    cov_metrics = CoverageMetrics(os.path.join(dir_path, 'unit_test_groups.json'), unit_report_paths)
    coverage_metrics = cov_metrics.coverage_metrics()
    # Publish Coverage Stats to DataDog
    PublishStats(api_key).report_metrics(coverage_metrics)
PublishStats(api_key).report_metrics(coverage_metrics)
if __name__ == "__main__":
API_KEY = os.environ.get('DATADOG_API_KEY')
if API_KEY:
main(API_KEY)
else:
print 'SKIP: Publish Stats to Datadog'
| agpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.