| repo_name (string, 6–100 chars) | path (string, 4–294 chars) | copies (string, 1–5 chars) | size (string, 4–6 chars) | content (string, 606–896k chars) | license (string, 15 classes) |
|---|---|---|---|---|---|
arrow-/simQuad | ground_station/gyro_scope.py | 2 | 5471 | '''
-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
IMPORTANT!!
It is suggested you run this script with mpu_level2.ino first to see and understand
its operation.
Basically this script EXPECTS:
Arduino is providing space separated gyro readings @ ~5ms intervals (via MPU Interrupt).
* Each serial packet must be ASCII and look like:
[x_gyro]<space>[y_gyro]<space>[z_gyro]<newline>
+ You need to specify correct Serial port
+ You need to set the Y-limits of the plot axis.
+ You need to use correct value of "dt".
+ You need to set the correct conversion factor for Gyro readings.
Mode 0 1 2 3
Range +-250 +-500 +-1000 +-2000
Conv. 131 65.5 32.75 16.375
AND it DELIVERS:
* 3 axis loss-less Gyro readings plot (almost real time).
* 3D visualisation of current orientation based on gyro vals
If you want to just plot data in ~real time use {oscilloscope.py}.
-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
'''
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import serial, time
def rotate(v, axis, theta):
    """Return vector ``v`` rotated about the unit vector ``axis`` by ``theta`` radians.

    Builds the Rodrigues axis-angle rotation matrix and applies it to ``v``.
    """
    cos_t = np.cos(theta)
    sin_t = np.sin(theta)
    k = 1 - cos_t
    ux, uy, uz = axis[0], axis[1], axis[2]
    # Rodrigues rotation matrix for a unit axis (ux, uy, uz).
    rot = np.array([
        [cos_t + ux * ux * k,       ux * uy * k - uz * sin_t,  ux * uz * k + uy * sin_t],
        [uy * ux * k + uz * sin_t,  cos_t + uy * uy * k,       uy * uz * k - ux * sin_t],
        [uz * ux * k - uy * sin_t,  uz * uy * k + ux * sin_t,  cos_t + uz * uz * k],
    ])
    return rot.dot(v.T)
def calcPose(omega):
    '''
    Helper function. Finds the "d"-theta, then calls rotate.
    Omega must be in ** degrees per second **

    Mutates the module-global orthonormal triad ``rpy`` (rows are the body's
    roll/pitch/yaw axes) using the module-global sample interval ``dt``.
    '''
    theta = omega*dt*np.pi/180 #theta is "d-theta" in radians
    # Roll (theta[0]) and pitch (theta[1]): rotate each in-plane axis about the other.
    rpy[1] = rotate(rpy[1], rpy[0], theta[0])
    rpy[0] = rotate(rpy[0], rpy[1], theta[1])
    # Re-derive the yaw axis so the triad stays right-handed and orthogonal.
    rpy[2] = np.cross(rpy[0], rpy[1])
    # Yaw (theta[2]): rotate both in-plane axes about the freshly computed yaw axis.
    rpy[1] = rotate(rpy[1], rpy[2], theta[2])
    rpy[0] = rotate(rpy[0], rpy[2], theta[2])
plt.ion()

# SET CORRECT PORT NUM HERE
arduino = serial.Serial('/dev/ttyACM0', 57600)

# dt is found experimentally. Contact Ananya for details. Basically this the time between
# 2 MPU(gyro) interrupts. The np.pi/180 converts deg/sec to rad/sec.
# SET CORRECT dt HERE. TIME IN SECONDS BETWEEN TWO SENSOR PACKETS AS RECVD. BY ARDUINO.
dt = .005 # 5msec

# rpy is original orientation. These vectors are updated by calcPose()
rpy = np.eye(3)

# Left subplot (axes): scrolling 2D scope; right subplot (a3d): 3D pose view.
fig = plt.figure(figsize=(16,6))
axes = fig.add_subplot(121)
a3d = fig.add_subplot(122, projection='3d')
a3d.set_xlim(-1.2,1.2)
a3d.set_ylim(-1.2,1.2)
a3d.set_zlim(-1.2,1.2)
a3d.scatter([0], [0], [0], s=40)  # dot marking the origin
# Live body-axis lines ("r"oll and "p"itch), updated every sample from rpy.
r, = a3d.plot([0,1], [0,0], [0,0], lw=2, c='black')
p, = a3d.plot([0,0], [0,1], [0,0], lw=2, c='red')
# Static world-frame reference axes: solid = positive, dashed = negative.
a3d.plot([0,2], [0,0], [0,0], c='cyan')
a3d.plot([0,0], [0,2], [0,0], c='brown')
a3d.plot([0,0], [0,0], [0,2], c='green')
a3d.plot([0,-2], [0,0], [0,0], ls='--', c='cyan')
a3d.plot([0,0], [0,-2], [0,0], ls='--', c='brown')
a3d.plot([0,0], [0,0], [0,-2], ls='--', c='green')

num_samples = 0
buff = 0
# "buff" counts till 50. Every time it reaches fifty, plt.draw() is called, since
# plt.draw() is a costly operation. Normal list append and pose calculations are fast.
# So, do those diligently, for every sample, but update display
# rarely (while ensuring smooth animation).
# NOTE(review): the comment above says 50, but the code below redraws when
# buff>25, i.e. every 26 samples — confirm which is intended.
gyro_x = [0]
gyro_y = [0] # gyro data lists. I use them like queues.
gyro_z = [0]
t = [0]
# scopes is a list of 3 matplotlib.Line_2D objects.
scopes = [axes.plot(t, gyro_x, label=r'$\omega_x$')[0], axes.plot(t, gyro_y, label=r'$\omega_y$')[0], axes.plot(t, gyro_z, label=r'$\omega_z$')[0]]
axes.legend(prop=dict(size=14))
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,
           ncol=3, mode="expand", borderaxespad=0.)
axes.set_ylim(-505, 505) # SET CORRECT Y-LIM HERE

conversion = 65.5 #Gyro 500 SET CORRECT CONV FACTOR HERE
# Refer datasheet. Convert ADC result into a Physical measurement.
# If you don't understand this, pls. leave project.

print 'Me Ready'
time.sleep(2)
#Handshake MAY BE REDUNDANT
print arduino.inWaiting()
arduino.flushInput()
# presumably 'e' asks the sketch (mpu_level2.ino) to start streaming — TODO confirm
arduino.write('e')
print 'Sent Request...'

data = [0]*6
while True:
    try:
        num = arduino.read(12)  # one packet: 6 big-endian 16-bit words
        num = [ord(x) for x in num]
    except:
        print 'Serial error!'
        raise RuntimeError
    _ind=0 #this var is connected to for loop below!!
    for i in range(0,12, 2):
        data[_ind] = (num[i]<<8)|num[i+1]
        if data[_ind] & 0x8000:
            # two's-complement sign correction for 16-bit values
            data[_ind] = data[_ind] - 0x10000
        _ind += 1
    #print data[3:]
    # Words 3..5 are the gyro axes; divide raw counts by "conversion" -> deg/sec.
    datas = np.array([float(data[3])/conversion, float(data[4])/conversion, float(data[5])/conversion])
    gyro_x.append(datas[0])
    gyro_y.append(datas[1])
    gyro_z.append(datas[2])
    num_samples += 1
    t.append(num_samples)
    calcPose(datas) #This function updates the global variable: "rpy"
    if num_samples>200:
        # keep the scope window at the latest 200 samples (queue behaviour)
        del t[0]
        del gyro_x[0]
        del gyro_y[0]
        del gyro_z[0]
    axes.set_xlim(t[0], num_samples)
    scopes[0].set_data(t, gyro_x)
    scopes[1].set_data(t, gyro_y)
    scopes[2].set_data(t, gyro_z)
    # pose matrix is just an easier way of giving input to the .set_data()
    # and .set_3d_properties() methods. You see, line needs 2 (end) points:
    # the rpy entries AND THE ORIGIN. pose matrix does just that: specifies
    # BOTH end points.
    pose = np.array([np.array([np.zeros(3), rpy[0]]).T, np.array([np.zeros(3), rpy[1]]).T, np.array([np.zeros(3), rpy[2]]).T])
    r.set_data(pose[0][:2])
    r.set_3d_properties(pose[0][2])
    p.set_data(pose[1][:2])
    p.set_3d_properties(pose[1][2])
    if buff>25:
        buff=0
        plt.draw()
    buff += 1
plt.ioff()
plt.show()
plt.show() | gpl-2.0 |
tectronics/mythbox | resources/test/mythboxtest/mythtv/test_inject_conn.py | 7 | 5223 | #
# MythBox for XBMC - http://mythbox.googlecode.com
# Copyright (C) 2011 analogue@yahoo.com
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
import mythboxtest
import time
import unittest2 as unittest
import util_mock
from mockito import Mock
from mythbox import pool
from mythbox.mythtv.db import MythDatabaseFactory
from mythbox.platform import Platform
from mythbox.settings import MythSettings
from mythbox.util import run_async, OnDemandConfig
from mythbox.mythtv.conn import ConnectionFactory, inject_conn
log = mythboxtest.getLogger('mythbox.unittest')
class SafeClient(object):
    """Test fixture whose methods all acquire a backend connection via the
    @inject_conn decorator (exposed to the method body as self.conn())."""

    def __init__(self):
        pass

    @inject_conn
    def getTuners(self):
        # Any call on the injected connection will do; this just exercises it.
        self.conn().getLoad()

    @inject_conn
    def getJobs(self):
        self.conn().getUptime()

    @inject_conn
    def outerNestedAccessToDb(self):
        # Outer -> middle -> inner: every level is decorated, so calling this
        # exercises nested @inject_conn acquisition within one thread.
        self.conn().getUptime()
        self.middleNestedAccessToDb()

    @inject_conn
    def middleNestedAccessToDb(self):
        self.conn().getTuners()
        self.innerNestedAccessToDb()

    @inject_conn
    def innerNestedAccessToDb(self):
        self.conn().getLoad()

    @inject_conn
    def delayed(self, delay):
        # Sleep before touching the connection so concurrent callers overlap.
        time.sleep(delay)
        self.conn().getLoad()
class OtherClient(object):
    """Second fixture class, used to check that a pooled connection is shared
    across objects of different classes within the same thread."""

    @inject_conn
    def getRecordingGroups(self):
        log.debug('uptime = %s' % self.conn().getUptime())
class InjectConnDecoratorTest(unittest.TestCase):
    """Integration tests for the @inject_conn connection-pooling decorator.

    Requires a reachable MythTV backend / MySQL server; credentials come from
    OnDemandConfig.
    """

    def setUp(self):
        # Build real (non-mock) db and connection pools from private config and
        # register them in the global pool registry used by @inject_conn.
        p = Platform()
        bus = Mock()
        translator = util_mock.Translator(p, langInfo=util_mock.XBMCLangInfo(p))
        settings = MythSettings(p, translator)
        domainCache = Mock()
        privateConfig = OnDemandConfig()
        settings.put('mysql_host', privateConfig.get('mysql_host'))
        settings.put('mysql_database', privateConfig.get('mysql_database'))
        settings.put('mysql_user', privateConfig.get('mysql_user'))
        settings.put('mysql_password', privateConfig.get('mysql_password'))
        self.dbPool = pool.pools['dbPool'] = pool.Pool(MythDatabaseFactory(settings=settings, translator=translator, domainCache=domainCache))
        self.connPool = pool.pools['connPool'] = pool.Pool(ConnectionFactory(settings=settings, translator=translator, platform=p, bus=bus))

    def tearDown(self):
        # Drain and deregister both global pools so tests stay independent.
        self.connPool.shutdown()
        self.connPool = None
        del pool.pools['connPool']
        self.dbPool.shutdown()
        self.dbPool = None
        del pool.pools['dbPool']

    @run_async
    def getTunersAsync(self, client):
        # Runs on a worker thread (run_async); the sleep inside delayed()
        # guarantees the threads' connection usage overlaps.
        client.delayed(1) # make sure theres overlap with delay

    def test_pound_with_20_threads(self):
        client = SafeClient()
        workers = []
        for i in xrange(20):
            workers.append(self.getTunersAsync(client))
            time.sleep(0.1)
        for w in workers:
            w.join()
        # Pool grew under concurrency and every connection was returned.
        self.assertTrue(self.connPool.size() > 0)
        self.assertTrue(self.connPool.available() > 0)
        log.debug('connPool size = %d' % self.connPool.size())
        log.debug('connPool avail = %d' % self.connPool.available())
        # Shrinking should release everything since nothing is checked out.
        self.connPool.shrink()
        self.assertEqual(0, self.connPool.size())
        self.assertEqual(0, self.connPool.available())
        log.debug('connPool size = %d' % self.connPool.size())
        log.debug('connPool avail = %d' % self.connPool.available())

    def test_multiple_calls_to_tls_resource_in_same_thread_allocates_only_one_resource(self):
        client = SafeClient()
        client.getTuners()
        client.getJobs()
        self.assertEquals(1, self.connPool.available())
        self.assertEquals(1, self.connPool.size())
        self.connPool.shrink()

    def test_nesting_method_calls_to_tls_resources_in_same_thread_allocates_only_one_resources(self):
        client = SafeClient()
        client.outerNestedAccessToDb() # impl calls other methods which are also decorated with @inject_conn
        self.assertEquals(1, self.connPool.available())
        self.assertEquals(1, self.connPool.size())
        self.connPool.shrink()

    def test_resource_is_shared_by_methods_in_objects_of_differing_class_but_in_the_same_thread(self):
        client1 = SafeClient()
        client2 = OtherClient()
        client1.getJobs()
        client2.getRecordingGroups()
        self.assertEquals(1, self.connPool.available())
        self.assertEquals(1, self.connPool.size())
        self.connPool.shrink()
| gpl-2.0 |
dfunckt/django | tests/model_fields/test_decimalfield.py | 49 | 2895 | from decimal import Decimal
from django.core import validators
from django.core.exceptions import ValidationError
from django.db import models
from django.test import TestCase
from .models import BigD, Foo
class DecimalFieldTests(TestCase):
    """Unit tests for django.db.models.DecimalField conversion, formatting,
    lookup, and validation behaviour."""

    def test_to_python(self):
        f = models.DecimalField(max_digits=4, decimal_places=2)
        self.assertEqual(f.to_python(3), Decimal('3'))
        self.assertEqual(f.to_python('3.14'), Decimal('3.14'))
        # Non-numeric input must raise, not silently coerce.
        with self.assertRaises(ValidationError):
            f.to_python('abc')

    def test_default(self):
        f = models.DecimalField(default=Decimal('0.00'))
        self.assertEqual(f.get_default(), Decimal('0.00'))

    def test_format(self):
        f = models.DecimalField(max_digits=5, decimal_places=1)
        self.assertEqual(f._format(f.to_python(2)), '2.0')
        self.assertEqual(f._format(f.to_python('2.6')), '2.6')
        self.assertIsNone(f._format(None))

    def test_get_prep_value(self):
        f = models.DecimalField(max_digits=5, decimal_places=1)
        self.assertIsNone(f.get_prep_value(None))
        self.assertEqual(f.get_prep_value('2.4'), Decimal('2.4'))

    def test_filter_with_strings(self):
        """
        Should be able to filter decimal fields using strings (#8023).
        """
        foo = Foo.objects.create(a='abc', d=Decimal('12.34'))
        self.assertEqual(list(Foo.objects.filter(d='12.34')), [foo])

    def test_save_without_float_conversion(self):
        """
        Ensure decimals don't go through a corrupting float conversion during
        save (#5079).
        """
        bd = BigD(d='12.9')
        bd.save()
        bd = BigD.objects.get(pk=bd.pk)
        self.assertEqual(bd.d, Decimal('12.9'))

    def test_lookup_really_big_value(self):
        """
        Really big values can be used in a filter statement.
        """
        # This should not crash.
        Foo.objects.filter(d__gte=100000000000)

    def test_max_digits_validation(self):
        field = models.DecimalField(max_digits=2)
        expected_message = validators.DecimalValidator.messages['max_digits'] % {'max': 2}
        with self.assertRaisesMessage(ValidationError, expected_message):
            field.clean(100, None)

    def test_max_decimal_places_validation(self):
        field = models.DecimalField(decimal_places=1)
        expected_message = validators.DecimalValidator.messages['max_decimal_places'] % {'max': 1}
        with self.assertRaisesMessage(ValidationError, expected_message):
            field.clean(Decimal('0.99'), None)

    def test_max_whole_digits_validation(self):
        field = models.DecimalField(max_digits=3, decimal_places=1)
        expected_message = validators.DecimalValidator.messages['max_whole_digits'] % {'max': 2}
        with self.assertRaisesMessage(ValidationError, expected_message):
            field.clean(Decimal('999'), None)
| bsd-3-clause |
jorik041/scikit-learn | doc/sphinxext/github_link.py | 314 | 2661 | from operator import attrgetter
import inspect
import subprocess
import os
import sys
from functools import partial
REVISION_CMD = 'git rev-parse --short HEAD'
def _get_git_revision():
    """Return the current git HEAD as a short hash string, or None.

    Returns None (after printing a notice) when git exits non-zero (e.g. not
    a git checkout) or when the git executable cannot be run at all.
    """
    try:
        revision = subprocess.check_output(REVISION_CMD.split()).strip()
    # Fix: also catch OSError (incl. FileNotFoundError) — previously a missing
    # git binary raised an uncaught exception instead of returning None.
    except (subprocess.CalledProcessError, OSError):
        print('Failed to execute git to get revision')
        return None
    return revision.decode('utf-8')
def _linkcode_resolve(domain, info, package, url_fmt, revision):
"""Determine a link to online source for a class/method/function
This is called by sphinx.ext.linkcode
An example with a long-untouched module that everyone has
>>> _linkcode_resolve('py', {'module': 'tty',
... 'fullname': 'setraw'},
... package='tty',
... url_fmt='http://hg.python.org/cpython/file/'
... '{revision}/Lib/{package}/{path}#L{lineno}',
... revision='xxxx')
'http://hg.python.org/cpython/file/xxxx/Lib/tty/tty.py#L18'
"""
if revision is None:
return
if domain not in ('py', 'pyx'):
return
if not info.get('module') or not info.get('fullname'):
return
class_name = info['fullname'].split('.')[0]
if type(class_name) != str:
# Python 2 only
class_name = class_name.encode('utf-8')
module = __import__(info['module'], fromlist=[class_name])
obj = attrgetter(info['fullname'])(module)
try:
fn = inspect.getsourcefile(obj)
except Exception:
fn = None
if not fn:
try:
fn = inspect.getsourcefile(sys.modules[obj.__module__])
except Exception:
fn = None
if not fn:
return
fn = os.path.relpath(fn,
start=os.path.dirname(__import__(package).__file__))
try:
lineno = inspect.getsourcelines(obj)[1]
except Exception:
lineno = ''
return url_fmt.format(revision=revision, package=package,
path=fn, lineno=lineno)
def make_linkcode_resolve(package, url_fmt):
    """Return a linkcode_resolve callable for the given URL format.

    package: name of the root module of the package.
    url_fmt: e.g. ('https://github.com/USER/PROJECT/'
                   'blob/{revision}/{package}/{path}#L{lineno}')
    The current git revision (commit hash/name) is resolved once, here, and
    bound into the returned callable.
    """
    return partial(
        _linkcode_resolve,
        revision=_get_git_revision(),
        package=package,
        url_fmt=url_fmt,
    )
| bsd-3-clause |
hernandito/SickRage | sickbeard/providers/generic.py | 1 | 26015 | # coding=utf-8
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import datetime
import os
import re
import itertools
from random import shuffle
from base64 import b16encode, b32decode
import requests
from hachoir_parser import createParser
import sickbeard
from sickbeard import helpers, classes, logger, db
from sickbeard.common import MULTI_EP_RESULT, SEASON_RESULT
from sickbeard import tvcache
from sickbeard.name_parser.parser import NameParser, InvalidNameException, InvalidShowException
from sickbeard.common import Quality
from sickbeard.common import user_agents
from sickrage.helper.common import sanitize_filename
from sickrage.helper.encoding import ek
from sickrage.helper.exceptions import ex
from sickrage.show.Show import Show
from sickbeard import show_name_helpers
class GenericProvider(object):
    # Provider type identifiers; subclasses set self.providerType to one of these.
    NZB = "nzb"
    TORRENT = "torrent"

    def __init__(self, name):
        """Base class for all search providers (NZB and torrent).

        name: human-readable provider name, also used to derive getID().
        """
        # these need to be set in the subclass
        self.providerType = None
        self.name = name
        self.urls = {}
        self.url = ''
        self.public = False
        self.show = None
        self.supportsBacklog = True
        self.supportsAbsoluteNumbering = False
        self.anime_only = False
        self.search_mode = None
        self.search_fallback = False
        self.enabled = False
        self.enable_daily = False
        self.enable_backlog = False
        self.cache = tvcache.TVCache(self)
        self.session = requests.Session()
        # Pick a random User-Agent for this provider instance.
        shuffle(user_agents)
        self.headers = {'User-Agent': user_agents[0]}
        # Public bittorrent cache mirrors tried (in random order) when a
        # result is a magnet link; see _makeURL().
        self.btCacheURLS = [
            'http://torcache.net/torrent/{torrent_hash}.torrent',
            'http://thetorrent.org/torrent/{torrent_hash}.torrent',
            'http://btdig.com/torrent/{torrent_hash}.torrent',
            # 'http://torrage.com/torrent/{torrent_hash}.torrent',
            # 'http://itorrents.org/torrent/{torrent_hash}.torrent',
        ]
        shuffle(self.btCacheURLS)
        self.proper_strings = ['PROPER|REPACK|REAL']
    def getID(self):
        # Stable, config/filesystem-safe identifier derived from the display name.
        return GenericProvider.makeID(self.name)
@staticmethod
def makeID(name):
return re.sub(r"[^\w\d_]", "_", name.strip().lower())
    def imageName(self):
        # Icon filename shown in the UI for this provider.
        return self.getID() + '.png'

    # pylint: disable=no-self-use,unused-variable
    # Method could be a function, Unused variable
    def _checkAuth(self):
        # Subclasses with credentials override this; base assumes auth is fine.
        return True

    def _doLogin(self):
        # Subclasses needing a session login override this; base is a no-op success.
        return True

    def isActive(self):
        # Overridden by NZBProvider/TorrentProvider to AND in the global toggles.
        return False

    def isEnabled(self):
        return self.enabled
    def getResult(self, episodes):
        """
        Returns a result of the correct type for this provider
        """
        if self.providerType == GenericProvider.NZB:
            result = classes.NZBSearchResult(episodes)
        elif self.providerType == GenericProvider.TORRENT:
            result = classes.TorrentSearchResult(episodes)
        else:
            # Unknown provider type: fall back to the generic result class.
            result = classes.SearchResult(episodes)
        result.provider = self
        return result
    def getURL(self, url, post_data=None, params=None, timeout=30, json=False, needBytes=False):
        """
        By default this is just a simple urlopen call but this method should be overridden
        for providers with special URL requirements (like cookies)
        """
        return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout,
                              session=self.session, json=json, needBytes=needBytes)
    def _makeURL(self, result):
        """Translate a search result into candidate download URLs + local filename.

        Magnet links are mapped onto the shuffled bt-cache mirror list;
        ordinary URLs pass through unchanged.  Returns (urls, filename),
        which are ([], u'') when the magnet cannot be parsed.
        """
        urls = []
        filename = u''
        if result.url.startswith('magnet'):
            try:
                torrent_hash = re.findall(r'urn:btih:([\w]{32,40})', result.url)[0].upper()
                try:
                    torrent_name = re.findall('dn=([^&]+)', result.url)[0]
                except Exception:
                    torrent_name = 'NO_DOWNLOAD_NAME'
                if len(torrent_hash) == 32:
                    # 32-char info hashes are base32; convert to 40-char hex form.
                    torrent_hash = b16encode(b32decode(torrent_hash)).upper()
                if not torrent_hash:
                    logger.log(u"Unable to extract torrent hash from magnet: " + ex(result.url), logger.ERROR)
                    return urls, filename
                urls = [x.format(torrent_hash=torrent_hash, torrent_name=torrent_name) for x in self.btCacheURLS]
            except Exception:
                logger.log(u"Unable to extract torrent hash or name from magnet: " + ex(result.url), logger.ERROR)
                return urls, filename
        else:
            urls = [result.url]
        # Destination file lives in the configured TORRENT_DIR / NZB_DIR.
        if self.providerType == GenericProvider.TORRENT:
            filename = ek(os.path.join, sickbeard.TORRENT_DIR, sanitize_filename(result.name) + '.' + self.providerType)
        elif self.providerType == GenericProvider.NZB:
            filename = ek(os.path.join, sickbeard.NZB_DIR, sanitize_filename(result.name) + '.' + self.providerType)
        return urls, filename
    def downloadResult(self, result):
        """
        Save the result to disk.

        Tries each candidate URL from _makeURL() in turn; returns True on the
        first verified download, False otherwise.
        """
        # check for auth
        if not self._doLogin():
            return False
        urls, filename = self._makeURL(result)
        for url in urls:
            # Skip bt-cache URLs built from a magnet with no display name.
            if 'NO_DOWNLOAD_NAME' in url:
                continue
            if url.startswith('http'):
                # Some trackers require a same-site Referer header.
                self.headers.update({'Referer': '/'.join(url.split('/')[:3]) + '/'})
            logger.log(u"Downloading a result from " + self.name + " at " + url)
            # Support for Jackett/TorzNab
            if url.endswith(GenericProvider.TORRENT) and filename.endswith(GenericProvider.NZB):
                filename = filename.rsplit('.', 1)[0] + '.' + GenericProvider.TORRENT
            if helpers.download_file(url, filename, session=self.session, headers=self.headers):
                if self._verify_download(filename):
                    logger.log(u"Saved result to " + filename, logger.INFO)
                    return True
                else:
                    # Downloaded but invalid: remove it and try the next mirror.
                    logger.log(u"Could not download %s" % url, logger.WARNING)
                    helpers.remove_file_failed(filename)
        if len(urls):
            logger.log(u"Failed to download any results", logger.WARNING)
        return False
    def _verify_download(self, file_name=None):
        """
        Checks the saved file to see if it was actually valid, if not then consider the download a failure.
        """
        # primitive verification of torrents, just make sure we didn't get a text file or something
        if file_name.endswith(GenericProvider.TORRENT):
            try:
                parser = createParser(file_name)
                if parser:
                    # pylint: disable=protected-access
                    # Access to a protected member of a client class
                    mime_type = parser._getMimeType()
                    try:
                        # Best-effort close of hachoir's underlying stream.
                        parser.stream._input.close()
                    except Exception:
                        pass
                    if mime_type == 'application/x-bittorrent':
                        return True
            except Exception as e:
                logger.log(u"Failed to validate torrent file: " + ex(e), logger.DEBUG)
            logger.log(u"Result is not a valid torrent file", logger.DEBUG)
            return False
        # Non-torrent files (NZBs etc.) are accepted as-is.
        return True
    def searchRSS(self, episodes):
        # Daily-search path: serve wanted episodes straight from this provider's cache.
        return self.cache.findNeededEpisodes(episodes)

    def getQuality(self, item, anime=False):
        """
        Figures out the quality of the given RSS item node

        item: An elementtree.ElementTree element representing the <item> tag of the RSS feed

        Returns a Quality value obtained from the node's data
        """
        (title, url) = self._get_title_and_url(item)
        quality = Quality.sceneQuality(title, anime)
        return quality
    # pylint: disable=no-self-use,unused-argument
    def _doSearch(self, search_params, search_mode='eponly', epcount=0, age=0, epObj=None):
        # Subclass hook: perform the actual provider query; base returns nothing.
        return []

    def _get_season_search_strings(self, episode):
        # Subclass hook: build season-pack query strings.
        return []

    def _get_episode_search_strings(self, eb_obj, add_string=''):
        # Subclass hook: build single-episode query strings.
        # NOTE(review): parameter is spelt "eb_obj" (overrides use "ep_obj");
        # left as-is since keyword callers may depend on the name.
        return []
def _get_title_and_url(self, item):
"""
Retrieves the title and URL data from the item XML node
item: An elementtree.ElementTree element representing the <item> tag of the RSS feed
Returns: A tuple containing two strings representing title and URL respectively
"""
title = item.get('title', '')
if title:
title = u'' + title.replace(' ', '.')
url = item.get('link', '')
if url:
url = url.replace('&', '&').replace('%26tr%3D', '&tr=')
return title, url
    def _get_size(self, item):
        """Gets the size from the item"""
        # Subclass hook; the base class cannot know the feed layout, so it
        # logs and reports "unknown" (-1).
        logger.log(u"Provider type doesn't have _get_size() implemented yet", logger.ERROR)
        return -1
    def findSearchResults(self, show, episodes, search_mode, manualSearch=False, downCurQuality=False):
        """Search this provider for the given episodes.

        show: the TVShow being searched.
        episodes: list of episode objects wanted.
        search_mode: 'eponly' (per-episode) or 'sponly' (season packs).
        manualSearch/downCurQuality: forwarded to cache and quality checks.

        Returns a dict mapping an episode number (or the MULTI_EP_RESULT /
        SEASON_RESULT sentinels) to a list of SearchResult objects.
        """
        self._checkAuth()
        self.show = show

        results = {}
        itemList = []

        searched_scene_season = None
        for epObj in episodes:
            # search cache for episode result
            cacheResult = self.cache.searchCache(epObj, manualSearch, downCurQuality)
            if cacheResult:
                if epObj.episode not in results:
                    results[epObj.episode] = cacheResult
                else:
                    results[epObj.episode].extend(cacheResult)
                # found result, search next episode
                continue

            # skip if season already searched
            if len(episodes) > 1 and search_mode == 'sponly' and searched_scene_season == epObj.scene_season:
                continue

            # mark season searched for season pack searches so we can skip later on
            searched_scene_season = epObj.scene_season

            search_strings = []
            if len(episodes) > 1 and search_mode == 'sponly':
                # get season search results
                search_strings = self._get_season_search_strings(epObj)
            elif search_mode == 'eponly':
                # get single episode search results
                search_strings = self._get_episode_search_strings(epObj)

            # An id-based ('rid') search string is tried first; the text query
            # is only used as a fallback when it returns nothing.
            first = search_strings and isinstance(search_strings[0], dict) and 'rid' in search_strings[0]
            if first:
                logger.log(u'First search_string has rid', logger.DEBUG)

            for curString in search_strings:
                itemList += self._doSearch(curString, search_mode, len(episodes), epObj=epObj)
                if first:
                    first = False
                    if itemList:
                        logger.log(u'First search_string had rid, and returned results, skipping query by string', logger.DEBUG)
                        break
                    else:
                        logger.log(u'First search_string had rid, but returned no results, searching with string query', logger.DEBUG)

        # if we found what we needed already from cache then return results and exit
        if len(results) == len(episodes):
            return results

        # sort list by quality
        if len(itemList):
            items = {}
            itemsUnknown = []
            for item in itemList:
                quality = self.getQuality(item, anime=show.is_anime)
                if quality == Quality.UNKNOWN:
                    itemsUnknown += [item]
                else:
                    if quality not in items:
                        items[quality] = [item]
                    else:
                        items[quality].append(item)
            # Highest quality first; unknown-quality items go last.
            itemList = list(itertools.chain(*[v for (k, v) in sorted(items.iteritems(), reverse=True)]))
            itemList += itemsUnknown if itemsUnknown else []

        # filter results
        cl = []
        for item in itemList:
            (title, url) = self._get_title_and_url(item)

            # parse the file name
            try:
                myParser = NameParser(parse_method=('normal', 'anime')[show.is_anime])
                parse_result = myParser.parse(title)
            except InvalidNameException:
                logger.log(u"Unable to parse the filename " + title + " into a valid episode", logger.DEBUG)
                continue
            except InvalidShowException:
                logger.log(u"Unable to parse the filename " + title + " into a valid show", logger.DEBUG)
                continue

            showObj = parse_result.show
            quality = parse_result.quality
            release_group = parse_result.release_group
            version = parse_result.version

            addCacheEntry = False
            if not (showObj.air_by_date or showObj.sports):
                if search_mode == 'sponly':
                    if len(parse_result.episode_numbers):
                        logger.log(
                            u"This is supposed to be a season pack search but the result " + title + " is not a valid season pack, skipping it",
                            logger.DEBUG)
                        addCacheEntry = True
                    if len(parse_result.episode_numbers) and (parse_result.season_number not in set([ep.season for ep in episodes])
                                                              or not [ep for ep in episodes if ep.scene_episode in parse_result.episode_numbers]):
                        logger.log(
                            u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
                            logger.DEBUG)
                        addCacheEntry = True
                else:
                    if not len(parse_result.episode_numbers) and parse_result.season_number and not [ep for ep in
                                                                                                     episodes if
                                                                                                     ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                        logger.log(
                            u"The result " + title + " doesn't seem to be a valid season that we are trying to snatch, ignoring",
                            logger.DEBUG)
                        addCacheEntry = True
                    elif len(parse_result.episode_numbers) and not [ep for ep in episodes if
                                                                    ep.season == parse_result.season_number and ep.episode in parse_result.episode_numbers]:
                        logger.log(
                            u"The result " + title + " doesn't seem to be a valid episode that we are trying to snatch, ignoring",
                            logger.DEBUG)
                        addCacheEntry = True

                if not addCacheEntry:
                    # we just use the existing info for normal searches
                    actual_season = parse_result.season_number
                    actual_episodes = parse_result.episode_numbers
            else:
                # Air-by-date / sports shows: map the parsed air date back to a
                # season/episode pair via the database.
                sameDaySpecial = False
                if not parse_result.is_air_by_date:
                    logger.log(
                        u"This is supposed to be a date search but the result " + title + " didn't parse as one, skipping it",
                        logger.DEBUG)
                    addCacheEntry = True
                else:
                    airdate = parse_result.air_date.toordinal()
                    myDB = db.DBConnection()
                    sql_results = myDB.select(
                        "SELECT season, episode FROM tv_episodes WHERE showid = ? AND airdate = ?",
                        [showObj.indexerid, airdate])

                    if len(sql_results) == 2:
                        # Two rows means a season-0 "special" aired the same
                        # day as a regular episode; prefer the regular one.
                        if int(sql_results[0]['season']) == 0 and int(sql_results[1]['season']) != 0:
                            actual_season = int(sql_results[1]["season"])
                            actual_episodes = [int(sql_results[1]["episode"])]
                            sameDaySpecial = True
                        elif int(sql_results[1]['season']) == 0 and int(sql_results[0]['season']) != 0:
                            actual_season = int(sql_results[0]["season"])
                            actual_episodes = [int(sql_results[0]["episode"])]
                            sameDaySpecial = True
                    elif len(sql_results) != 1:
                        logger.log(
                            u"Tried to look up the date for the episode " + title + " but the database didn't give proper results, skipping it",
                            logger.WARNING)
                        addCacheEntry = True

                if not addCacheEntry and not sameDaySpecial:
                    actual_season = int(sql_results[0]["season"])
                    actual_episodes = [int(sql_results[0]["episode"])]

            # add parsed result to cache for usage later on
            if addCacheEntry:
                logger.log(u"Adding item from search to cache: " + title, logger.DEBUG)
                # pylint: disable=protected-access
                # Access to a protected member of a client class
                ci = self.cache._addCacheEntry(title, url, parse_result=parse_result)
                if ci is not None:
                    cl.append(ci)
                continue

            # make sure we want the episode
            wantEp = True
            for epNo in actual_episodes:
                if not showObj.wantEpisode(actual_season, epNo, quality, manualSearch, downCurQuality):
                    wantEp = False
                    break

            if not wantEp:
                logger.log(
                    u"Ignoring result " + title + " because we don't want an episode that is " +
                    Quality.qualityStrings[
                        quality], logger.INFO)

                continue

            logger.log(u"Found result " + title + " at " + url, logger.DEBUG)

            # make a result object
            epObj = []
            for curEp in actual_episodes:
                epObj.append(showObj.getEpisode(actual_season, curEp))

            result = self.getResult(epObj)
            result.show = showObj
            result.url = url
            result.name = title
            result.quality = quality
            result.release_group = release_group
            result.version = version
            result.content = None
            result.size = self._get_size(item)

            # Bucket by episode number, or by the multi-ep / season sentinels.
            if len(epObj) == 1:
                epNum = epObj[0].episode
                logger.log(u"Single episode result.", logger.DEBUG)
            elif len(epObj) > 1:
                epNum = MULTI_EP_RESULT
                logger.log(u"Separating multi-episode result to check for later - result contains episodes: " + str(
                    parse_result.episode_numbers), logger.DEBUG)
            elif len(epObj) == 0:
                epNum = SEASON_RESULT
                logger.log(u"Separating full season result to check for later", logger.DEBUG)

            if epNum not in results:
                results[epNum] = [result]
            else:
                results[epNum].append(result)

        # check if we have items to add to cache
        if len(cl) > 0:
            # pylint: disable=protected-access
            # Access to a protected member of a client class
            myDB = self.cache._getDB()
            myDB.mass_action(cl)

        return results
    def findPropers(self, search_date=None):
        # Wrap cached proper (PROPER/REPACK) entries newer than search_date
        # into Proper objects.
        results = self.cache.listPropers(search_date)
        return [classes.Proper(x['name'], x['url'], datetime.datetime.fromtimestamp(x['time']), self.show) for x in
                results]

    def seedRatio(self):
        '''
        Provider should override this value if custom seed ratio enabled
        It should return the value of the provider seed ratio
        '''
        return ''
class NZBProvider(GenericProvider):
    """GenericProvider specialised for Usenet (NZB) providers."""

    def __init__(self, name):
        GenericProvider.__init__(self, name)
        self.providerType = GenericProvider.NZB

    def isActive(self):
        # Active only when NZB searching is globally enabled AND this provider is on.
        return sickbeard.USE_NZBS and self.isEnabled()

    def _get_size(self, item):
        # Newznab-style feeds carry the size in the second 'links' entry's "length".
        try:
            size = item.get('links')[1].get('length', -1)
        except IndexError:
            size = -1
        if not size:
            logger.log(u"Size was not found in your provider response", logger.DEBUG)
        return int(size)
class TorrentProvider(GenericProvider):
    """GenericProvider specialised for torrent providers."""

    def __init__(self, name):
        GenericProvider.__init__(self, name)
        self.providerType = GenericProvider.TORRENT

    def isActive(self):
        # Active only when torrent searching is globally enabled AND this provider is on.
        return sickbeard.USE_TORRENTS and self.isEnabled()
def _get_title_and_url(self, item):
from feedparser.util import FeedParserDict
if isinstance(item, (dict, FeedParserDict)):
title = item.get('title', '')
download_url = item.get('url', '')
if not download_url:
download_url = item.get('link', '')
elif isinstance(item, (list, tuple)) and len(item) > 1:
title = item[0]
download_url = item[1]
# Temp global block `DIAMOND` releases
if title.endswith('DIAMOND'):
logger.log(u'Skipping DIAMOND release for mass fake releases.')
title = download_url = u'FAKERELEASE'
if title:
title = self._clean_title_from_provider(title)
if download_url:
download_url = download_url.replace('&', '&')
return (title, download_url)
def _get_size(self, item):
size = -1
if isinstance(item, dict):
size = item.get('size', -1)
elif isinstance(item, (list, tuple)) and len(item) > 2:
size = item[2]
# Make sure we didn't select seeds/leechers by accident
if not size or size < 1024*1024:
size = -1
return size
def _get_season_search_strings(self, ep_obj):
search_string = {'Season': []}
for show_name in set(show_name_helpers.allPossibleShowNames(self.show)):
if ep_obj.show.air_by_date or ep_obj.show.sports:
ep_string = show_name + ' ' + str(ep_obj.airdate).split('-')[0]
elif ep_obj.show.anime:
ep_string = show_name + ' ' + "%d" % ep_obj.scene_absolute_number
else:
ep_string = show_name + ' S%02d' % int(ep_obj.scene_season) # 1) showName.SXX
search_string['Season'].append(ep_string.encode('utf-8').strip())
return [search_string]
def _get_episode_search_strings(self, ep_obj, add_string=''):
search_string = {'Episode': []}
if not ep_obj:
return []
for show_name in set(show_name_helpers.allPossibleShowNames(ep_obj.show)):
ep_string = show_name + ' '
if ep_obj.show.air_by_date:
ep_string += str(ep_obj.airdate).replace('-', ' ')
elif ep_obj.show.sports:
ep_string += str(ep_obj.airdate).replace('-', ' ') + ('|', ' ')[len(self.proper_strings) > 1] + ep_obj.airdate.strftime('%b')
elif ep_obj.show.anime:
ep_string += "%02d" % int(ep_obj.scene_absolute_number)
else:
ep_string += sickbeard.config.naming_ep_type[2] % {'seasonnumber': ep_obj.scene_season,
'episodenumber': ep_obj.scene_episode}
if add_string:
ep_string = ep_string + ' %s' % add_string
search_string['Episode'].append(ep_string.encode('utf-8').strip())
return [search_string]
@staticmethod
def _clean_title_from_provider(title):
return (title or '').replace(' ', '.')
@property
def _custom_trackers(self):
return ('', '&tr=' + '&tr='.join(set([x.strip() for x in sickbeard.TRACKERS_LIST.split(',') if x.strip()])))[self.public] if sickbeard.TRACKERS_LIST else ''
def findPropers(self, search_date=datetime.datetime.today()):
results = []
myDB = db.DBConnection()
sqlResults = myDB.select(
'SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.airdate FROM tv_episodes AS e' +
' INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id)' +
' WHERE e.airdate >= ' + str(search_date.toordinal()) +
' AND e.status IN (' + ','.join([str(x) for x in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST]) + ')'
)
for sqlshow in sqlResults or []:
show = Show.find(sickbeard.showList, int(sqlshow["showid"]))
if show:
curEp = show.getEpisode(int(sqlshow["season"]), int(sqlshow["episode"]))
for term in self.proper_strings:
searchString = self._get_episode_search_strings(curEp, add_string=term)
for item in self._doSearch(searchString[0]):
title, url = self._get_title_and_url(item)
results.append(classes.Proper(title, url, datetime.datetime.today(), show))
return results
| gpl-3.0 |
ressu/SickGear | lib/hachoir_parser/audio/midi.py | 90 | 7912 | """
Musical Instrument Digital Interface (MIDI) audio file parser.
Documentation:
- Standard MIDI File Format, Dustin Caldwell (downloaded on wotsit.org)
Author: Victor Stinner
Creation: 27 december 2006
"""
from lib.hachoir_parser import Parser
from lib.hachoir_core.field import (FieldSet, Bits, ParserError,
String, UInt32, UInt24, UInt16, UInt8, Enum, RawBytes)
from lib.hachoir_core.endian import BIG_ENDIAN
from lib.hachoir_core.text_handler import textHandler, hexadecimal
from lib.hachoir_core.tools import createDict, humanDurationNanosec
from lib.hachoir_parser.common.tracker import NOTE_NAME
MAX_FILESIZE = 10 * 1024 * 1024
class Integer(Bits):
    """MIDI variable-length quantity: 7 data bits per byte, high bit set = continuation."""
    def __init__(self, parent, name, description=None):
        # Start with one byte; _size grows as continuation bytes are consumed.
        Bits.__init__(self, parent, name, 8, description)
        stream = parent.stream
        addr = self.absolute_address
        value = 0
        while True:
            bits = stream.readBits(addr, 8, parent.endian)
            # Low 7 bits carry data; bit 7 set means another byte follows.
            value = (value << 7) + (bits & 127)
            if not(bits & 128):
                break
            addr += 8
            self._size += 8
            if 32 < self._size:
                raise ParserError("Integer size is bigger than 32-bit")
        # Freeze the decoded value for hachoir's lazy value machinery.
        self.createValue = lambda: value
def parseNote(parser):
    # Payload of note-on/off and key after-touch events: note number + velocity.
    yield Enum(UInt8(parser, "note", "Note number"), NOTE_NAME)
    yield UInt8(parser, "velocity")
def parseControl(parser):
    # Payload of a control-change event.
    yield UInt8(parser, "control", "Controller number")
    yield UInt8(parser, "value", "New value")
def parsePatch(parser):
    # Payload of a program (patch) change event.
    yield UInt8(parser, "program", "New program number")
def parseChannel(parser):
    # Payload of a channel after-touch event.
    yield UInt8(parser, "channel", "Channel number")
def parsePitch(parser):
    # Payload of a pitch-wheel change: 14-bit value split across two bytes.
    yield UInt8(parser, "bottom", "(least sig) 7 bits of value")
    yield UInt8(parser, "top", "(most sig) 7 bits of value")
def parseText(parser, size):
    # Payload of text-style meta events (lyric, marker, copyright, ...).
    yield String(parser, "text", size)
def formatTempo(field):
    # Display helper: microseconds-per-quarter-note shown as a human duration.
    return humanDurationNanosec(field.value*1000)
def parseTempo(parser, size):
    # Payload of a set-tempo meta event (24-bit microseconds per quarter note).
    yield textHandler(UInt24(parser, "microsec_quarter", "Microseconds per quarter note"), formatTempo)
def parseTimeSignature(parser, size):
    # Payload of a time-signature meta event.
    yield UInt8(parser, "numerator", "Numerator of time signature")
    yield UInt8(parser, "denominator", "denominator of time signature 2=quarter 3=eighth, etc.")
    yield UInt8(parser, "nb_tick", "Number of ticks in metronome click")
    yield UInt8(parser, "nb_32nd_note", "Number of 32nd notes to the quarter note")
class Command(FieldSet):
    """A single MIDI event: variable-length delta time, status byte, then payload."""
    # Map of channel-voice status bytes (0x80..0xEF) to (description, payload parser),
    # expanded for each of the 16 MIDI channels.
    COMMAND = {}
    for channel in xrange(16):
        COMMAND[0x80+channel] = ("Note off (channel %u)" % channel, parseNote)
        COMMAND[0x90+channel] = ("Note on (channel %u)" % channel, parseNote)
        COMMAND[0xA0+channel] = ("Key after-touch (channel %u)" % channel, parseNote)
        COMMAND[0xB0+channel] = ("Control change (channel %u)" % channel, parseControl)
        COMMAND[0xC0+channel] = ("Program (patch) change (channel %u)" % channel, parsePatch)
        COMMAND[0xD0+channel] = ("Channel after-touch (channel %u)" % channel, parseChannel)
        COMMAND[0xE0+channel] = ("Pitch wheel change (channel %u)" % channel, parsePitch)
    # Projections of COMMAND: value -> description, value -> parser.
    COMMAND_DESC = createDict(COMMAND, 0)
    COMMAND_PARSER = createDict(COMMAND, 1)
    META_COMMAND_TEXT = 1
    META_COMMAND_NAME = 3
    # Meta events (status byte 0xFF): meta-type byte -> (description, parser or None).
    META_COMMAND = {
        0x00: ("Sets the track's sequence number", None),
        0x01: ("Text event", parseText),
        0x02: ("Copyright info", parseText),
        0x03: ("Sequence or Track name", parseText),
        0x04: ("Track instrument name", parseText),
        0x05: ("Lyric", parseText),
        0x06: ("Marker", parseText),
        0x07: ("Cue point", parseText),
        0x2F: ("End of the track", None),
        0x51: ("Set tempo", parseTempo),
        0x58: ("Time Signature", parseTimeSignature),
        0x59: ("Key signature", None),
        0x7F: ("Sequencer specific information", None),
    }
    META_COMMAND_DESC = createDict(META_COMMAND, 0)
    META_COMMAND_PARSER = createDict(META_COMMAND, 1)
    def createFields(self):
        yield Integer(self, "time", "Delta time in ticks")
        yield Enum(textHandler(UInt8(self, "command"), hexadecimal), self.COMMAND_DESC)
        command = self["command"].value
        if command == 0xFF:
            # Meta event: type byte, length byte, then typed (or raw) payload.
            yield Enum(textHandler(UInt8(self, "meta_command"), hexadecimal), self.META_COMMAND_DESC)
            yield UInt8(self, "data_len")
            size = self["data_len"].value
            if size:
                command = self["meta_command"].value
                if command in self.META_COMMAND_PARSER:
                    parser = self.META_COMMAND_PARSER[command]
                else:
                    parser = None
                if parser:
                    for field in parser(self, size):
                        yield field
                else:
                    # Unknown/opaque meta payload: keep it as raw bytes.
                    yield RawBytes(self, "data", size)
        else:
            # Channel-voice event: dispatch on the status byte.
            # NOTE(review): running status (data byte < 0x80 reusing the previous
            # status) is not handled and raises ParserError -- confirm intended.
            if command not in self.COMMAND_PARSER:
                raise ParserError("Unknown command: %s" % self["command"].display)
            parser = self.COMMAND_PARSER[command]
            for field in parser(self):
                yield field
    def createDescription(self):
        if "meta_command" in self:
            return self["meta_command"].display
        else:
            return self["command"].display
class Track(FieldSet):
    """One MIDI track chunk: 'MTrk' marker, 32-bit payload size, then a command stream."""

    def __init__(self, *args):
        FieldSet.__init__(self, *args)
        # Total size in bits: 8-byte chunk header plus the declared payload length.
        self._size = (8 + self["size"].value) * 8

    def createFields(self):
        yield String(self, "marker", 4, "Track marker (MTrk)", charset="ASCII")
        yield UInt32(self, "size")
        # Parse the payload as a sequence of MIDI commands.
        # (The original wrapped this in ``if True:`` with an unreachable
        # raw-bytes fallback in the else branch; the dead code is removed.)
        while not self.eof:
            yield Command(self, "command[]")

    def createDescription(self):
        command = self["command[0]"]
        # Use the first text/name meta event as the track description, if any.
        if "meta_command" in command \
        and command["meta_command"].value in (Command.META_COMMAND_TEXT, Command.META_COMMAND_NAME) \
        and "text" in command:
            return command["text"].value.strip("\r\n")
        else:
            return ""
class Header(FieldSet):
    """MThd chunk payload: size, SMF format, track count and time division."""
    static_size = 10*8
    # Values of the "file_format" field (Standard MIDI File formats 0/1/2).
    FILE_FORMAT = {
        0: "Single track",
        1: "Multiple tracks, synchronous",
        2: "Multiple tracks, asynchronous",
    }
    def createFields(self):
        yield UInt32(self, "size")
        yield Enum(UInt16(self, "file_format"), self.FILE_FORMAT)
        yield UInt16(self, "nb_track")
        yield UInt16(self, "delta_time", "Delta-time ticks per quarter note")
    def createDescription(self):
        return "%s; %s tracks" % (
            self["file_format"].display, self["nb_track"].value)
class MidiFile(Parser):
    """Top-level parser for Standard MIDI Files: MThd header followed by MTrk tracks."""
    MAGIC = "MThd"
    PARSER_TAGS = {
        "id": "midi",
        "category": "audio",
        "file_ext": ["mid", "midi"],
        # NOTE(review): "audio/mime" looks like a typo for "audio/midi" -- confirm upstream.
        "mime": (u"audio/mime", ),
        "magic": ((MAGIC, 0),),
        "min_size": 64,
        "description": "MIDI audio"
    }
    endian = BIG_ENDIAN
    def validate(self):
        # A valid file starts with "MThd" and declares a 6-byte header payload.
        if self.stream.readBytes(0, 4) != self.MAGIC:
            return "Invalid signature"
        if self["header/size"].value != 6:
            return "Invalid header size"
        return True
    def createFields(self):
        yield String(self, "signature", 4, r"MIDI signature (MThd)", charset="ASCII")
        yield Header(self, "header")
        while not self.eof:
            yield Track(self, "track[]")
    def createDescription(self):
        return "MIDI audio: %s" % self["header"].description
    def createContentSize(self):
        # Locate the last declared track, then search for its "End of track"
        # meta event (FF 2F 00) to bound the content size.
        count = self["/header/nb_track"].value - 1
        start = self["track[%u]" % count].absolute_address
        # Search "End of track" of last track
        end = self.stream.searchBytes("\xff\x2f\x00", start, MAX_FILESIZE*8)
        if end is not None:
            return end + 3*8
        return None
| gpl-3.0 |
uzumaxy/pyprimes | src/pyprimes/strategic.py | 3 | 5593 | # -*- coding: utf-8 -*-
## Part of the pyprimes.py package.
##
## Copyright © 2014 Steven D'Aprano.
## See the file __init__.py for the licence terms for this software.
"""The module implements various prime generating and testing functions using
the Strategy design pattern, allowing the caller to easily experiment with
different algorithms and implementations.
The functions in this module will take at least one mandatory argument,
usually named either ``strategy`` or ``prover``.
strategy:
The ``strategy`` argument is used to delegate to a prime generator.
It must be a function which takes no arguments and returns an
iterator that yields primes. (A generator function is a convenient
way to manage this.)
This module makes no check that the strategy function actually
yields prime numbers. It is the caller's responsibility to ensure
that is the case.
prover:
The ``prover`` argument is used to delegate to a primality testing
function. It must be a function which takes a single argument, an
integer, and returns one of the following flags:
0 or False Number is definitely nonprime.
1 or True Number is definitely prime.
2 Number is a probable prime or pseudoprime.
Any other result will raise TypeError or ValueError.
This module makes no check to confirm that the prover function
actually tests for primality. It is the caller's responsibility to
ensure that is the case.
"""
from __future__ import division
from pyprimes.compat23 import next
__all__ = ['is_prime', 'next_prime', 'prev_prime', 'primes',
'trial_division',
]
# === Primality testing ===
def is_prime(prover, n):
    """Perform a primality test on n using the given prover.

    See the docstring for this module for specifications for
    the ``prover`` function.

    >>> import pyprimes.awful
    >>> is_prime(pyprimes.awful.isprime, 103)
    True
    >>> is_prime(pyprimes.awful.isprime, 105)
    False
    """
    flag = prover(n)
    # Pass genuine booleans straight through.
    if flag is True:
        return True
    if flag is False:
        return False
    # Only actual ints (not bool subclasses) may be tri-state flags.
    if type(flag) is not int:
        raise TypeError('expected bool or int but prover returned %r' % flag)
    if flag not in (0, 1, 2):
        raise ValueError('prover returned invalid int flag %d' % flag)
    return flag
def trial_division(strategy, n):
    """Perform a trial division primality test using the given strategy.

    See this module's docstring for specifications for the ``strategy``
    function.

    This performs an exact but slow primality test using trial division
    by dividing by primes only. It returns True if the argument is a
    prime number, otherwise False.

    >>> import pyprimes.awful
    >>> trial_division(pyprimes.awful.primes0, 11)
    True
    >>> trial_division(pyprimes.awful.primes0, 12)
    False

    For large values of n, this may be slow or run out of memory.
    """
    if n < 2:
        return False
    if n == 2:
        return True
    if n % 2 == 0:
        return False
    # Divide by successive primes from the strategy, stopping once the
    # prime's square exceeds n. This exact integer comparison replaces the
    # previous float ``n**0.5`` limit (flagged FIXME), which loses precision
    # for very large n.
    for divisor in strategy():
        if divisor * divisor > n:
            break
        if n % divisor == 0:
            return False
    return True
# === Prime generators ===
def primes(strategy, start=None, end=None):
    """Yield primes using the given strategy function.

    See this module's docstring for specifications for the ``strategy``
    function.

    If the optional arguments ``start`` and ``end`` are given, they must be
    either None or an integer. Only primes in the half-open range ``start``
    (inclusive) to ``end`` (exclusive) are yielded. If ``start`` is None,
    the range begins at the lowest prime (namely 2), if ``end`` is None,
    the range has no upper limit.

    >>> from pyprimes.awful import turner
    >>> list(primes(turner, 6, 30))
    [7, 11, 13, 17, 19, 23, 29]
    """
    it = strategy()
    try:
        p = next(it)
        if start is not None:
            # Drop the primes below start as fast as possible, then yield.
            while p < start:
                p = next(it)
        if end is not None:
            while p < end:
                yield p
                p = next(it)
        else:
            while True:
                yield p
                p = next(it)
    except StopIteration:
        # The strategy is exhausted. Under PEP 479 (Python 3.7+) a leaked
        # StopIteration inside a generator becomes RuntimeError, so it must
        # be caught and converted to a clean return here.
        return
def next_prime(prover, n):
    """Return the first prime number strictly greater than n.

    See the docstring for this module for specifications for
    the ``prover`` function.

    >>> import pyprimes.awful
    >>> next_prime(pyprimes.awful.isprime, 97)
    101
    """
    if n < 2:
        return 2
    # Step to the next odd candidate, then walk odd numbers upward.
    candidate = n + 1 if n % 2 == 0 else n + 2
    while not is_prime(prover, candidate):
        candidate += 2
    return candidate
def prev_prime(prover, n):
    """Return the first prime number strictly less than n.

    See the docstring for this module for specifications for
    the ``prover`` function.

    >>> import pyprimes.awful
    >>> prev_prime(pyprimes.awful.isprime, 100)
    97

    If there are no primes less than n, raises ValueError.
    """
    if n <= 2:
        raise ValueError('There are no smaller primes than 2.')
    # Step to the previous odd candidate, then walk odd numbers downward.
    candidate = n - 2 if n % 2 == 1 else n - 1
    while not is_prime(prover, candidate):
        candidate -= 2
    return candidate
| mit |
diefans/ferment | src/ferment/scripts.py | 1 | 2313 | import click
import docker
from wheezy.template.engine import Engine
from wheezy.template.ext.core import CoreExtension
from wheezy.template.ext.code import CodeExtension
from wheezy.template.loader import DictLoader
from . import templates
import logging
LOG = logging.getLogger(__name__)
LOG_LEVELS = {
"info": logging.INFO,
"warn": logging.WARN,
"debug": logging.DEBUG,
"error": logging.ERROR,
"critical": logging.CRITICAL,
}
class Context(dict):
    """dict subclass exposing its keys as attributes (ctx.x is ctx['x'])."""

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        # Point the instance attribute namespace at the mapping itself, so
        # attribute and item access share the same storage.
        self.__dict__ = self
class FermConfig(object):
    """Renders ferm firewall configuration fragments from wheezy templates."""

    def __init__(self, path):
        self.path = path
        sources = {
            'docker': templates.docker,
        }
        engine = Engine(
            loader=DictLoader(sources),
            extensions=[
                CoreExtension(),
                CodeExtension()
            ]
        )
        # Pre-compile every known template once at construction time.
        self.templates = dict(
            (name, engine.get_template(name)) for name in sources
        )

    def get_config(self, config):
        """Render the docker template against the given context mapping."""
        return self.templates['docker'].render(config)
@click.group()
@click.option(
    "--log-level",
    type=click.Choice([k for k, v in sorted(LOG_LEVELS.items(), key=lambda x: x[1])]),
    default="info",
    help="Logging level.")
@click.pass_context
def run(ctx, log_level):
    # Root command group: configure global log verbosity and seed the shared
    # attribute-style Context that subcommands populate via ctx.obj.
    logging.basicConfig(level=LOG_LEVELS[log_level])
    ctx.obj = Context()
@run.group("docker")
@click.option(
    "api", "--docker", "-d",
    type=click.Path(),
    default="unix://var/run/docker.sock",
    help="The docker api socket."
)
@click.option(
    "--cidr", "-c", default="172.18.0.0/16",
    help="Docker CIDR."
)
@click.option(
    "--interface", "-i", default="docker0",
    help="Docker interface."
)
@click.pass_context
def docker_grp(ctx, api, cidr, interface):
    # Subcommand group for docker-related rules: stash a docker API client
    # plus the network parameters on the shared context for child commands.
    ctx.obj.client = docker.Client(base_url=api)
    ctx.obj.cidr = cidr
    ctx.obj.interface = interface
@docker_grp.command(name="config")
@click.pass_context
def docker_config(ctx):
    # Render the ferm config fragment covering all currently known containers
    # and print it to stdout.
    ferm = FermConfig(None)
    # get all containers
    containers = ctx.obj.client.containers()
    # Inspect each container to obtain full details (network settings, ports).
    ctx.obj.containers = [
        ctx.obj.client.inspect_container(container['Id'])
        for container in containers
    ]
    click.echo(ferm.get_config(ctx.obj))
| apache-2.0 |
tedsunnyday/SE-Server | server/lib/passlib/utils/pbkdf2.py | 23 | 14682 | """passlib.pbkdf2 - PBKDF2 support
this module is getting increasingly poorly named.
maybe rename to "kdf" since it's getting more key derivation functions added.
"""
#=============================================================================
# imports
#=============================================================================
# core
import hashlib
import logging; log = logging.getLogger(__name__)
import re
from struct import pack
from warnings import warn
# site
try:
from M2Crypto import EVP as _EVP
except ImportError:
_EVP = None
# pkg
from passlib.exc import PasslibRuntimeWarning, ExpectedTypeError
from passlib.utils import join_bytes, to_native_str, bytes_to_int, int_to_bytes, join_byte_values
from passlib.utils.compat import b, bytes, BytesIO, irange, callable, int_types
# local
__all__ = [
"get_prf",
"pbkdf1",
"pbkdf2",
]
#=============================================================================
# hash helpers
#=============================================================================
# known hash names
_nhn_formats = dict(hashlib=0, iana=1)
_nhn_hash_names = [
# (hashlib/ssl name, iana name or standin, ... other known aliases)
# hashes with official IANA-assigned names
# (as of 2012-03 - http://www.iana.org/assignments/hash-function-text-names)
("md2", "md2"),
("md5", "md5"),
("sha1", "sha-1"),
("sha224", "sha-224", "sha2-224"),
("sha256", "sha-256", "sha2-256"),
("sha384", "sha-384", "sha2-384"),
("sha512", "sha-512", "sha2-512"),
# hashlib/ssl-supported hashes without official IANA names,
# hopefully compatible stand-ins have been chosen.
("md4", "md4"),
("sha", "sha-0", "sha0"),
("ripemd", "ripemd"),
("ripemd160", "ripemd-160"),
]
# cache for norm_hash_name()
_nhn_cache = {}
def norm_hash_name(name, format="hashlib"):
    """Normalize hash function name
    :arg name:
        Original hash function name.
        This name can be a Python :mod:`~hashlib` digest name,
        a SCRAM mechanism name, IANA assigned hash name, etc.
        Case is ignored, and underscores are converted to hyphens.
    :param format:
        Naming convention to normalize to.
        Possible values are:
        * ``"hashlib"`` (the default) - normalizes name to be compatible
          with Python's :mod:`!hashlib`.
        * ``"iana"`` - normalizes name to IANA-assigned hash function name.
          for hashes which IANA hasn't assigned a name for, issues a warning,
          and then uses a heuristic to give a "best guess".
    :returns:
        Hash name, returned as native :class:`!str`.
    """
    # check cache
    try:
        idx = _nhn_formats[format]
    except KeyError:
        raise ValueError("unknown format: %r" % (format,))
    try:
        return _nhn_cache[name][idx]
    except KeyError:
        pass
    orig = name
    # normalize input
    if not isinstance(name, str):
        name = to_native_str(name, 'utf-8', 'hash name')
    name = re.sub("[_ /]", "-", name.strip().lower())
    if name.startswith("scram-"):
        name = name[6:]
    if name.endswith("-plus"):
        name = name[:-5]
    # look through standard names and known aliases
    def check_table(name):
        for row in _nhn_hash_names:
            if name in row:
                _nhn_cache[orig] = row
                return row[idx]
    result = check_table(name)
    if result:
        return result
    # try to clean name up, and recheck table
    # NOTE: raw string -- the original non-raw "\d" was an invalid escape
    # sequence (DeprecationWarning on Python 3.6+).
    m = re.match(r"^(?P<name>[a-z]+)-?(?P<rev>\d)?-?(?P<size>\d{3,4})?$", name)
    if m:
        name, rev, size = m.group("name", "rev", "size")
        if rev:
            name += rev
        if size:
            name += "-" + size
        result = check_table(name)
        if result:
            return result
    # else we've done what we can
    warn("norm_hash_name(): unknown hash: %r" % (orig,), PasslibRuntimeWarning)
    name2 = name.replace("-", "")
    row = _nhn_cache[orig] = (name2, name)
    return row[idx]
# TODO: get_hash() func which wraps norm_hash_name(), hashlib.<attr>, and hashlib.new
#=============================================================================
# general prf lookup
#=============================================================================
_BNULL = b('\x00')
_XY_DIGEST = b(',\x1cb\xe0H\xa5\x82M\xfb>\xd6\x98\xef\x8e\xf9oQ\x85\xa3i')
_trans_5C = join_byte_values((x ^ 0x5C) for x in irange(256))
_trans_36 = join_byte_values((x ^ 0x36) for x in irange(256))
def _get_hmac_prf(digest):
    "helper to return HMAC prf for specific digest; returns (prf_func, digest_size)"
    def tag_wrapper(prf):
        # Give the generated prf a friendly name/docstring for introspection.
        prf.__name__ = "hmac_" + digest
        prf.__doc__ = ("hmac_%s(key, msg) -> digest;"
                       " generated by passlib.utils.pbkdf2.get_prf()" %
                       digest)
    if _EVP and digest == "sha1":
        # use m2crypto function directly for sha1, since that's it's default digest
        try:
            result = _EVP.hmac(b('x'),b('y'))
        except ValueError: # pragma: no cover
            pass
        else:
            if result == _XY_DIGEST:
                return _EVP.hmac, 20
        # don't expect to ever get here, but will fall back to pure-python if we do.
        warn("M2Crypto.EVP.HMAC() returned unexpected result " # pragma: no cover -- sanity check
             "during Passlib self-test!", PasslibRuntimeWarning)
    elif _EVP:
        # use m2crypto if it's present and supports requested digest
        try:
            result = _EVP.hmac(b('x'), b('y'), digest)
        except ValueError:
            pass
        else:
            # it does. so use M2Crypto's hmac & digest code
            hmac_const = _EVP.hmac
            def prf(key, msg):
                return hmac_const(key, msg, digest)
            digest_size = len(result)
            tag_wrapper(prf)
            return prf, digest_size
    # fall back to hashlib-based implementation
    digest_const = getattr(hashlib, digest, None)
    if not digest_const:
        raise ValueError("unknown hash algorithm: %r" % (digest,))
    tmp = digest_const()
    block_size = tmp.block_size
    assert block_size >= 16, "unacceptably low block size"
    digest_size = tmp.digest_size
    del tmp
    def prf(key, msg):
        # simplified version of stdlib's hmac module
        if len(key) > block_size:
            key = digest_const(key).digest()
        key += _BNULL * (block_size - len(key))
        tmp = digest_const(key.translate(_trans_36) + msg).digest()
        return digest_const(key.translate(_trans_5C) + tmp).digest()
    tag_wrapper(prf)
    return prf, digest_size
# cache mapping prf name/func -> (func, digest_size)
_prf_cache = {}
def _clear_prf_cache():
    "helper for unit tests -- resets the module-level prf lookup cache"
    _prf_cache.clear()
def get_prf(name):
    """lookup pseudo-random family (prf) by name.
    :arg name:
        this must be the name of a recognized prf.
        currently this only recognizes names with the format
        :samp:`hmac-{digest}`, where :samp:`{digest}`
        is the name of a hash function such as
        ``md5``, ``sha256``, etc.
        this can also be a callable with the signature
        ``prf(secret, message) -> digest``,
        in which case it will be returned unchanged.
    :raises ValueError: if the name is not known
    :raises TypeError: if the name is not a callable or string
    :returns:
        a tuple of :samp:`({func}, {digest_size})`.
        * :samp:`{func}` is a function implementing
          the specified prf, and has the signature
          ``func(secret, message) -> digest``.
        * :samp:`{digest_size}` is an integer indicating
          the number of bytes the function returns.
    usage example::
        >>> from passlib.utils.pbkdf2 import get_prf
        >>> hmac_sha256, dsize = get_prf("hmac-sha256")
        >>> hmac_sha256
        <function hmac_sha256 at 0x1e37c80>
        >>> dsize
        32
        >>> digest = hmac_sha256('password', 'message')
    this function will attempt to return the fastest implementation
    it can find; if M2Crypto is present, and supports the specified prf,
    :func:`M2Crypto.EVP.hmac` will be used behind the scenes.
    """
    global _prf_cache
    # fast path: this name was already resolved once.
    if name in _prf_cache:
        return _prf_cache[name]
    if isinstance(name, str):
        if name.startswith("hmac-") or name.startswith("hmac_"):
            refval = None  # (placeholder comment removed)
            retval = _get_hmac_prf(name[5:])
        else:
            raise ValueError("unknown prf algorithm: %r" % (name,))
    elif callable(name):
        # assume it's a callable, use it directly
        digest_size = len(name(b('x'),b('y')))
        retval = (name, digest_size)
    else:
        raise ExpectedTypeError(name, "str or callable", "prf name")
    _prf_cache[name] = retval
    return retval
#=============================================================================
# pbkdf1 support
#=============================================================================
def pbkdf1(secret, salt, rounds, keylen=None, hash="sha1"):
    """pkcs#5 password-based key derivation v1.5
    :arg secret: passphrase to use to generate key
    :arg salt: salt string to use when generating key
    :param rounds: number of rounds to use to generate key
    :arg keylen: number of bytes to generate (if ``None``, uses digest's native size)
    :param hash:
        hash function to use. must be name of a hash recognized by hashlib.
    :returns:
        raw bytes of generated key
    .. note::
        This algorithm has been deprecated, new code should use PBKDF2.
        Among other limitations, ``keylen`` cannot be larger
        than the digest size of the specified hash.
    """
    # validate secret & salt
    if not isinstance(secret, bytes):
        raise ExpectedTypeError(secret, "bytes", "secret")
    if not isinstance(salt, bytes):
        raise ExpectedTypeError(salt, "bytes", "salt")
    # validate rounds
    if not isinstance(rounds, int_types):
        raise ExpectedTypeError(rounds, "int", "rounds")
    if rounds < 1:
        raise ValueError("rounds must be at least 1")
    # resolve hash
    try:
        hash_const = getattr(hashlib, hash)
    except AttributeError:
        # check for ssl hash
        # NOTE: if hash unknown, new() will throw ValueError, which we'd just
        #       reraise anyways; so instead of checking, we just let it get
        #       thrown during first use, below
        # TODO: use builtin md4 class if hashlib doesn't have it.
        def hash_const(msg):
            return hashlib.new(hash, msg)
    # prime pbkdf1 loop, get block size
    block = hash_const(secret + salt).digest()
    # validate keylen
    if keylen is None:
        keylen = len(block)
    elif not isinstance(keylen, int_types):
        raise ExpectedTypeError(keylen, "int or None", "keylen")
    elif keylen < 0:
        raise ValueError("keylen must be at least 0")
    elif keylen > len(block):
        raise ValueError("keylength too large for digest: %r > %r" %
                         (keylen, len(block)))
    # main pbkdf1 loop: iterated hashing, then truncate to keylen
    for _ in irange(rounds-1):
        block = hash_const(block).digest()
    return block[:keylen]
#=============================================================================
# pbkdf2
#=============================================================================
MAX_BLOCKS = 0xffffffff # 2**32-1
MAX_HMAC_SHA1_KEYLEN = MAX_BLOCKS*20
# NOTE: the pbkdf2 spec does not specify a maximum number of rounds.
# however, many of the hashes in passlib are currently clamped
# at the 32-bit limit, just for sanity. once realistic pbkdf2 rounds
# start approaching 24 bits, this limit will be raised.
def pbkdf2(secret, salt, rounds, keylen=None, prf="hmac-sha1"):
    """pkcs#5 password-based key derivation v2.0
    :arg secret: passphrase to use to generate key
    :arg salt: salt string to use when generating key
    :param rounds: number of rounds to use to generate key
    :arg keylen:
        number of bytes to generate.
        if set to ``None``, will use digest size of selected prf.
    :param prf:
        psuedo-random family to use for key strengthening.
        this can be any string or callable accepted by :func:`get_prf`.
        this defaults to ``"hmac-sha1"`` (the only prf explicitly listed in
        the PBKDF2 specification)
    :returns:
        raw bytes of generated key
    """
    # validate secret & salt
    if not isinstance(secret, bytes):
        raise ExpectedTypeError(secret, "bytes", "secret")
    if not isinstance(salt, bytes):
        raise ExpectedTypeError(salt, "bytes", "salt")
    # validate rounds
    if not isinstance(rounds, int_types):
        raise ExpectedTypeError(rounds, "int", "rounds")
    if rounds < 1:
        raise ValueError("rounds must be at least 1")
    # validate keylen
    if keylen is not None:
        if not isinstance(keylen, int_types):
            raise ExpectedTypeError(keylen, "int or None", "keylen")
        elif keylen < 0:
            raise ValueError("keylen must be at least 0")
    # special case for m2crypto + hmac-sha1
    if prf == "hmac-sha1" and _EVP:
        if keylen is None:
            keylen = 20
        # NOTE: doing check here, because M2crypto won't take 'long' instances
        # (which this is when running under 32bit)
        if keylen > MAX_HMAC_SHA1_KEYLEN:
            raise ValueError("key length too long for digest")
        # NOTE: as of 2012-4-4, m2crypto has buffer overflow issue
        # which may cause segfaults if keylen > 32 (EVP_MAX_KEY_LENGTH).
        # therefore we're avoiding m2crypto for large keys until that's fixed.
        # see https://bugzilla.osafoundation.org/show_bug.cgi?id=13052
        if keylen < 32:
            return _EVP.pbkdf2(secret, salt, rounds, keylen)
    # resolve prf
    prf_func, digest_size = get_prf(prf)
    if keylen is None:
        keylen = digest_size
    # figure out how many blocks we'll need (ceiling division)
    block_count = (keylen+digest_size-1)//digest_size
    if block_count >= MAX_BLOCKS:
        raise ValueError("key length too long for digest")
    # build up result from blocks
    def gen():
        # Each block T_i is the XOR of `rounds` successive prf outputs,
        # seeded with salt || INT(i+1) (RFC 2898 section 5.2).
        for i in irange(block_count):
            digest = prf_func(secret, salt + pack(">L", i+1))
            accum = bytes_to_int(digest)
            for _ in irange(rounds-1):
                digest = prf_func(secret, digest)
                accum ^= bytes_to_int(digest)
            yield int_to_bytes(accum, digest_size)
    return join_bytes(gen())[:keylen]
#=============================================================================
# eof
#=============================================================================
| apache-2.0 |
eclee25/flu-SDI-exploratory-age | scripts/create_fluseverity_figs/export_zOR_classif.py | 1 | 10068 | #!/usr/bin/python
##############################################
###Python template
###Author: Elizabeth Lee
###Date: 6/18/14
###Function: Export zOR retrospective and early warning classifications into csv file format (SDI and ILINet, national and regional for SDI)
### Use nation-level peak-based retrospective classification for SDI region analysis
###Import data: R_export/OR_zip3_week_outpatient_cl.csv, R_export/allpopstat_zip3_season_cl.csv
#### These data were cleaned with data_extraction/clean_OR_hhsreg_week_outpatient.R and exported with OR_zip3_week.sql
#### allpopstat_zip3_season_cl.csv includes child, adult, and other populations; popstat_zip3_season_cl.csv includes only child and adult populations
###Command Line: python export_zOR_classif.py
##############################################
### notes ###
# Incidence per 100,000 is normalized by total population by second calendar year of the flu season
### packages/modules ###
import csv
## local modules ##
import functions as fxn
### data structures ###
### called/local plotting parameters ###
nw = fxn.gp_normweeks # number of normalization weeks in baseline period
### functions ###
def print_dict_to_file(dic, filename):
    """Write national classifications to CSV: one row of (season, retro, early) per season."""
    with open(filename, 'w+') as out:
        out.write("season,mn_retro,mn_early\n")
        for season, (retro, early) in dic.items():
            out.write("%s,%s,%s\n" % (season, retro, early))
def print_dict_to_file2(dic, filename):
    """Write regional classifications to CSV: (season, region) keys, (retro, early) values."""
    with open(filename, 'w+') as out:
        out.write("season,region,mn_retro,mn_early\n")
        for (season, region), (retro, early) in dic.items():
            out.write("%s,%s,%s,%s\n" % (season, region, retro, early))
def print_dict_to_file3(dic, filename):
    """Write state-level zOR classifications to a CSV file.

    dic maps (season, state) -> (mean retrospective zOR, mean early warning zOR).
    """
    header = 'season,state,mn_retro,mn_early\n'
    with open(filename, 'w+') as outfile:
        outfile.write(header)
        for key, means in dic.items():
            outfile.write("%s,%s,%s,%s\n" % (key[0], key[1], means[0], means[1]))
##############################################
# SDI NATIONAL
# national files
# NOTE(review): absolute Dropbox paths are machine-specific; the script only
# runs on the original author's directory layout. File handles opened here
# are never closed (script-style code; the OS reclaims them on exit).
incidin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/SQL_export/OR_allweeks_outpatient.csv','r')
incid = csv.reader(incidin, delimiter=',')
popin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/SQL_export/totalpop_age.csv', 'r')
pop = csv.reader(popin, delimiter=',')
thanksin=open('/home/elee/Dropbox/My_Bansal_Lab/Clean_Data_for_Import/ThanksgivingWeekData_cl.csv', 'r')
thanksin.readline() # remove header
thanks=csv.reader(thanksin, delimiter=',')
# dict_wk[week] = seasonnum, dict_incid[week] = ILI cases per 10,000 in US population in second calendar year of flu season, dict_OR[week] = OR
d_wk, d_incid, d_OR = fxn.week_OR_processing(incid, pop)
d_zOR = fxn.week_zOR_processing(d_wk, d_OR)
# d_incid53ls[seasonnum] = [ILI wk 40 per 100000, ILI wk 41 per 100000,...], d_OR53ls[seasonnum] = [OR wk 40, OR wk 41, ...], d_zOR53ls[seasonnum] = [zOR wk 40, zOR wk 41, ...]
d_incid53ls, d_OR53ls, d_zOR53ls = fxn.week_plotting_dicts(d_wk, d_incid, d_OR, d_zOR)
# d_classifzOR[seasonnum] = (mean retrospective zOR, mean early warning zOR)
d_classifzOR = fxn.classif_zOR_processing(d_wk, d_incid53ls, d_zOR53ls, thanks)
# ##############################################
# # ILINet NATIONAL
# # national files
# incidin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/CDC_Source/Import_Data/all_cdc_source_data.csv','r')
# incidin.readline() # remove header
# incid = csv.reader(incidin, delimiter=',')
# popin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/Census/Import_Data/totalpop_age_Census_98-14.csv', 'r')
# pop = csv.reader(popin, delimiter=',')
# thanksin=open('/home/elee/Dropbox/My_Bansal_Lab/Clean_Data_for_Import/ThanksgivingWeekData_cl.csv', 'r')
# thanksin.readline() # remove header
# thanks=csv.reader(thanksin, delimiter=',')
# # dict_wk[week] = seasonnum, dict_incid[week] = ILI cases per 10,000 in US population in second calendar year of flu season, dict_OR[week] = OR
# d_wk, d_incid, d_OR = fxn.ILINet_week_OR_processing(incid, pop)
# d_zOR = fxn.week_zOR_processing(d_wk, d_OR)
# # d_incid53ls[seasonnum] = [ILI wk 40 per 100000, ILI wk 41 per 100000,...], d_OR53ls[seasonnum] = [OR wk 40, OR wk 41, ...], d_zOR53ls[seasonnum] = [zOR wk 40, zOR wk 41, ...]
# d_incid53ls, d_OR53ls, d_zOR53ls = fxn.week_plotting_dicts(d_wk, d_incid, d_OR, d_zOR)
# # d_ILINet_classifzOR[seasonnum] = (mean retrospective zOR, mean early warning zOR)
# d_ILINet_classifzOR = fxn.classif_zOR_processing(d_wk, d_incid53ls, d_zOR53ls, thanks)
##############################################
# SDI REGION: nation-level peak-based retrospective classification
# regional files
reg_incidin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/R_export/OR_zip3_week_outpatient_cl.csv', 'r')
reg_incidin.readline()
regincid = csv.reader(reg_incidin, delimiter=',')
reg_popin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/R_export/allpopstat_zip3_season_cl.csv','r')
reg_popin.readline()
regpop = csv.reader(reg_popin, delimiter=',')
# national files (re-opened because the csv readers above were exhausted
# by the national processing pass)
incidin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/SQL_export/OR_allweeks_outpatient.csv','r')
incid = csv.reader(incidin, delimiter=',')
popin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/SQL_export/totalpop_age.csv', 'r')
pop = csv.reader(popin, delimiter=',')
thanksin=open('/home/elee/Dropbox/My_Bansal_Lab/Clean_Data_for_Import/ThanksgivingWeekData_cl.csv', 'r')
thanksin.readline() # remove header
thanks=csv.reader(thanksin, delimiter=',')
# dict_wk[week] = seasonnum, dict_incid[week] = ILI cases per 10,000 in US population in second calendar year of flu season, dict_OR[week] = OR
d_wk, d_incid, d_OR = fxn.week_OR_processing(incid, pop)
d_zOR = fxn.week_zOR_processing(d_wk, d_OR)
# d_incid53ls[seasonnum] = [ILI wk 40 per 100000, ILI wk 41 per 100000,...], d_OR53ls[seasonnum] = [OR wk 40, OR wk 41, ...], d_zOR53ls[seasonnum] = [zOR wk 40, zOR wk 41, ...]
d_incid53ls, d_OR53ls, d_zOR53ls = fxn.week_plotting_dicts(d_wk, d_incid, d_OR, d_zOR)
_, d_zip3_reg, d_incid_reg, d_OR_reg = fxn.week_OR_processing_region(regincid, regpop)
# dict_zOR_reg[(week, hhsreg)] = zOR
d_zOR_reg = fxn.week_zOR_processing_region(d_wk, d_OR_reg)
# dict_incid53ls_reg[(seasonnum, region)] = [ILI wk 40, ILI wk 41,...], dict_OR53ls_reg[(seasonnum, region)] = [OR wk 40, OR wk 41, ...], dict_zOR53ls_reg[(seasonnum, region)] = [zOR wk 40, zOR wk 41, ...]
d_incid53ls_reg, d_OR53ls_reg, d_zOR53ls_reg = fxn.week_plotting_dicts_region(d_wk, d_incid_reg, d_OR_reg, d_zOR_reg)
# dict_classifindex[seasonnum] = (index of first retro period week, index of first early warning period week)
d_classifindex = fxn.classif_zOR_index(d_wk, d_incid53ls, d_incid53ls_reg, 'region', thanks)
# d_classifzOR_reg[(seasonnum, region)] = (mean retrospective zOR, mean early warning zOR)
d_classifzOR_reg = fxn.classif_zOR_region_processing(d_classifindex, d_wk, d_zOR53ls_reg)
##############################################
# SDI STATE: nation-level peak-based retrospective classification
# import same files as regional files
reg_incidin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/R_export/OR_zip3_week_outpatient_cl.csv', 'r')
reg_incidin.readline()
regincid = csv.reader(reg_incidin, delimiter=',')
reg_popin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/R_export/allpopstat_zip3_season_cl.csv','r')
reg_popin.readline()
regpop = csv.reader(reg_popin, delimiter=',')
# national files (re-opened again; the previous readers are exhausted)
incidin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/SQL_export/OR_allweeks_outpatient.csv','r')
incid = csv.reader(incidin, delimiter=',')
popin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/SQL_export/totalpop_age.csv', 'r')
pop = csv.reader(popin, delimiter=',')
thanksin=open('/home/elee/Dropbox/My_Bansal_Lab/Clean_Data_for_Import/ThanksgivingWeekData_cl.csv', 'r')
thanksin.readline() # remove header
thanks=csv.reader(thanksin, delimiter=',')
# dict_wk[week] = seasonnum, dict_incid[week] = ILI cases per 10,000 in US population in second calendar year of flu season, dict_OR[week] = OR
d_wk, d_incid, d_OR = fxn.week_OR_processing(incid, pop)
d_zOR = fxn.week_zOR_processing(d_wk, d_OR)
# d_incid53ls[seasonnum] = [ILI wk 40 per 100000, ILI wk 41 per 100000,...], d_OR53ls[seasonnum] = [OR wk 40, OR wk 41, ...], d_zOR53ls[seasonnum] = [zOR wk 40, zOR wk 41, ...]
d_incid53ls, d_OR53ls, d_zOR53ls = fxn.week_plotting_dicts(d_wk, d_incid, d_OR, d_zOR)
_, d_zip3_reg, d_incid_state, d_OR_state = fxn.week_OR_processing_state(regincid, regpop)
# dict_zOR_state[(week, state)] = zOR
d_zOR_state = fxn.week_zOR_processing_state(d_wk, d_OR_state)
# dict_incid53ls_state[(seasonnum, state)] = [ILI wk 40, ILI wk 41,...], dict_OR53ls_reg[(seasonnum, state)] = [OR wk 40, OR wk 41, ...], dict_zOR53ls_state[(seasonnum, state)] = [zOR wk 40, zOR wk 41, ...]
d_incid53ls_state, d_OR53ls_state, d_zOR53ls_state = fxn.week_plotting_dicts_state(d_wk, d_incid_state, d_OR_state, d_zOR_state)
# dict_classifindex[seasonnum] = (index of first retro period week, index of first early warning period week)
d_classifindex = fxn.classif_zOR_index_state(d_wk, d_incid53ls, d_incid53ls_state, 'state', thanks)
# d_classifzOR_state[(seasonnum, state)] = (mean retrospective zOR, mean early warning zOR)
d_classifzOR_state = fxn.classif_zOR_state_processing(d_classifindex, d_wk, d_zOR53ls_state)
##############################################
# Output: echo the national/regional classification dicts (Python 2 print
# statements) and export the regional and state CSVs; the national and
# ILINet exports are kept commented out.
print d_classifzOR
print d_classifzOR_reg
# fn1 = '/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/Py_export/SDI_national_classifications_%s.csv' %(nw)
# print_dict_to_file(d_classifzOR, fn1)
# fn2 = '/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/Py_export/ILINet_national_classifications_%s.csv' %(nw)
# print_dict_to_file(d_ILINet_classifzOR, fn2)
fn3 = '/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/Py_export/SDI_regional_classifications_%sreg.csv' %(nw)
print_dict_to_file2(d_classifzOR_reg, fn3)
fn4 = '/home/elee/Dropbox/Elizabeth_Bansal_Lab/SDI_Data/explore/Py_export/SDI_state_classifications_%sst.csv' %(nw)
print_dict_to_file3(d_classifzOR_state, fn4) | mit |
kamenim/samba-old | python/samba/descriptor.py | 36 | 27268 |
# Unix SMB/CIFS implementation.
# backend code for provisioning a Samba4 server
# Copyright (C) Jelmer Vernooij <jelmer@samba.org> 2007-2010
# Copyright (C) Andrew Bartlett <abartlet@samba.org> 2008-2009
# Copyright (C) Oliver Liebel <oliver@itc.li> 2008-2009
# Copyright (C) Amitay Isaacs <amitay@samba.org> 2011
#
# Based on the original in EJS:
# Copyright (C) Andrew Tridgell <tridge@samba.org> 2005
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Functions for setting up a Samba configuration (security descriptors)."""
from samba.dcerpc import security
from samba.ndr import ndr_pack
from samba.schema import get_schema_descriptor
import ldb
import re
# Descriptors of naming contexts and other important objects
def sddl2binary(sddl_in, domain_sid, name_map):
    """Expand symbolic names in an SDDL string and NDR-pack the descriptor.

    :param sddl_in: SDDL text, possibly containing symbolic names
    :param domain_sid: domain SID used to resolve SDDL abbreviations
    :param name_map: mapping of symbolic name -> SID string; each occurrence
        of a name in the SDDL is replaced by its SID before parsing
    :return: the NDR-packed security descriptor (bytes)
    """
    expanded = "%s" % sddl_in
    for symbolic, sid in name_map.items():
        expanded = expanded.replace(symbolic, sid)
    descriptor = security.descriptor.from_sddl(expanded, domain_sid)
    return ndr_pack(descriptor)
def get_empty_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor built from an empty SDDL string."""
    return sddl2binary("", domain_sid, name_map)
# "get_schema_descriptor" is located in "schema.py"
def get_config_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed security descriptor for the Configuration NC head.

    :param domain_sid: domain SID used to resolve SDDL abbreviations
    :param name_map: optional symbolic-name -> SID substitutions (see sddl2binary)
    """
    sddl = "O:EAG:EAD:(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;1131f6ab-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;1131f6ac-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;1131f6ab-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;1131f6ac-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(A;;RPLCLORC;;;AU)(A;CI;RPWPCRCCDCLCLORCWOWDSDDTSW;;;EA)" \
           "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)(A;CIIO;RPWPCRCCLCLORCWOWDSDSW;;;DA)" \
           "(OA;;CR;1131f6ad-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;89e95b76-444d-4c62-991a-0facbeda640c;;ED)" \
           "(OA;;CR;1131f6ad-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;89e95b76-444d-4c62-991a-0facbeda640c;;BA)" \
           "(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;ER)" \
           "S:(AU;SA;WPWOWD;;;WD)(AU;SA;CR;;;BA)(AU;SA;CR;;;DU)" \
           "(OU;SA;CR;45ec5156-db7e-47bb-b53f-dbeb2d03c40f;;WD)"
    return sddl2binary(sddl, domain_sid, name_map)
def get_config_partitions_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor for CN=Partitions in the config NC."""
    sddl = "D:" \
           "(A;;LCLORC;;;AU)" \
           "(OA;;RP;e48d0154-bcf8-11d1-8702-00c04fb96050;;AU)" \
           "(OA;;RP;d31a8757-2447-4545-8081-3bb610cacbf2;;AU)" \
           "(OA;;RP;66171887-8f3c-11d0-afda-00c04fd930c9;;AU)" \
           "(OA;;RP;032160bf-9824-11d1-aec0-0000f80367c1;;AU)" \
           "(OA;;RP;789ee1eb-8c8e-4e4c-8cec-79b31b7617b5;;AU)" \
           "(OA;;RP;5706aeaf-b940-4fb2-bcfc-5268683ad9fe;;AU)" \
           "(A;;RPWPCRCCLCLORCWOWDSW;;;EA)" \
           "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)" \
           "(A;;CC;;;ED)" \
           "(OA;CIIO;WP;3df793df-9858-4417-a701-735a1ecebf74;bf967a8d-0de6-11d0-a285-00aa003049e2;BA)" \
           "S:" \
           "(AU;CISA;WPCRCCDCWOWDSDDT;;;WD)"
    return sddl2binary(sddl, domain_sid, name_map)
def get_config_sites_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor for CN=Sites in the config NC."""
    sddl = "D:" \
           "(A;;RPLCLORC;;;AU)" \
           "(OA;CIIO;SW;d31a8757-2447-4545-8081-3bb610cacbf2;f0f8ffab-1191-11d0-a060-00aa006c33ed;ER)" \
           "(A;;RPWPCRCCLCLORCWOWDSW;;;EA)" \
           "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)" \
           "S:" \
           "(AU;CISA;CCDCSDDT;;;WD)" \
           "(OU;CIIOSA;CR;;f0f8ffab-1191-11d0-a060-00aa006c33ed;WD)" \
           "(OU;CIIOSA;WP;f30e3bbe-9ff0-11d1-b603-0000f80367c1;bf967ab3-0de6-11d0-a285-00aa003049e2;WD)" \
           "(OU;CIIOSA;WP;f30e3bbf-9ff0-11d1-b603-0000f80367c1;bf967ab3-0de6-11d0-a285-00aa003049e2;WD)" \
           "(OU;CIIOSA;WP;3e10944c-c354-11d0-aff8-0000f80367c1;b7b13124-b82e-11d0-afee-0000f80367c1;WD)"
    return sddl2binary(sddl, domain_sid, name_map)
def get_config_ntds_quotas_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor for CN=NTDS Quotas in the config NC."""
    return sddl2binary("D:"
                       "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;EA)"
                       "(A;;RPLCLORC;;;BA)"
                       "(OA;;CR;4ecc03fe-ffc0-4947-b630-eb672a8a9dbc;;WD)",
                       domain_sid, name_map)
def get_config_delete_protected1_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor for delete-protected config objects."""
    return sddl2binary("D:AI"
                       "(A;;RPLCLORC;;;AU)"
                       "(A;;RPWPCRCCLCLORCWOWDSW;;;EA)"
                       "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)",
                       domain_sid, name_map)
def get_config_delete_protected1wd_descriptor(domain_sid, name_map={}):
    """Like get_config_delete_protected1_descriptor, but the read ACE is
    granted to WD instead of AU."""
    return sddl2binary("D:AI"
                       "(A;;RPLCLORC;;;WD)"
                       "(A;;RPWPCRCCLCLORCWOWDSW;;;EA)"
                       "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)",
                       domain_sid, name_map)
def get_config_delete_protected2_descriptor(domain_sid, name_map={}):
    """Variant of get_config_delete_protected1_descriptor whose EA ACE also
    carries the DC right (RPWPCRCCDCLCLORCWOWDSW)."""
    return sddl2binary("D:AI"
                       "(A;;RPLCLORC;;;AU)"
                       "(A;;RPWPCRCCDCLCLORCWOWDSW;;;EA)"
                       "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)",
                       domain_sid, name_map)
def get_domain_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed security descriptor for the domain NC head.

    :param domain_sid: domain SID used to resolve SDDL abbreviations
    :param name_map: optional symbolic-name -> SID substitutions (see sddl2binary)
    """
    sddl= "O:BAG:BAD:AI(OA;CIIO;RP;4c164200-20c0-11d0-a768-00aa006e0529;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
          "(OA;CIIO;RP;4c164200-20c0-11d0-a768-00aa006e0529;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
          "(OA;CIIO;RP;5f202010-79a5-11d0-9020-00c04fc2d4cf;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
          "(OA;CIIO;RP;5f202010-79a5-11d0-9020-00c04fc2d4cf;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
          "(OA;CIIO;RP;bc0ac240-79a9-11d0-9020-00c04fc2d4cf;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
          "(OA;CIIO;RP;bc0ac240-79a9-11d0-9020-00c04fc2d4cf;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
          "(OA;CIIO;RP;59ba2f42-79a2-11d0-9020-00c04fc2d3cf;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
          "(OA;CIIO;RP;59ba2f42-79a2-11d0-9020-00c04fc2d3cf;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
          "(OA;CIIO;RP;037088f8-0ae1-11d2-b422-00a0c968f939;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
          "(OA;CIIO;RP;037088f8-0ae1-11d2-b422-00a0c968f939;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
          "(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;ER)" \
          "(OA;;CR;1131f6ad-9c07-11d1-f79f-00c04fc2dcd2;;DD)" \
          "(OA;CIIO;RP;b7c69e6d-2cc7-11d2-854e-00a0c983f608;bf967a86-0de6-11d0-a285-00aa003049e2;ED)" \
          "(OA;CIIO;RP;b7c69e6d-2cc7-11d2-854e-00a0c983f608;bf967a9c-0de6-11d0-a285-00aa003049e2;ED)" \
          "(OA;CIIO;RP;b7c69e6d-2cc7-11d2-854e-00a0c983f608;bf967aba-0de6-11d0-a285-00aa003049e2;ED)" \
          "(OA;;CR;89e95b76-444d-4c62-991a-0facbeda640c;;BA)" \
          "(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
          "(OA;;CR;1131f6ab-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
          "(OA;;CR;1131f6ac-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
          "(OA;;CR;1131f6ad-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
          "(OA;;CR;1131f6ae-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
          "(OA;;CR;e2a36dc9-ae17-47c3-b58b-be34c55ba633;;IF)" \
          "(OA;;RP;c7407360-20bf-11d0-a768-00aa006e0529;;RU)" \
          "(OA;;RP;b8119fd0-04f6-4762-ab7a-4986c76b3f9a;;RU)" \
          "(OA;CIIO;RPLCLORC;;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
          "(OA;CIIO;RPLCLORC;;bf967a9c-0de6-11d0-a285-00aa003049e2;RU)" \
          "(OA;CIIO;RPLCLORC;;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
          "(OA;;CR;05c74c5e-4deb-43b4-bd9f-86664c2a7fd5;;AU)" \
          "(OA;;CR;89e95b76-444d-4c62-991a-0facbeda640c;;ED)" \
          "(OA;;CR;ccc2dc7d-a6ad-4a7a-8846-c04e3cc53501;;AU)" \
          "(OA;;CR;280f369c-67c7-438e-ae98-1d46f3c6f541;;AU)" \
          "(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
          "(OA;;CR;1131f6ab-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
          "(OA;;CR;1131f6ac-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
          "(OA;;CR;1131f6ae-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
          "(OA;;RP;b8119fd0-04f6-4762-ab7a-4986c76b3f9a;;AU)" \
          "(OA;CIIO;RPWPCR;91e647de-d96f-4b70-9557-d63ff4f3ccd8;;PS)" \
          "(A;;RPWPCRCCLCLORCWOWDSW;;;DA)" \
          "(A;CI;RPWPCRCCDCLCLORCWOWDSDDTSW;;;EA)" \
          "(A;;RPRC;;;RU)" \
          "(A;CI;LC;;;RU)" \
          "(A;CI;RPWPCRCCLCLORCWOWDSDSW;;;BA)" \
          "(A;;RP;;;WD)" \
          "(A;;RPLCLORC;;;ED)" \
          "(A;;RPLCLORC;;;AU)" \
          "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)" \
          "S:AI(OU;CISA;WP;f30e3bbe-9ff0-11d1-b603-0000f80367c1;bf967aa5-0de6-11d0-a285-00aa003049e2;WD)" \
          "(OU;CISA;WP;f30e3bbf-9ff0-11d1-b603-0000f80367c1;bf967aa5-0de6-11d0-a285-00aa003049e2;WD)" \
          "(AU;SA;CR;;;DU)(AU;SA;CR;;;BA)(AU;SA;WPWOWD;;;WD)"
    return sddl2binary(sddl, domain_sid, name_map)
def get_domain_infrastructure_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor for CN=Infrastructure in the domain NC."""
    return sddl2binary("D:"
                       "(A;;RPLCLORC;;;AU)"
                       "(A;;RPWPCRCCLCLORCWOWDSW;;;DA)"
                       "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)"
                       "S:"
                       "(AU;SA;WPCR;;;WD)",
                       domain_sid, name_map)
def get_domain_builtin_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor for CN=Builtin in the domain NC.

    The ACE list mirrors get_domain_descriptor (same SDDL body, split onto
    one ACE per line and without the leading O:BAG:BA owner/group part).
    """
    sddl = "D:" \
           "(OA;CIIO;RP;4c164200-20c0-11d0-a768-00aa006e0529;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
           "(OA;CIIO;RP;4c164200-20c0-11d0-a768-00aa006e0529;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
           "(OA;CIIO;RP;5f202010-79a5-11d0-9020-00c04fc2d4cf;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
           "(OA;CIIO;RP;5f202010-79a5-11d0-9020-00c04fc2d4cf;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
           "(OA;CIIO;RP;bc0ac240-79a9-11d0-9020-00c04fc2d4cf;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
           "(OA;CIIO;RP;bc0ac240-79a9-11d0-9020-00c04fc2d4cf;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
           "(OA;CIIO;RP;59ba2f42-79a2-11d0-9020-00c04fc2d3cf;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
           "(OA;CIIO;RP;59ba2f42-79a2-11d0-9020-00c04fc2d3cf;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
           "(OA;CIIO;RP;037088f8-0ae1-11d2-b422-00a0c968f939;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
           "(OA;CIIO;RP;037088f8-0ae1-11d2-b422-00a0c968f939;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
           "(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;ER)" \
           "(OA;;CR;1131f6ad-9c07-11d1-f79f-00c04fc2dcd2;;DD)" \
           "(OA;CIIO;RP;b7c69e6d-2cc7-11d2-854e-00a0c983f608;bf967a86-0de6-11d0-a285-00aa003049e2;ED)" \
           "(OA;CIIO;RP;b7c69e6d-2cc7-11d2-854e-00a0c983f608;bf967a9c-0de6-11d0-a285-00aa003049e2;ED)" \
           "(OA;CIIO;RP;b7c69e6d-2cc7-11d2-854e-00a0c983f608;bf967aba-0de6-11d0-a285-00aa003049e2;ED)" \
           "(OA;;CR;89e95b76-444d-4c62-991a-0facbeda640c;;BA)" \
           "(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;1131f6ab-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;1131f6ac-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;1131f6ad-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;1131f6ae-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;e2a36dc9-ae17-47c3-b58b-be34c55ba633;;IF)" \
           "(OA;;RP;c7407360-20bf-11d0-a768-00aa006e0529;;RU)" \
           "(OA;;RP;b8119fd0-04f6-4762-ab7a-4986c76b3f9a;;RU)" \
           "(OA;CIIO;RPLCLORC;;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
           "(OA;CIIO;RPLCLORC;;bf967a9c-0de6-11d0-a285-00aa003049e2;RU)" \
           "(OA;CIIO;RPLCLORC;;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
           "(OA;;CR;05c74c5e-4deb-43b4-bd9f-86664c2a7fd5;;AU)" \
           "(OA;;CR;89e95b76-444d-4c62-991a-0facbeda640c;;ED)" \
           "(OA;;CR;ccc2dc7d-a6ad-4a7a-8846-c04e3cc53501;;AU)" \
           "(OA;;CR;280f369c-67c7-438e-ae98-1d46f3c6f541;;AU)" \
           "(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;1131f6ab-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;1131f6ac-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;1131f6ae-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;RP;b8119fd0-04f6-4762-ab7a-4986c76b3f9a;;AU)" \
           "(OA;CIIO;RPWPCR;91e647de-d96f-4b70-9557-d63ff4f3ccd8;;PS)" \
           "(A;;RPWPCRCCLCLORCWOWDSW;;;DA)" \
           "(A;CI;RPWPCRCCDCLCLORCWOWDSDDTSW;;;EA)" \
           "(A;;RPRC;;;RU)" \
           "(A;CI;LC;;;RU)" \
           "(A;CI;RPWPCRCCLCLORCWOWDSDSW;;;BA)" \
           "(A;;RP;;;WD)" \
           "(A;;RPLCLORC;;;ED)" \
           "(A;;RPLCLORC;;;AU)" \
           "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)" \
           "S:" \
           "(OU;CISA;WP;f30e3bbe-9ff0-11d1-b603-0000f80367c1;bf967aa5-0de6-11d0-a285-00aa003049e2;WD)" \
           "(OU;CISA;WP;f30e3bbf-9ff0-11d1-b603-0000f80367c1;bf967aa5-0de6-11d0-a285-00aa003049e2;WD)" \
           "(AU;SA;CR;;;DU)" \
           "(AU;SA;CR;;;BA)" \
           "(AU;SA;WPWOWD;;;WD)"
    return sddl2binary(sddl, domain_sid, name_map)
def get_domain_computers_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor for CN=Computers in the domain NC."""
    sddl = "D:" \
           "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)" \
           "(A;;RPWPCRCCDCLCLORCWOWDSW;;;DA)" \
           "(OA;;CCDC;bf967a86-0de6-11d0-a285-00aa003049e2;;AO)" \
           "(OA;;CCDC;bf967aba-0de6-11d0-a285-00aa003049e2;;AO)" \
           "(OA;;CCDC;bf967a9c-0de6-11d0-a285-00aa003049e2;;AO)" \
           "(OA;;CCDC;bf967aa8-0de6-11d0-a285-00aa003049e2;;PO)" \
           "(A;;RPLCLORC;;;AU)" \
           "(OA;;CCDC;4828cc14-1437-45bc-9b07-ad6f015e5f28;;AO)" \
           "S:"
    return sddl2binary(sddl, domain_sid, name_map)
def get_domain_users_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor for CN=Users in the domain NC."""
    sddl = "D:" \
           "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)" \
           "(A;;RPWPCRCCDCLCLORCWOWDSW;;;DA)" \
           "(OA;;CCDC;bf967aba-0de6-11d0-a285-00aa003049e2;;AO)" \
           "(OA;;CCDC;bf967a9c-0de6-11d0-a285-00aa003049e2;;AO)" \
           "(OA;;CCDC;bf967aa8-0de6-11d0-a285-00aa003049e2;;PO)" \
           "(A;;RPLCLORC;;;AU)" \
           "(OA;;CCDC;4828cc14-1437-45bc-9b07-ad6f015e5f28;;AO)" \
           "S:"
    return sddl2binary(sddl, domain_sid, name_map)
def get_domain_controllers_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor for OU=Domain Controllers."""
    return sddl2binary("D:"
                       "(A;;RPLCLORC;;;AU)"
                       "(A;;RPWPCRCCLCLORCWOWDSW;;;DA)"
                       "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)"
                       "(A;;RPLCLORC;;;ED)"
                       "S:"
                       "(AU;SA;CCDCWOWDSDDT;;;WD)"
                       "(AU;CISA;WP;;;WD)",
                       domain_sid, name_map)
def get_domain_delete_protected1_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor for delete-protected domain objects."""
    return sddl2binary("D:AI"
                       "(A;;RPLCLORC;;;AU)"
                       "(A;;RPWPCRCCLCLORCWOWDSW;;;DA)"
                       "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)",
                       domain_sid, name_map)
def get_domain_delete_protected2_descriptor(domain_sid, name_map={}):
    """Variant of get_domain_delete_protected1_descriptor whose DA ACE also
    carries the DC right (RPWPCRCCDCLCLORCWOWDSW)."""
    return sddl2binary("D:AI"
                       "(A;;RPLCLORC;;;AU)"
                       "(A;;RPWPCRCCDCLCLORCWOWDSW;;;DA)"
                       "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)",
                       domain_sid, name_map)
def get_dns_partition_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor for a DNS application partition head
    (ForestDnsZones / DomainDnsZones).

    Same ACE list as get_domain_descriptor but owned by SY (O:SYG:BA).
    """
    sddl = "O:SYG:BAD:AI" \
           "(OA;CIIO;RP;4c164200-20c0-11d0-a768-00aa006e0529;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
           "(OA;CIIO;RP;4c164200-20c0-11d0-a768-00aa006e0529;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
           "(OA;CIIO;RP;5f202010-79a5-11d0-9020-00c04fc2d4cf;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
           "(OA;CIIO;RP;5f202010-79a5-11d0-9020-00c04fc2d4cf;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
           "(OA;CIIO;RP;bc0ac240-79a9-11d0-9020-00c04fc2d4cf;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
           "(OA;CIIO;RP;bc0ac240-79a9-11d0-9020-00c04fc2d4cf;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
           "(OA;CIIO;RP;59ba2f42-79a2-11d0-9020-00c04fc2d3cf;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
           "(OA;CIIO;RP;59ba2f42-79a2-11d0-9020-00c04fc2d3cf;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
           "(OA;CIIO;RP;037088f8-0ae1-11d2-b422-00a0c968f939;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
           "(OA;CIIO;RP;037088f8-0ae1-11d2-b422-00a0c968f939;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
           "(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;ER)" \
           "(OA;CIIO;RP;b7c69e6d-2cc7-11d2-854e-00a0c983f608;bf967a86-0de6-11d0-a285-00aa003049e2;ED)" \
           "(OA;CIIO;RP;b7c69e6d-2cc7-11d2-854e-00a0c983f608;bf967a9c-0de6-11d0-a285-00aa003049e2;ED)" \
           "(OA;CIIO;RP;b7c69e6d-2cc7-11d2-854e-00a0c983f608;bf967aba-0de6-11d0-a285-00aa003049e2;ED)" \
           "(OA;;CR;89e95b76-444d-4c62-991a-0facbeda640c;;BA)" \
           "(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;1131f6ab-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;1131f6ac-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;1131f6ad-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;1131f6ae-9c07-11d1-f79f-00c04fc2dcd2;;BA)" \
           "(OA;;CR;e2a36dc9-ae17-47c3-b58b-be34c55ba633;;IF)" \
           "(OA;;RP;c7407360-20bf-11d0-a768-00aa006e0529;;RU)" \
           "(OA;;RP;b8119fd0-04f6-4762-ab7a-4986c76b3f9a;;RU)" \
           "(OA;CIIO;RPLCLORC;;4828cc14-1437-45bc-9b07-ad6f015e5f28;RU)" \
           "(OA;CIIO;RPLCLORC;;bf967a9c-0de6-11d0-a285-00aa003049e2;RU)" \
           "(OA;CIIO;RPLCLORC;;bf967aba-0de6-11d0-a285-00aa003049e2;RU)" \
           "(OA;;CR;05c74c5e-4deb-43b4-bd9f-86664c2a7fd5;;AU)" \
           "(OA;;CR;89e95b76-444d-4c62-991a-0facbeda640c;;ED)" \
           "(OA;;CR;ccc2dc7d-a6ad-4a7a-8846-c04e3cc53501;;AU)" \
           "(OA;;CR;280f369c-67c7-438e-ae98-1d46f3c6f541;;AU)" \
           "(OA;;CR;1131f6aa-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;1131f6ab-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;1131f6ac-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;1131f6ad-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;CR;1131f6ae-9c07-11d1-f79f-00c04fc2dcd2;;ED)" \
           "(OA;;RP;b8119fd0-04f6-4762-ab7a-4986c76b3f9a;;AU)" \
           "(OA;CIIO;RPWPCR;91e647de-d96f-4b70-9557-d63ff4f3ccd8;;PS)" \
           "(A;;RPWPCRCCLCLORCWOWDSW;;;DA)" \
           "(A;CI;RPWPCRCCDCLCLORCWOWDSDDTSW;;;EA)" \
           "(A;;RPRC;;;RU)" \
           "(A;CI;LC;;;RU)" \
           "(A;CI;RPWPCRCCLCLORCWOWDSDSW;;;BA)" \
           "(A;;RP;;;WD)" \
           "(A;;RPLCLORC;;;ED)" \
           "(A;;RPLCLORC;;;AU)" \
           "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)" \
           "S:AI" \
           "(OU;CISA;WP;f30e3bbe-9ff0-11d1-b603-0000f80367c1;bf967aa5-0de6-11d0-a285-00aa003049e2;WD)" \
           "(OU;CISA;WP;f30e3bbf-9ff0-11d1-b603-0000f80367c1;bf967aa5-0de6-11d0-a285-00aa003049e2;WD)" \
           "(AU;SA;CR;;;DU)(AU;SA;CR;;;BA)(AU;SA;WPWOWD;;;WD)"
    return sddl2binary(sddl, domain_sid, name_map)
def get_dns_forest_microsoft_dns_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor for CN=MicrosoftDNS in ForestDnsZones."""
    return sddl2binary("O:SYG:SYD:AI"
                       "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)"
                       "(A;CI;RPWPCRCCDCLCRCWOWDSDDTSW;;;ED)",
                       domain_sid, name_map)
def get_dns_domain_microsoft_dns_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor for CN=MicrosoftDNS in DomainDnsZones.

    "DnsAdmins" is a symbolic name resolved through name_map (see sddl2binary).
    """
    return sddl2binary("O:SYG:SYD:AI"
                       "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;DA)"
                       "(A;CI;RPWPCRCCDCLCRCWOWDSDDTSW;;;DnsAdmins)"
                       "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)"
                       "(A;CI;RPWPCRCCDCLCRCWOWDSDDTSW;;;ED)",
                       domain_sid, name_map)
def get_paritions_crossref_subdomain_descriptor(domain_sid, name_map={}):
    """Return the NDR-packed descriptor for a subdomain crossRef object in
    CN=Partitions.

    "SubdomainAdmins" is a symbolic name resolved through name_map
    (see sddl2binary).
    """
    # BUG FIX: the AU and SY ACE strings were bare expression statements
    # (missing "\" line continuations), so they were silently discarded and
    # never became part of the SDDL passed to sddl2binary.
    sddl = "O:SubdomainAdminsG:SubdomainAdminsD:AI" \
           "(A;;RPWPCRCCLCLORCWOWDSW;;;SubdomainAdmins)" \
           "(A;;RPLCLORC;;;AU)" \
           "(A;;RPWPCRCCDCLCLORCWOWDSDDTSW;;;SY)"
    return sddl2binary(sddl, domain_sid, name_map)
def get_wellknown_sds(samdb):
    """Return [(DN, descriptor_function), ...] for well-known containers.

    Pairs each well-known DN of the domain, configuration and schema
    partitions with the function that builds its default security
    descriptor.  The DNS application partitions (ForestDnsZones /
    DomainDnsZones) are appended only when present in the server's
    namingContexts.

    :param samdb: a connected SamDB instance
    :return: list of (ldb.Dn, callable) tuples
    """
    # Then subcontainers
    subcontainers = [
        (ldb.Dn(samdb, "%s" % str(samdb.domain_dn())), get_domain_descriptor),
        (ldb.Dn(samdb, "CN=LostAndFound,%s" % str(samdb.domain_dn())), get_domain_delete_protected2_descriptor),
        (ldb.Dn(samdb, "CN=System,%s" % str(samdb.domain_dn())), get_domain_delete_protected1_descriptor),
        (ldb.Dn(samdb, "CN=Infrastructure,%s" % str(samdb.domain_dn())), get_domain_infrastructure_descriptor),
        (ldb.Dn(samdb, "CN=Builtin,%s" % str(samdb.domain_dn())), get_domain_builtin_descriptor),
        (ldb.Dn(samdb, "CN=Computers,%s" % str(samdb.domain_dn())), get_domain_computers_descriptor),
        (ldb.Dn(samdb, "CN=Users,%s" % str(samdb.domain_dn())), get_domain_users_descriptor),
        (ldb.Dn(samdb, "OU=Domain Controllers,%s" % str(samdb.domain_dn())), get_domain_controllers_descriptor),
        (ldb.Dn(samdb, "CN=MicrosoftDNS,CN=System,%s" % str(samdb.domain_dn())), get_dns_domain_microsoft_dns_descriptor),
        (ldb.Dn(samdb, "%s" % str(samdb.get_config_basedn())), get_config_descriptor),
        (ldb.Dn(samdb, "CN=NTDS Quotas,%s" % str(samdb.get_config_basedn())), get_config_ntds_quotas_descriptor),
        (ldb.Dn(samdb, "CN=LostAndFoundConfig,%s" % str(samdb.get_config_basedn())), get_config_delete_protected1wd_descriptor),
        (ldb.Dn(samdb, "CN=Services,%s" % str(samdb.get_config_basedn())), get_config_delete_protected1_descriptor),
        (ldb.Dn(samdb, "CN=Physical Locations,%s" % str(samdb.get_config_basedn())), get_config_delete_protected1wd_descriptor),
        (ldb.Dn(samdb, "CN=WellKnown Security Principals,%s" % str(samdb.get_config_basedn())), get_config_delete_protected1wd_descriptor),
        (ldb.Dn(samdb, "CN=ForestUpdates,%s" % str(samdb.get_config_basedn())), get_config_delete_protected1wd_descriptor),
        (ldb.Dn(samdb, "CN=DisplaySpecifiers,%s" % str(samdb.get_config_basedn())), get_config_delete_protected2_descriptor),
        (ldb.Dn(samdb, "CN=Extended-Rights,%s" % str(samdb.get_config_basedn())), get_config_delete_protected2_descriptor),
        (ldb.Dn(samdb, "CN=Partitions,%s" % str(samdb.get_config_basedn())), get_config_partitions_descriptor),
        (ldb.Dn(samdb, "CN=Sites,%s" % str(samdb.get_config_basedn())), get_config_sites_descriptor),
        (ldb.Dn(samdb, "%s" % str(samdb.get_schema_basedn())), get_schema_descriptor),
    ]
    # Ask the rootDSE which naming contexts actually exist, so the DNS
    # application partitions are only handled when provisioned.
    current = samdb.search(expression="(objectClass=*)",
                           base="", scope=ldb.SCOPE_BASE,
                           attrs=["namingContexts"])
    for nc in current[0]["namingContexts"]:
        dnsforestdn = ldb.Dn(samdb, "DC=ForestDnsZones,%s" % (str(samdb.get_root_basedn())))
        if ldb.Dn(samdb, nc) == dnsforestdn:
            c = (ldb.Dn(samdb, "%s" % str(dnsforestdn)), get_dns_partition_descriptor)
            subcontainers.append(c)
            c = (ldb.Dn(samdb, "CN=Infrastructure,%s" % str(dnsforestdn)),
                 get_domain_delete_protected1_descriptor)
            subcontainers.append(c)
            c = (ldb.Dn(samdb, "CN=LostAndFound,%s" % str(dnsforestdn)),
                 get_domain_delete_protected2_descriptor)
            subcontainers.append(c)
            c = (ldb.Dn(samdb, "CN=MicrosoftDNS,%s" % str(dnsforestdn)),
                 get_dns_forest_microsoft_dns_descriptor)
            subcontainers.append(c)
            continue
        dnsdomaindn = ldb.Dn(samdb, "DC=DomainDnsZones,%s" % (str(samdb.domain_dn())))
        if ldb.Dn(samdb, nc) == dnsdomaindn:
            c = (ldb.Dn(samdb, "%s" % str(dnsdomaindn)), get_dns_partition_descriptor)
            subcontainers.append(c)
            c = (ldb.Dn(samdb, "CN=Infrastructure,%s" % str(dnsdomaindn)),
                 get_domain_delete_protected1_descriptor)
            subcontainers.append(c)
            c = (ldb.Dn(samdb, "CN=LostAndFound,%s" % str(dnsdomaindn)),
                 get_domain_delete_protected2_descriptor)
            subcontainers.append(c)
            c = (ldb.Dn(samdb, "CN=MicrosoftDNS,%s" % str(dnsdomaindn)),
                 get_dns_domain_microsoft_dns_descriptor)
            subcontainers.append(c)
    return subcontainers
def chunck_acl(acl):
    """Return separate ACE of an ACL

    :param acl: A string representing the ACL
    :return: A hash with different parts: "aces" is the list of "(...)"
        ACE strings in order; "flags" (set only if present) is the ACL
        flag token that precedes an ACE, e.g. "AI"
    """
    p = re.compile(r'(\w+)?(\(.*?\))')
    # FIX: the local accumulator used to be named "hash", shadowing the
    # builtin; renamed without changing the returned structure.
    result = {"aces": []}
    for flags, ace in p.findall(acl):
        if flags:
            # last flag token wins, matching the original behaviour
            result["flags"] = flags
        result["aces"].append(ace)
    return result
def chunck_sddl(sddl):
    """ Return separate parts of the SDDL (owner, group, ...)

    :param sddl: An string containing the SDDL to chunk
    :return: A hash with the different chunks, keyed "owner", "group",
        "dacl" and "sacl"; keys are absent when the SDDL lacks that part
    """
    p = re.compile(r'([OGDS]:)(.*?)(?=(?:[GDS]:|$))')
    # FIX: the local accumulator used to be named "hash", shadowing the
    # builtin; the repetitive if-chain is replaced by a tag lookup table.
    part_names = {"O:": "owner", "G:": "group", "D:": "dacl", "S:": "sacl"}
    parts = {}
    for tag, body in p.findall(sddl):
        parts[part_names[tag]] = body
    return parts
def get_clean_sd(sd):
    """Get the SD without any inherited ACEs

    :param sd: SD to strip
    :return: An SD with inherited ACEs stripped
    """
    stripped = security.descriptor()
    stripped.owner_sid = sd.owner_sid
    stripped.group_sid = sd.group_sid
    stripped.type = sd.type
    stripped.revision = sd.revision
    # Copy only ACEs that do not carry the INHERITED flag.
    if sd.sacl is not None:
        for ace in sd.sacl.aces:
            if not ace.flags & security.SEC_ACE_FLAG_INHERITED_ACE:
                stripped.sacl_add(ace)
    if sd.dacl is not None:
        for ace in sd.dacl.aces:
            if not ace.flags & security.SEC_ACE_FLAG_INHERITED_ACE:
                stripped.dacl_add(ace)
    return stripped
def get_diff_sds(refsd, cursd, domainsid, checkSacl = True):
    """Get the difference between 2 sd

    This function splits the textual representation of each ACL into
    smaller chunks in order not to report a simple permutation of ACEs
    as a difference.

    :param refsd: The reference security descriptor
    :param cursd: The security descriptor to compare against the reference
    :param domainsid: SID of the domain, used to render the SDs as SDDL
    :param checkSacl: If false we skip the sacl checks
    :return: A string that explains the differences between the sddls,
        empty when they are equivalent
    """

    cursddl = get_clean_sd(cursd).as_sddl(domainsid)
    refsddl = get_clean_sd(refsd).as_sddl(domainsid)

    txt = ""
    hash_cur = chunck_sddl(cursddl)
    hash_ref = chunck_sddl(refsddl)

    # BUGFIX/modernization: dict.has_key() is Python 2 only; the "in"
    # operator is equivalent and also works on Python 3.
    if "owner" not in hash_cur:
        txt = "\tNo owner in current SD"
    elif "owner" in hash_ref and hash_cur["owner"] != hash_ref["owner"]:
        txt = "\tOwner mismatch: %s (in ref) %s" \
              "(in current)\n" % (hash_ref["owner"], hash_cur["owner"])

    if "group" not in hash_cur:
        txt = "%s\tNo group in current SD" % txt
    elif "group" in hash_ref and hash_cur["group"] != hash_ref["group"]:
        txt = "%s\tGroup mismatch: %s (in ref) %s" \
              "(in current)\n" % (txt, hash_ref["group"], hash_cur["group"])

    parts = [ "dacl" ]
    if checkSacl:
        parts.append("sacl")
    for part in parts:
        if part in hash_cur and part in hash_ref:
            # both are present, check if they contain the same ACEs
            h_cur = set()
            h_ref = set()
            c_cur = chunck_acl(hash_cur[part])
            c_ref = chunck_acl(hash_ref[part])

            for elem in c_cur["aces"]:
                h_cur.add(elem)

            for elem in c_ref["aces"]:
                h_ref.add(elem)

            # Drop ACEs present on both sides; whatever remains exists in
            # only one of the two descriptors.
            for k in set(h_ref):
                if k in h_cur:
                    h_cur.remove(k)
                    h_ref.remove(k)

            if len(h_cur) + len(h_ref) > 0:
                txt = "%s\tPart %s is different between reference" \
                      " and current here is the detail:\n" % (txt, part)

                for item in h_cur:
                    txt = "%s\t\t%s ACE is not present in the" \
                          " reference\n" % (txt, item)

                for item in h_ref:
                    txt = "%s\t\t%s ACE is not present in the" \
                          " current\n" % (txt, item)

        elif part in hash_cur and part not in hash_ref:
            txt = "%s\tReference ACL hasn't a %s part\n" % (txt, part)
        elif part not in hash_cur and part in hash_ref:
            txt = "%s\tCurrent ACL hasn't a %s part\n" % (txt, part)
    return txt
| gpl-3.0 |
anorfleet/kaggle-titanic | Python Examples/agc_embark_class_gender.py | 6 | 3506 | # A model for prediction survival on the Titanic based on where an
# individual Embarked, their gender, or the class they traveled in.
# AGC 2013
#
#
# Here Will will run generate predictions of who survived and who did not
# from our basic Least Squares Regression model.
# Our Formula is :
# survived_prediction = C + pclass + sex + age + sibsp + embarked
# Import Utilities
import csv as csv
import numpy as np
import statsmodels.api as sm
import kaggleaux as ka
from pasty import dmatrices
# declare necessary variables
data = [] # a container to hold our training data in.
test_data = [] # a continer to hold our test data in.
x = 0 # a container for an iterator later on
with open('train.csv', 'rb') as f: # deals with opening and closing
csv_file = csv.reader(open('train.csv', 'rb'))
csv_file.next() # skips the header, so we can get to the data.
for row in csv_file:
data.append(row)
# Then we convert our list to NumPy array for more efficient data manipulation.
data = np.array(data)
# Model formula
formula='survived ~ C(pclass) + C(sex) + age + sibsp + C(embarked)'
# Create a regression friendly version of our data using dmatrices
y,x=dmatrices(formula, data=data, return_type='matrix')
# Create a Logit Model Based on our data
model=sm.Logit(y,x)
# Fit that Model to the Data
results= model.fit()
results.params
# The file is already trained on the train.csv file.
# Now we test our model by making predictions on the test.csv file.
# You'll notice the test.csv file has no values in the survived field. This
# Is what we're are tying to predict.
with open('test.csv', 'rb') as f: # Handel's the opening and closing of the file for us.
csv_file = csv.reader(open('test.csv', 'rb'))
csv_file.next() # Skips the header in the test file, so we can get right to the data
for row in csv_file:
test_data.append(row)
test_data = np.array(data)
predicted_results = ka.regress_pred_output(test_data, res, 'survived')
# The print statements used throughout are designed to show what the program is doing
# Once its executed in the terminal.
# The \n starts a new line. This allows for more attractive printing.
print "\nBegin Predictions:"
# Reads in the 'train' file for a comparative result
cop_open_file = open("results_embarkclassgendermodel.csv", "wb") # Creates a csv file with write privileges ("wb") called
# results_embarkclassgendermodel.csv
open_file = csv.writer(cop_open_file) # Theres no header in this guy
with open('test.csv', 'rb') as f: # Handel's the opening and closing of the file for us.
csv_file = csv.reader(open('test.csv', 'rb'))
csv_file.next()
for row in csv_file:
surived_pred = (C + (female * int(data[x][1])) + (U_Class * int(data[x][0])) + (embarked_c * int(data[x][2])))
print "Row #:" + str(x)
print "urvived_pred: " + str(urvived_pred) + " female: %s Class: %s Em: %s" %(data[x][1],data[x][0],data[x][2])
if survived_prediction > .50000:
pred_val = 1
else:
pred_val = 0
row.insert(0,pred_val) # Insert the prediction at the start of the row
open_file.writerow(row) # Write the row to the file
x += 1
cop_open_file.close()
print "Analysis ended"
| apache-2.0 |
DVSBA/ajenti | ajenti/plugins/recovery/main.py | 17 | 4216 | from ajenti.api import *
from ajenti.ui import *
from api import Manager
class RecoveryPlugin(CategoryPlugin):
    # Ajenti sidebar category: backup/restore of plugin configuration.
    text = 'Recovery'
    icon = '/dl/recovery/icon.png'
    folder = 'bottom'

    def on_init(self):
        # Collect every plugin that can be backed up and keep them sorted
        # by display name; default the selection to the first provider.
        self.manager = Manager(self.app)
        self.providers = self.app.grab_plugins(IConfigurable)
        self.providers = sorted(self.providers, key=lambda x: x.name)
        # _current/_current_name persist the provider picked in the list UI.
        if not self._current:
            self._current = self.providers[0].id
            self._current_name = self.providers[0].name

    def get_ui(self):
        # Build the panel: provider list on one side, a table of stored
        # backup revisions (with restore/delete actions) for the selected
        # provider on the other.
        ui = self.app.inflate('recovery:main')
        provs = ui.find('provs')

        for p in self.providers:
            provs.append(
                UI.ListItem(
                    UI.Label(text=p.name),
                    id=p.id,
                    active=p.id==self._current
                )
            )

        backs = ui.find('backs')
        for rev in self.manager.list_backups(self._current):
            backs.append(
                UI.DTR(
                    UI.Label(text=rev.revision),
                    UI.Label(text=rev.date),
                    UI.DTC(
                        UI.HContainer(
                            # Icon ids encode the action and its arguments
                            # ("restore/<provider>/<rev>"); on_click splits
                            # them back apart.
                            UI.TipIcon(
                                text='Recover',
                                icon='/dl/recovery/rev-restore.png',
                                id='restore/%s/%s'%(self._current,rev.revision),
                                warning='Restore configuration of %s as of %s (rev %s)'%(
                                    self._current,
                                    rev.date,
                                    rev.revision
                                )
                            ),
                            UI.TipIcon(
                                text='Drop',
                                icon='/dl/recovery/rev-delete.png',
                                id='drop/%s/%s'%(self._current,rev.revision),
                                warning='Delete backed up configuration of %s as of %s (rev %s)'%(
                                    self._current,
                                    rev.date,
                                    rev.revision
                                )
                            ),
                            spacing=0
                        ),
                        width=0,
                    )
                )
            )

        ui.find('btnBackup').set('text', 'Backup %s'%self._current_name)
        ui.find('btnBackup').set('id', 'backup/%s'%self._current)
        return ui

    @event('button/click')
    def on_click(self, event, params, vars=None):
        # params comes from the clicked widget id split on '/':
        # [action, provider_id, revision].
        if params[0] == 'backup':
            p = self.manager.find_provider(params[1])
            try:
                self.manager.backup(p)
                self.put_message('info', 'Stored backup for %s.' % p.name)
            except:
                self.put_message('err', 'Failed to backup %s.' % p.name)
        if params[0] == 'backupall':
            errs = self.manager.backup_all()
            if errs != []:
                self.put_message('err', 'Backup failed for %s.' % ', '.join(errs))
            else:
                self.put_message('info', 'Stored full backup')
        if params[0] == 'restore':
            p = self.manager.find_provider(params[1])
            try:
                self.manager.restore(p, params[2])
                self.put_message('info', 'Restored configuration of %s (rev %s).' % (p.name, params[2]))
            except:
                self.put_message('err', 'Failed to recover %s.' % p.name)
        if params[0] == 'drop':
            try:
                self.manager.delete_backup(params[1], params[2])
                self.put_message('info', 'Deleted backup rev %s for %s.' % (params[2], params[1]))
            except:
                self.put_message('err', 'Failed to delete backup rev %s for %s.' % (params[2], params[1]))

    @event('listitem/click')
    def on_list_click(self, event, params, vars=None):
        # Switch the current provider to the list item the user clicked.
        for p in self.providers:
            if p.id == params[0]:
                self._current = p.id
                self._current_name = p.name
| lgpl-3.0 |
yanheven/neutron | neutron/tests/tempest/common/generator/valid_generator.py | 34 | 2931 | # Copyright 2014 Deutsche Telekom AG
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
import six
import neutron.tests.tempest.common.generator.base_generator as base
LOG = logging.getLogger(__name__)
class ValidTestGenerator(base.BasicGeneratorSet):
    """Produces sample values that satisfy a given JSON-schema fragment."""

    @base.generator_type("string")
    @base.simple_generator
    def generate_valid_string(self, schema):
        """Return the shortest acceptable string of 'x' characters."""
        # TODO(dkr mko): handle format and pattern
        return "x" * schema.get("minLength", 1)

    @base.generator_type("integer")
    @base.simple_generator
    def generate_valid_integer(self, schema):
        """Return an integer lying on the boundary of the allowed range."""
        # TODO(dkr mko): handle multipleOf
        if "minimum" in schema:
            lower = schema["minimum"]
            return lower + 1 if "exclusiveMinimum" in schema else lower
        if "maximum" in schema:
            upper = schema["maximum"]
            return upper - 1 if "exclusiveMaximum" in schema else upper
        return 0

    @base.generator_type("object")
    @base.simple_generator
    def generate_valid_object(self, schema):
        """Return a dict with one valid value per declared property."""
        return dict((key, self.generate_valid(subschema))
                    for key, subschema in six.iteritems(schema["properties"]))

    def generate(self, schema):
        """Run every registered generator for the schema's type.

        Raises TypeError for unsupported types and Exception when a
        generator yields something other than a list or tuple.
        """
        schema_type = schema["type"]
        if isinstance(schema_type, list):
            if "integer" not in schema_type:
                raise Exception("non-integer list types not supported")
            schema_type = "integer"
        if schema_type not in self.types_dict:
            raise TypeError("generator (%s) doesn't support type: %s"
                            % (self.__class__.__name__, schema_type))
        result = []
        for generator in self.types_dict[schema_type]:
            produced = generator(schema)
            if produced is None:
                continue
            if isinstance(produced, list):
                result.extend(produced)
            elif isinstance(produced, tuple):
                result.append(produced)
            else:
                raise Exception("generator (%s) returns invalid result: %s"
                                % (generator, produced))
        return result

    def generate_valid(self, schema):
        """Return just the value part of the first generated sample."""
        return self.generate(schema)[0][1]
| apache-2.0 |
geky/mbed | tools/host_tests/host_tests_plugins/module_reset_mps2.py | 73 | 2576 | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from host_test_plugins import HostTestPluginBase
from time import sleep
# Note: This plugin is not fully functional, needs improvements
class HostTestPluginResetMethod_MPS2(HostTestPluginBase):
    """ Plugin used to reset ARM_MPS2 platform
    Supports:
    reboot.txt - startup from standby state, reboots when in run mode.
    shutdown.txt - shutdown from run mode.
    reset.txt - reset FPGA during run mode.
    """
    def touch_file(self, file):
        """ Touch file and set timestamp to items
        """
        # Create "<file>.tmp" and rename it over the target so the MPS2
        # mass-storage interface observes a freshly written file.
        tfile = file+'.tmp'
        fhandle = open(tfile, 'a')
        try:
            fhandle.close()
        finally:
            os.rename(tfile, file)
        return True

    # Plugin interface
    name = 'HostTestPluginResetMethod_MPS2'
    type = 'ResetMethod'
    capabilities = ['mps2-reboot', 'mps2-reset']
    required_parameters = ['disk']

    def setup(self, *args, **kwargs):
        """ Prepare / configure plugin to work.
            This method can receive plugin specific parameters by kwargs and
            ignore other parameters which may affect other plugins.
        """
        # Nothing to configure for this reset method.
        return True

    def execute(self, capabilitity, *args, **kwargs):
        """ Executes capability by name.
            Each capability may directly just call some command line
            program or execute building pythonic function
        """
        # NOTE(review): this unconditional return short-circuits the whole
        # method -- everything below is unreachable. The file comment says
        # the plugin "is not fully functional, needs improvements", so this
        # may be a deliberate disable; confirm before removing it.
        return True
        result = False
        if self.check_parameters(capabilitity, *args, **kwargs) is True:
            disk = kwargs['disk']
            if capabilitity == 'mps2-reboot' and self.touch_file(disk + 'reboot.txt'):
                sleep(20)
                result = True

            # NOTE(review): 'mps2-reset' also touches 'reboot.txt' --
            # presumably 'reset.txt' was intended; verify before changing.
            elif capabilitity == 'mps2-reset' and self.touch_file(disk + 'reboot.txt'):
                sleep(20)
                result = True
        return result
def load_plugin():
    """ Returns plugin available in this module
    """
    plugin = HostTestPluginResetMethod_MPS2()
    return plugin
| apache-2.0 |
pbrod/scipy | scipy/io/mmio.py | 23 | 27443 | """
Matrix Market I/O in Python.
See http://math.nist.gov/MatrixMarket/formats.html
for information about the Matrix Market format.
"""
#
# Author: Pearu Peterson <pearu@cens.ioc.ee>
# Created: October, 2004
#
# References:
# http://math.nist.gov/MatrixMarket/
#
from __future__ import division, print_function, absolute_import
import os
import sys
from numpy import (asarray, real, imag, conj, zeros, ndarray, concatenate,
ones, ascontiguousarray, vstack, savetxt, fromfile,
fromstring, can_cast)
from numpy.compat import asbytes, asstr
from scipy._lib.six import string_types
from scipy.sparse import coo_matrix, isspmatrix
__all__ = ['mminfo', 'mmread', 'mmwrite', 'MMFile']
# -----------------------------------------------------------------------------
def mminfo(source):
    """
    Return size and storage parameters from Matrix Market file-like 'source'.

    Parameters
    ----------
    source : str or file-like
        Matrix Market filename (extension .mtx) or open file-like object

    Returns
    -------
    rows : int
        Number of matrix rows.
    cols : int
        Number of matrix columns.
    entries : int
        Number of non-zero entries of a sparse matrix,
        or rows*cols for a dense one.
    format : str
        Either 'coordinate' or 'array'.
    field : str
        Either 'real', 'complex', 'pattern', or 'integer'.
    symmetry : str
        Either 'general', 'symmetric', 'skew-symmetric', or 'hermitian'.
    """
    # Header parsing is a classmethod, so no MMFile instance is needed.
    return MMFile.info(source)
# -----------------------------------------------------------------------------
def mmread(source):
    """
    Read the contents of a Matrix Market file-like 'source' into a matrix.

    Parameters
    ----------
    source : str or file-like
        Matrix Market filename (extensions .mtx, .mtz.gz)
        or open file-like object.

    Returns
    -------
    a : ndarray or coo_matrix
        Dense or sparse matrix, depending on the matrix format stored in
        the Matrix Market file.
    """
    reader = MMFile()
    return reader.read(source)
# -----------------------------------------------------------------------------
def mmwrite(target, a, comment='', field=None, precision=None, symmetry=None):
    """
    Write the sparse or dense array `a` to Matrix Market file-like `target`.

    Parameters
    ----------
    target : str or file-like
        Matrix Market filename (extension .mtx) or open file-like object.
    a : array like
        Sparse or dense 2D array.
    comment : str, optional
        Comments to be prepended to the Matrix Market file.
    field : None or str, optional
        Either 'real', 'complex', 'pattern', or 'integer'.
    precision : None or int, optional
        Number of digits to display for real or complex values.
    symmetry : None or str, optional
        Either 'general', 'symmetric', 'skew-symmetric', or 'hermitian'.
        When None, the symmetry type is inferred from the values of 'a'.
    """
    writer = MMFile()
    writer.write(target, a, comment, field, precision, symmetry)
###############################################################################
class MMFile (object):
__slots__ = ('_rows',
'_cols',
'_entries',
'_format',
'_field',
'_symmetry')
@property
def rows(self):
return self._rows
@property
def cols(self):
return self._cols
@property
def entries(self):
return self._entries
@property
def format(self):
return self._format
@property
def field(self):
return self._field
@property
def symmetry(self):
return self._symmetry
@property
def has_symmetry(self):
return self._symmetry in (self.SYMMETRY_SYMMETRIC,
self.SYMMETRY_SKEW_SYMMETRIC,
self.SYMMETRY_HERMITIAN)
# format values
FORMAT_COORDINATE = 'coordinate'
FORMAT_ARRAY = 'array'
FORMAT_VALUES = (FORMAT_COORDINATE, FORMAT_ARRAY)
@classmethod
def _validate_format(self, format):
if format not in self.FORMAT_VALUES:
raise ValueError('unknown format type %s, must be one of %s' %
(format, self.FORMAT_VALUES))
# field values
FIELD_INTEGER = 'integer'
FIELD_REAL = 'real'
FIELD_COMPLEX = 'complex'
FIELD_PATTERN = 'pattern'
FIELD_VALUES = (FIELD_INTEGER, FIELD_REAL, FIELD_COMPLEX, FIELD_PATTERN)
@classmethod
def _validate_field(self, field):
if field not in self.FIELD_VALUES:
raise ValueError('unknown field type %s, must be one of %s' %
(field, self.FIELD_VALUES))
# symmetry values
SYMMETRY_GENERAL = 'general'
SYMMETRY_SYMMETRIC = 'symmetric'
SYMMETRY_SKEW_SYMMETRIC = 'skew-symmetric'
SYMMETRY_HERMITIAN = 'hermitian'
SYMMETRY_VALUES = (SYMMETRY_GENERAL, SYMMETRY_SYMMETRIC,
SYMMETRY_SKEW_SYMMETRIC, SYMMETRY_HERMITIAN)
@classmethod
def _validate_symmetry(self, symmetry):
if symmetry not in self.SYMMETRY_VALUES:
raise ValueError('unknown symmetry type %s, must be one of %s' %
(symmetry, self.SYMMETRY_VALUES))
DTYPES_BY_FIELD = {FIELD_INTEGER: 'intp',
FIELD_REAL: 'd',
FIELD_COMPLEX: 'D',
FIELD_PATTERN: 'd'}
# -------------------------------------------------------------------------
    @staticmethod
    def reader():
        # Stub retained for backwards compatibility of the public API;
        # actual reading happens in MMFile.read().
        pass
# -------------------------------------------------------------------------
    @staticmethod
    def writer():
        # Stub retained for backwards compatibility of the public API;
        # actual writing happens in MMFile.write().
        pass
# -------------------------------------------------------------------------
@classmethod
def info(self, source):
"""
Return size, storage parameters from Matrix Market file-like 'source'.
Parameters
----------
source : str or file-like
Matrix Market filename (extension .mtx) or open file-like object
Returns
-------
rows : int
Number of matrix rows.
cols : int
Number of matrix columns.
entries : int
Number of non-zero entries of a sparse matrix
or rows*cols for a dense matrix.
format : str
Either 'coordinate' or 'array'.
field : str
Either 'real', 'complex', 'pattern', or 'integer'.
symmetry : str
Either 'general', 'symmetric', 'skew-symmetric', or 'hermitian'.
"""
stream, close_it = self._open(source)
try:
# read and validate header line
line = stream.readline()
mmid, matrix, format, field, symmetry = \
[asstr(part.strip()) for part in line.split()]
if not mmid.startswith('%%MatrixMarket'):
raise ValueError('source is not in Matrix Market format')
if not matrix.lower() == 'matrix':
raise ValueError("Problem reading file header: " + line)
# http://math.nist.gov/MatrixMarket/formats.html
if format.lower() == 'array':
format = self.FORMAT_ARRAY
elif format.lower() == 'coordinate':
format = self.FORMAT_COORDINATE
# skip comments
while line.startswith(b'%'):
line = stream.readline()
line = line.split()
if format == self.FORMAT_ARRAY:
if not len(line) == 2:
raise ValueError("Header line not of length 2: " + line)
rows, cols = map(int, line)
entries = rows * cols
else:
if not len(line) == 3:
raise ValueError("Header line not of length 3: " + line)
rows, cols, entries = map(int, line)
return (rows, cols, entries, format, field.lower(),
symmetry.lower())
finally:
if close_it:
stream.close()
# -------------------------------------------------------------------------
@staticmethod
def _open(filespec, mode='rb'):
""" Return an open file stream for reading based on source.
If source is a file name, open it (after trying to find it with mtx and
gzipped mtx extensions). Otherwise, just return source.
Parameters
----------
filespec : str or file-like
String giving file name or file-like object
mode : str, optional
Mode with which to open file, if `filespec` is a file name.
Returns
-------
fobj : file-like
Open file-like object.
close_it : bool
True if the calling function should close this file when done,
false otherwise.
"""
close_it = False
if isinstance(filespec, string_types):
close_it = True
# open for reading
if mode[0] == 'r':
# determine filename plus extension
if not os.path.isfile(filespec):
if os.path.isfile(filespec+'.mtx'):
filespec = filespec + '.mtx'
elif os.path.isfile(filespec+'.mtx.gz'):
filespec = filespec + '.mtx.gz'
elif os.path.isfile(filespec+'.mtx.bz2'):
filespec = filespec + '.mtx.bz2'
# open filename
if filespec.endswith('.gz'):
import gzip
stream = gzip.open(filespec, mode)
elif filespec.endswith('.bz2'):
import bz2
stream = bz2.BZ2File(filespec, 'rb')
else:
stream = open(filespec, mode)
# open for writing
else:
if filespec[-4:] != '.mtx':
filespec = filespec + '.mtx'
stream = open(filespec, mode)
else:
stream = filespec
return stream, close_it
# -------------------------------------------------------------------------
@staticmethod
def _get_symmetry(a):
m, n = a.shape
if m != n:
return MMFile.SYMMETRY_GENERAL
issymm = True
isskew = True
isherm = a.dtype.char in 'FD'
# sparse input
if isspmatrix(a):
# check if number of nonzero entries of lower and upper triangle
# matrix are equal
a = a.tocoo()
(row, col) = a.nonzero()
if (row < col).sum() != (row > col).sum():
return MMFile.SYMMETRY_GENERAL
# define iterator over symmetric pair entries
a = a.todok()
def symm_iterator():
for ((i, j), aij) in a.items():
if i > j:
aji = a[j, i]
yield (aij, aji)
# non-sparse input
else:
# define iterator over symmetric pair entries
def symm_iterator():
for j in range(n):
for i in range(j+1, n):
aij, aji = a[i][j], a[j][i]
yield (aij, aji)
# check for symmetry
for (aij, aji) in symm_iterator():
if issymm and aij != aji:
issymm = False
if isskew and aij != -aji:
isskew = False
if isherm and aij != conj(aji):
isherm = False
if not (issymm or isskew or isherm):
break
# return symmetry value
if issymm:
return MMFile.SYMMETRY_SYMMETRIC
if isskew:
return MMFile.SYMMETRY_SKEW_SYMMETRIC
if isherm:
return MMFile.SYMMETRY_HERMITIAN
return MMFile.SYMMETRY_GENERAL
# -------------------------------------------------------------------------
@staticmethod
def _field_template(field, precision):
return {MMFile.FIELD_REAL: '%%.%ie\n' % precision,
MMFile.FIELD_INTEGER: '%i\n',
MMFile.FIELD_COMPLEX: '%%.%ie %%.%ie\n' %
(precision, precision)
}.get(field, None)
# -------------------------------------------------------------------------
    def __init__(self, **kwargs):
        # All state lives in __slots__; _init_attrs validates the keyword
        # names and fills unspecified slots with None.
        self._init_attrs(**kwargs)
# -------------------------------------------------------------------------
def read(self, source):
"""
Reads the contents of a Matrix Market file-like 'source' into a matrix.
Parameters
----------
source : str or file-like
Matrix Market filename (extensions .mtx, .mtz.gz)
or open file object.
Returns
-------
a : ndarray or coo_matrix
Dense or sparse matrix depending on the matrix format in the
Matrix Market file.
"""
stream, close_it = self._open(source)
try:
self._parse_header(stream)
return self._parse_body(stream)
finally:
if close_it:
stream.close()
# -------------------------------------------------------------------------
def write(self, target, a, comment='', field=None, precision=None,
symmetry=None):
"""
Writes sparse or dense array `a` to Matrix Market file-like `target`.
Parameters
----------
target : str or file-like
Matrix Market filename (extension .mtx) or open file-like object.
a : array like
Sparse or dense 2D array.
comment : str, optional
Comments to be prepended to the Matrix Market file.
field : None or str, optional
Either 'real', 'complex', 'pattern', or 'integer'.
precision : None or int, optional
Number of digits to display for real or complex values.
symmetry : None or str, optional
Either 'general', 'symmetric', 'skew-symmetric', or 'hermitian'.
If symmetry is None the symmetry type of 'a' is determined by its
values.
"""
stream, close_it = self._open(target, 'wb')
try:
self._write(stream, a, comment, field, precision, symmetry)
finally:
if close_it:
stream.close()
else:
stream.flush()
# -------------------------------------------------------------------------
def _init_attrs(self, **kwargs):
"""
Initialize each attributes with the corresponding keyword arg value
or a default of None
"""
attrs = self.__class__.__slots__
public_attrs = [attr[1:] for attr in attrs]
invalid_keys = set(kwargs.keys()) - set(public_attrs)
if invalid_keys:
raise ValueError('''found %s invalid keyword arguments, please only
use %s''' % (tuple(invalid_keys),
public_attrs))
for attr in attrs:
setattr(self, attr, kwargs.get(attr[1:], None))
# -------------------------------------------------------------------------
    def _parse_header(self, stream):
        # Read the banner/size lines via info() and mirror the results into
        # the instance slots so _parse_body can rely on them.
        rows, cols, entries, format, field, symmetry = \
            self.__class__.info(stream)
        self._init_attrs(rows=rows, cols=cols, entries=entries, format=format,
                         field=field, symmetry=symmetry)
# -------------------------------------------------------------------------
def _parse_body(self, stream):
rows, cols, entries, format, field, symm = (self.rows, self.cols,
self.entries, self.format,
self.field, self.symmetry)
try:
from scipy.sparse import coo_matrix
except ImportError:
coo_matrix = None
dtype = self.DTYPES_BY_FIELD.get(field, None)
has_symmetry = self.has_symmetry
is_integer = field == self.FIELD_INTEGER
is_complex = field == self.FIELD_COMPLEX
is_skew = symm == self.SYMMETRY_SKEW_SYMMETRIC
is_herm = symm == self.SYMMETRY_HERMITIAN
is_pattern = field == self.FIELD_PATTERN
if format == self.FORMAT_ARRAY:
a = zeros((rows, cols), dtype=dtype)
line = 1
i, j = 0, 0
while line:
line = stream.readline()
if not line or line.startswith(b'%'):
continue
if is_integer:
aij = int(line)
elif is_complex:
aij = complex(*map(float, line.split()))
else:
aij = float(line)
a[i, j] = aij
if has_symmetry and i != j:
if is_skew:
a[j, i] = -aij
elif is_herm:
a[j, i] = conj(aij)
else:
a[j, i] = aij
if i < rows-1:
i = i + 1
else:
j = j + 1
if not has_symmetry:
i = 0
else:
i = j
if not (i in [0, j] and j == cols):
raise ValueError("Parse error, did not read all lines.")
elif format == self.FORMAT_COORDINATE and coo_matrix is None:
# Read sparse matrix to dense when coo_matrix is not available.
a = zeros((rows, cols), dtype=dtype)
line = 1
k = 0
while line:
line = stream.readline()
if not line or line.startswith(b'%'):
continue
l = line.split()
i, j = map(int, l[:2])
i, j = i-1, j-1
if is_integer:
aij = int(l[2])
elif is_complex:
aij = complex(*map(float, l[2:]))
else:
aij = float(l[2])
a[i, j] = aij
if has_symmetry and i != j:
if is_skew:
a[j, i] = -aij
elif is_herm:
a[j, i] = conj(aij)
else:
a[j, i] = aij
k = k + 1
if not k == entries:
ValueError("Did not read all entries")
elif format == self.FORMAT_COORDINATE:
# Read sparse COOrdinate format
if entries == 0:
# empty matrix
return coo_matrix((rows, cols), dtype=dtype)
I = zeros(entries, dtype='intc')
J = zeros(entries, dtype='intc')
if is_pattern:
V = ones(entries, dtype='int8')
elif is_integer:
V = zeros(entries, dtype='intp')
elif is_complex:
V = zeros(entries, dtype='complex')
else:
V = zeros(entries, dtype='float')
entry_number = 0
for line in stream:
if not line or line.startswith(b'%'):
continue
if entry_number+1 > entries:
raise ValueError("'entries' in header is smaller than "
"number of entries")
l = line.split()
I[entry_number], J[entry_number] = map(int, l[:2])
if not is_pattern:
if is_integer:
V[entry_number] = int(l[2])
elif is_complex:
V[entry_number] = complex(*map(float, l[2:]))
else:
V[entry_number] = float(l[2])
entry_number += 1
if entry_number < entries:
raise ValueError("'entries' in header is larger than "
"number of entries")
I -= 1 # adjust indices (base 1 -> base 0)
J -= 1
if has_symmetry:
mask = (I != J) # off diagonal mask
od_I = I[mask]
od_J = J[mask]
od_V = V[mask]
I = concatenate((I, od_J))
J = concatenate((J, od_I))
if is_skew:
od_V *= -1
elif is_herm:
od_V = od_V.conjugate()
V = concatenate((V, od_V))
a = coo_matrix((V, (I, J)), shape=(rows, cols), dtype=dtype)
else:
raise NotImplementedError(format)
return a
# ------------------------------------------------------------------------
def _write(self, stream, a, comment='', field=None, precision=None,
symmetry=None):
if isinstance(a, list) or isinstance(a, ndarray) or \
isinstance(a, tuple) or hasattr(a, '__array__'):
rep = self.FORMAT_ARRAY
a = asarray(a)
if len(a.shape) != 2:
raise ValueError('Expected 2 dimensional array')
rows, cols = a.shape
if field is not None:
if field == self.FIELD_INTEGER:
if not can_cast(a.dtype, 'intp'):
raise OverflowError("mmwrite does not support integer "
"dtypes larger than native 'intp'.")
a = a.astype('intp')
elif field == self.FIELD_REAL:
if a.dtype.char not in 'fd':
a = a.astype('d')
elif field == self.FIELD_COMPLEX:
if a.dtype.char not in 'FD':
a = a.astype('D')
else:
if not isspmatrix(a):
raise ValueError('unknown matrix type: %s' % type(a))
rep = 'coordinate'
rows, cols = a.shape
typecode = a.dtype.char
if precision is None:
if typecode in 'fF':
precision = 8
else:
precision = 16
if field is None:
kind = a.dtype.kind
if kind == 'i':
if not can_cast(a.dtype, 'intp'):
raise OverflowError("mmwrite does not support integer "
"dtypes larger than native 'intp'.")
field = 'integer'
elif kind == 'f':
field = 'real'
elif kind == 'c':
field = 'complex'
else:
raise TypeError('unexpected dtype kind ' + kind)
if symmetry is None:
symmetry = self._get_symmetry(a)
# validate rep, field, and symmetry
self.__class__._validate_format(rep)
self.__class__._validate_field(field)
self.__class__._validate_symmetry(symmetry)
# write initial header line
stream.write(asbytes('%%MatrixMarket matrix {0} {1} {2}\n'.format(rep,
field, symmetry)))
# write comments
for line in comment.split('\n'):
stream.write(asbytes('%%%s\n' % (line)))
template = self._field_template(field, precision)
# write dense format
if rep == self.FORMAT_ARRAY:
# write shape spec
stream.write(asbytes('%i %i\n' % (rows, cols)))
if field in (self.FIELD_INTEGER, self.FIELD_REAL):
if symmetry == self.SYMMETRY_GENERAL:
for j in range(cols):
for i in range(rows):
stream.write(asbytes(template % a[i, j]))
else:
for j in range(cols):
for i in range(j, rows):
stream.write(asbytes(template % a[i, j]))
elif field == self.FIELD_COMPLEX:
if symmetry == self.SYMMETRY_GENERAL:
for j in range(cols):
for i in range(rows):
aij = a[i, j]
stream.write(asbytes(template % (real(aij),
imag(aij))))
else:
for j in range(cols):
for i in range(j, rows):
aij = a[i, j]
stream.write(asbytes(template % (real(aij),
imag(aij))))
elif field == self.FIELD_PATTERN:
raise ValueError('pattern type inconsisted with dense format')
else:
raise TypeError('Unknown field type %s' % field)
# write sparse format
else:
coo = a.tocoo() # convert to COOrdinate format
# if symmetry format used, remove values above main diagonal
if symmetry != self.SYMMETRY_GENERAL:
lower_triangle_mask = coo.row >= coo.col
coo = coo_matrix((coo.data[lower_triangle_mask],
(coo.row[lower_triangle_mask],
coo.col[lower_triangle_mask])),
shape=coo.shape)
# write shape spec
stream.write(asbytes('%i %i %i\n' % (rows, cols, coo.nnz)))
template = self._field_template(field, precision-1)
if field == self.FIELD_PATTERN:
for r, c in zip(coo.row+1, coo.col+1):
stream.write(asbytes("%i %i\n" % (r, c)))
elif field in (self.FIELD_INTEGER, self.FIELD_REAL):
for r, c, d in zip(coo.row+1, coo.col+1, coo.data):
stream.write(asbytes(("%i %i " % (r, c)) +
(template % d)))
elif field == self.FIELD_COMPLEX:
for r, c, d in zip(coo.row+1, coo.col+1, coo.data):
stream.write(asbytes(("%i %i " % (r, c)) +
(template % (d.real, d.imag))))
else:
raise TypeError('Unknown field type %s' % field)
def _is_fromfile_compatible(stream):
    """
    Check whether `stream` is compatible with numpy.fromfile.

    Passing a gzip- or bz2-wrapped file object to ``fromfile/fromstring``
    doesn't work with Python 3, so such streams are reported as
    incompatible there.
    """
    if sys.version_info[0] < 3:
        return True

    incompatible_types = []
    for module_name, class_name in (('gzip', 'GzipFile'), ('bz2', 'BZ2File')):
        try:
            module = __import__(module_name)
        except ImportError:
            continue
        incompatible_types.append(getattr(module, class_name))

    return not isinstance(stream, tuple(incompatible_types))
# -----------------------------------------------------------------------------
if __name__ == '__main__':
import time
for filename in sys.argv[1:]:
print('Reading', filename, '...', end=' ')
sys.stdout.flush()
t = time.time()
mmread(filename)
print('took %s seconds' % (time.time() - t))
| bsd-3-clause |
RoboCupULaval/StrategyIA | ai/GameDomainObjects/ball.py | 1 | 1041 | # Under MIT License, see LICENSE.txt
from typing import Dict
from Util import Position
class Ball:
    """Game-domain ball, tracking position and velocity as Position objects.

    Speeds are compared in mm/s (velocity.norm).
    """

    def __init__(self, position=None):
        # Do NOT use `position=Position()` as the default: that single
        # Position instance would be evaluated once at definition time and
        # aliased by every Ball constructed without an explicit position,
        # so mutating one ball's position would leak into the others.
        self._position = Position() if position is None else position
        self._velocity = Position()

    def update(self, new_dict: Dict):
        """Refresh position and velocity from a dict with those two keys.

        Values go through the property setters, so they are type-checked.
        """
        self.position = new_dict['position']
        self.velocity = new_dict['velocity']

    def is_moving_fast(self, fast_speed=600.0):  # mm/s
        """Return True if the ball's speed exceeds `fast_speed`."""
        return fast_speed < self.velocity.norm

    def is_mobile(self, immobile_speed=300.0):  # mm/s
        """Return True if the ball's speed exceeds `immobile_speed`."""
        return immobile_speed < self.velocity.norm

    def is_immobile(self):
        """Return True if the ball is not considered mobile."""
        return not self.is_mobile()

    @property
    def position(self) -> Position:
        return self._position

    @position.setter
    def position(self, value):
        assert isinstance(value, Position)
        self._position = value

    @property
    def velocity(self) -> Position:
        # NOTE(review): velocity is stored as a Position vector (mm/s
        # components); presumably no dedicated velocity type exists upstream.
        return self._velocity

    @velocity.setter
    def velocity(self, value):
        assert isinstance(value, Position)
        self._velocity = value
| mit |
dkubiak789/odoo | addons/website_blog/__init__.py | 373 | 1036 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import controllers
import models
import wizard
| agpl-3.0 |
GhostThrone/django | tests/gis_tests/distapp/tests.py | 5 | 34623 | from __future__ import unicode_literals
from django.contrib.gis.db.models.functions import (
Area, Distance, Length, Perimeter, Transform,
)
from django.contrib.gis.geos import GEOSGeometry, LineString, Point
from django.contrib.gis.measure import D # alias for Distance
from django.db import connection
from django.db.models import F, Q
from django.test import TestCase, ignore_warnings, skipUnlessDBFeature
from django.utils.deprecation import RemovedInDjango20Warning
from ..utils import no_oracle, oracle, postgis, spatialite
from .models import (
AustraliaCity, CensusZipcode, Interstate, SouthTexasCity, SouthTexasCityFt,
SouthTexasInterstate, SouthTexasZipcode,
)
@skipUnlessDBFeature("gis_enabled")
class DistanceTest(TestCase):
fixtures = ['initial']
def setUp(self):
# A point we are testing distances with -- using a WGS84
# coordinate that'll be implicitly transformed to that to
# the coordinate system of the field, EPSG:32140 (Texas South Central
# w/units in meters)
self.stx_pnt = GEOSGeometry('POINT (-95.370401017314293 29.704867409475465)', 4326)
# Another one for Australia
self.au_pnt = GEOSGeometry('POINT (150.791 -34.4919)', 4326)
def get_names(self, qs):
cities = [c.name for c in qs]
cities.sort()
return cities
def test_init(self):
"""
Test initialization of distance models.
"""
self.assertEqual(9, SouthTexasCity.objects.count())
self.assertEqual(9, SouthTexasCityFt.objects.count())
self.assertEqual(11, AustraliaCity.objects.count())
self.assertEqual(4, SouthTexasZipcode.objects.count())
self.assertEqual(4, CensusZipcode.objects.count())
self.assertEqual(1, Interstate.objects.count())
self.assertEqual(1, SouthTexasInterstate.objects.count())
@skipUnlessDBFeature("supports_dwithin_lookup")
def test_dwithin(self):
"""
Test the `dwithin` lookup type.
"""
# Distances -- all should be equal (except for the
# degree/meter pair in au_cities, that's somewhat
# approximate).
tx_dists = [(7000, 22965.83), D(km=7), D(mi=4.349)]
au_dists = [(0.5, 32000), D(km=32), D(mi=19.884)]
# Expected cities for Australia and Texas.
tx_cities = ['Downtown Houston', 'Southside Place']
au_cities = ['Mittagong', 'Shellharbour', 'Thirroul', 'Wollongong']
# Performing distance queries on two projected coordinate systems one
# with units in meters and the other in units of U.S. survey feet.
for dist in tx_dists:
if isinstance(dist, tuple):
dist1, dist2 = dist
else:
dist1 = dist2 = dist
qs1 = SouthTexasCity.objects.filter(point__dwithin=(self.stx_pnt, dist1))
qs2 = SouthTexasCityFt.objects.filter(point__dwithin=(self.stx_pnt, dist2))
for qs in qs1, qs2:
self.assertEqual(tx_cities, self.get_names(qs))
# Now performing the `dwithin` queries on a geodetic coordinate system.
for dist in au_dists:
if isinstance(dist, D) and not oracle:
type_error = True
else:
type_error = False
if isinstance(dist, tuple):
if oracle:
dist = dist[1]
else:
dist = dist[0]
# Creating the query set.
qs = AustraliaCity.objects.order_by('name')
if type_error:
# A ValueError should be raised on PostGIS when trying to pass
# Distance objects into a DWithin query using a geodetic field.
self.assertRaises(ValueError, AustraliaCity.objects.filter(point__dwithin=(self.au_pnt, dist)).count)
else:
self.assertListEqual(au_cities, self.get_names(qs.filter(point__dwithin=(self.au_pnt, dist))))
@skipUnlessDBFeature("has_distance_method")
@ignore_warnings(category=RemovedInDjango20Warning)
def test_distance_projected(self):
"""
Test the `distance` GeoQuerySet method on projected coordinate systems.
"""
# The point for La Grange, TX
lagrange = GEOSGeometry('POINT(-96.876369 29.905320)', 4326)
# Reference distances in feet and in meters. Got these values from
# using the provided raw SQL statements.
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 32140))
# FROM distapp_southtexascity;
m_distances = [147075.069813, 139630.198056, 140888.552826,
138809.684197, 158309.246259, 212183.594374,
70870.188967, 165337.758878, 139196.085105]
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 2278))
# FROM distapp_southtexascityft;
# Oracle 11 thinks this is not a projected coordinate system, so it's
# not tested.
ft_distances = [482528.79154625, 458103.408123001, 462231.860397575,
455411.438904354, 519386.252102563, 696139.009211594,
232513.278304279, 542445.630586414, 456679.155883207]
# Testing using different variations of parameters and using models
# with different projected coordinate systems.
dist1 = SouthTexasCity.objects.distance(lagrange, field_name='point').order_by('id')
dist2 = SouthTexasCity.objects.distance(lagrange).order_by('id') # Using GEOSGeometry parameter
if spatialite or oracle:
dist_qs = [dist1, dist2]
else:
dist3 = SouthTexasCityFt.objects.distance(lagrange.ewkt).order_by('id') # Using EWKT string parameter.
dist4 = SouthTexasCityFt.objects.distance(lagrange).order_by('id')
dist_qs = [dist1, dist2, dist3, dist4]
# Original query done on PostGIS, have to adjust AlmostEqual tolerance
# for Oracle.
tol = 2 if oracle else 5
# Ensuring expected distances are returned for each distance queryset.
for qs in dist_qs:
for i, c in enumerate(qs):
self.assertAlmostEqual(m_distances[i], c.distance.m, tol)
self.assertAlmostEqual(ft_distances[i], c.distance.survey_ft, tol)
@skipUnlessDBFeature("has_distance_method", "supports_distance_geodetic")
@ignore_warnings(category=RemovedInDjango20Warning)
def test_distance_geodetic(self):
"""
Test the `distance` GeoQuerySet method on geodetic coordinate systems.
"""
tol = 2 if oracle else 5
# Testing geodetic distance calculation with a non-point geometry
# (a LineString of Wollongong and Shellharbour coords).
ls = LineString(((150.902, -34.4245), (150.87, -34.5789)))
# Reference query:
# SELECT ST_distance_sphere(point, ST_GeomFromText('LINESTRING(150.9020 -34.4245,150.8700 -34.5789)', 4326))
# FROM distapp_australiacity ORDER BY name;
distances = [1120954.92533513, 140575.720018241, 640396.662906304,
60580.9693849269, 972807.955955075, 568451.8357838,
40435.4335201384, 0, 68272.3896586844, 12375.0643697706, 0]
qs = AustraliaCity.objects.distance(ls).order_by('name')
for city, distance in zip(qs, distances):
# Testing equivalence to within a meter.
self.assertAlmostEqual(distance, city.distance.m, 0)
# Got the reference distances using the raw SQL statements:
# SELECT ST_distance_spheroid(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326),
# 'SPHEROID["WGS 84",6378137.0,298.257223563]') FROM distapp_australiacity WHERE (NOT (id = 11));
# SELECT ST_distance_sphere(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326))
# FROM distapp_australiacity WHERE (NOT (id = 11)); st_distance_sphere
if connection.ops.postgis and connection.ops.proj_version_tuple() >= (4, 7, 0):
# PROJ.4 versions 4.7+ have updated datums, and thus different
# distance values.
spheroid_distances = [60504.0628957201, 77023.9489850262, 49154.8867574404,
90847.4358768573, 217402.811919332, 709599.234564757,
640011.483550888, 7772.00667991925, 1047861.78619339,
1165126.55236034]
sphere_distances = [60580.9693849267, 77144.0435286473, 49199.4415344719,
90804.7533823494, 217713.384600405, 709134.127242793,
639828.157159169, 7786.82949717788, 1049204.06569028,
1162623.7238134]
else:
spheroid_distances = [60504.0628825298, 77023.948962654, 49154.8867507115,
90847.435881812, 217402.811862568, 709599.234619957,
640011.483583758, 7772.00667666425, 1047861.7859506,
1165126.55237647]
sphere_distances = [60580.7612632291, 77143.7785056615, 49199.2725132184,
90804.4414289463, 217712.63666124, 709131.691061906,
639825.959074112, 7786.80274606706, 1049200.46122281,
1162619.7297006]
# Testing with spheroid distances first.
hillsdale = AustraliaCity.objects.get(name='Hillsdale')
qs = AustraliaCity.objects.exclude(id=hillsdale.id).distance(hillsdale.point, spheroid=True).order_by('id')
for i, c in enumerate(qs):
self.assertAlmostEqual(spheroid_distances[i], c.distance.m, tol)
if postgis:
# PostGIS uses sphere-only distances by default, testing these as well.
qs = AustraliaCity.objects.exclude(id=hillsdale.id).distance(hillsdale.point).order_by('id')
for i, c in enumerate(qs):
self.assertAlmostEqual(sphere_distances[i], c.distance.m, tol)
@no_oracle # Oracle already handles geographic distance calculation.
@skipUnlessDBFeature("has_distance_method")
@ignore_warnings(category=RemovedInDjango20Warning)
def test_distance_transform(self):
"""
Test the `distance` GeoQuerySet method used with `transform` on a geographic field.
"""
# We'll be using a Polygon (created by buffering the centroid
# of 77005 to 100m) -- which aren't allowed in geographic distance
# queries normally, however our field has been transformed to
# a non-geographic system.
z = SouthTexasZipcode.objects.get(name='77005')
# Reference query:
# SELECT ST_Distance(ST_Transform("distapp_censuszipcode"."poly", 32140),
# ST_GeomFromText('<buffer_wkt>', 32140))
# FROM "distapp_censuszipcode";
dists_m = [3553.30384972258, 1243.18391525602, 2186.15439472242]
# Having our buffer in the SRID of the transformation and of the field
# -- should get the same results. The first buffer has no need for
# transformation SQL because it is the same SRID as what was given
# to `transform()`. The second buffer will need to be transformed,
# however.
buf1 = z.poly.centroid.buffer(100)
buf2 = buf1.transform(4269, clone=True)
ref_zips = ['77002', '77025', '77401']
for buf in [buf1, buf2]:
qs = CensusZipcode.objects.exclude(name='77005').transform(32140).distance(buf).order_by('name')
self.assertListEqual(ref_zips, self.get_names(qs))
for i, z in enumerate(qs):
self.assertAlmostEqual(z.distance.m, dists_m[i], 5)
@skipUnlessDBFeature("supports_distances_lookups")
def test_distance_lookups(self):
"""
Test the `distance_lt`, `distance_gt`, `distance_lte`, and `distance_gte` lookup types.
"""
# Retrieving the cities within a 20km 'donut' w/a 7km radius 'hole'
# (thus, Houston and Southside place will be excluded as tested in
# the `test02_dwithin` above).
qs1 = SouthTexasCity.objects.filter(point__distance_gte=(self.stx_pnt, D(km=7))).filter(
point__distance_lte=(self.stx_pnt, D(km=20)),
)
# Can't determine the units on SpatiaLite from PROJ.4 string, and
# Oracle 11 incorrectly thinks it is not projected.
if spatialite or oracle:
dist_qs = (qs1,)
else:
qs2 = SouthTexasCityFt.objects.filter(point__distance_gte=(self.stx_pnt, D(km=7))).filter(
point__distance_lte=(self.stx_pnt, D(km=20)),
)
dist_qs = (qs1, qs2)
for qs in dist_qs:
cities = self.get_names(qs)
self.assertEqual(cities, ['Bellaire', 'Pearland', 'West University Place'])
# Doing a distance query using Polygons instead of a Point.
z = SouthTexasZipcode.objects.get(name='77005')
qs = SouthTexasZipcode.objects.exclude(name='77005').filter(poly__distance_lte=(z.poly, D(m=275)))
self.assertEqual(['77025', '77401'], self.get_names(qs))
# If we add a little more distance 77002 should be included.
qs = SouthTexasZipcode.objects.exclude(name='77005').filter(poly__distance_lte=(z.poly, D(m=300)))
self.assertEqual(['77002', '77025', '77401'], self.get_names(qs))
@skipUnlessDBFeature("supports_distances_lookups", "supports_distance_geodetic")
def test_geodetic_distance_lookups(self):
"""
Test distance lookups on geodetic coordinate systems.
"""
# Line is from Canberra to Sydney. Query is for all other cities within
# a 100km of that line (which should exclude only Hobart & Adelaide).
line = GEOSGeometry('LINESTRING(144.9630 -37.8143,151.2607 -33.8870)', 4326)
dist_qs = AustraliaCity.objects.filter(point__distance_lte=(line, D(km=100)))
self.assertEqual(9, dist_qs.count())
self.assertEqual(['Batemans Bay', 'Canberra', 'Hillsdale',
'Melbourne', 'Mittagong', 'Shellharbour',
'Sydney', 'Thirroul', 'Wollongong'],
self.get_names(dist_qs))
# Too many params (4 in this case) should raise a ValueError.
queryset = AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)', D(km=100), 'spheroid', '4'))
self.assertRaises(ValueError, len, queryset)
# Not enough params should raise a ValueError.
self.assertRaises(ValueError, len,
AustraliaCity.objects.filter(point__distance_lte=('POINT(5 23)',)))
# Getting all cities w/in 550 miles of Hobart.
hobart = AustraliaCity.objects.get(name='Hobart')
qs = AustraliaCity.objects.exclude(name='Hobart').filter(point__distance_lte=(hobart.point, D(mi=550)))
cities = self.get_names(qs)
self.assertEqual(cities, ['Batemans Bay', 'Canberra', 'Melbourne'])
# Cities that are either really close or really far from Wollongong --
# and using different units of distance.
wollongong = AustraliaCity.objects.get(name='Wollongong')
d1, d2 = D(yd=19500), D(nm=400) # Yards (~17km) & Nautical miles.
# Normal geodetic distance lookup (uses `distance_sphere` on PostGIS.
gq1 = Q(point__distance_lte=(wollongong.point, d1))
gq2 = Q(point__distance_gte=(wollongong.point, d2))
qs1 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq1 | gq2)
# Geodetic distance lookup but telling GeoDjango to use `distance_spheroid`
# instead (we should get the same results b/c accuracy variance won't matter
# in this test case).
querysets = [qs1]
if connection.features.has_distance_spheroid_method:
gq3 = Q(point__distance_lte=(wollongong.point, d1, 'spheroid'))
gq4 = Q(point__distance_gte=(wollongong.point, d2, 'spheroid'))
qs2 = AustraliaCity.objects.exclude(name='Wollongong').filter(gq3 | gq4)
querysets.append(qs2)
for qs in querysets:
cities = self.get_names(qs)
self.assertEqual(cities, ['Adelaide', 'Hobart', 'Shellharbour', 'Thirroul'])
@skipUnlessDBFeature("supports_distances_lookups")
def test_distance_lookups_with_expression_rhs(self):
qs = SouthTexasCity.objects.filter(
point__distance_lte=(self.stx_pnt, F('radius')),
).order_by('name')
self.assertEqual(
self.get_names(qs),
['Bellaire', 'Downtown Houston', 'Southside Place', 'West University Place']
)
# With a combined expression
qs = SouthTexasCity.objects.filter(
point__distance_lte=(self.stx_pnt, F('radius') * 2),
).order_by('name')
self.assertEqual(len(qs), 5)
self.assertIn('Pearland', self.get_names(qs))
# With spheroid param
if connection.features.supports_distance_geodetic:
hobart = AustraliaCity.objects.get(name='Hobart')
qs = AustraliaCity.objects.filter(
point__distance_lte=(hobart.point, F('radius') * 70, 'spheroid'),
).order_by('name')
self.assertEqual(self.get_names(qs), ['Canberra', 'Hobart', 'Melbourne'])
@skipUnlessDBFeature("has_area_method")
@ignore_warnings(category=RemovedInDjango20Warning)
def test_area(self):
"""
Test the `area` GeoQuerySet method.
"""
# Reference queries:
# SELECT ST_Area(poly) FROM distapp_southtexaszipcode;
area_sq_m = [5437908.90234375, 10183031.4389648, 11254471.0073242, 9881708.91772461]
# Tolerance has to be lower for Oracle
tol = 2
for i, z in enumerate(SouthTexasZipcode.objects.order_by('name').area()):
self.assertAlmostEqual(area_sq_m[i], z.area.sq_m, tol)
@skipUnlessDBFeature("has_length_method")
@ignore_warnings(category=RemovedInDjango20Warning)
def test_length(self):
"""
Test the `length` GeoQuerySet method.
"""
# Reference query (should use `length_spheroid`).
# SELECT ST_length_spheroid(ST_GeomFromText('<wkt>', 4326) 'SPHEROID["WGS 84",6378137,298.257223563,
# AUTHORITY["EPSG","7030"]]');
len_m1 = 473504.769553813
len_m2 = 4617.668
if connection.features.supports_distance_geodetic:
qs = Interstate.objects.length()
tol = 2 if oracle else 3
self.assertAlmostEqual(len_m1, qs[0].length.m, tol)
else:
# Does not support geodetic coordinate systems.
self.assertRaises(ValueError, Interstate.objects.length)
# Now doing length on a projected coordinate system.
i10 = SouthTexasInterstate.objects.length().get(name='I-10')
self.assertAlmostEqual(len_m2, i10.length.m, 2)
@skipUnlessDBFeature("has_perimeter_method")
@ignore_warnings(category=RemovedInDjango20Warning)
def test_perimeter(self):
"""
Test the `perimeter` GeoQuerySet method.
"""
# Reference query:
# SELECT ST_Perimeter(distapp_southtexaszipcode.poly) FROM distapp_southtexaszipcode;
perim_m = [18404.3550889361, 15627.2108551001, 20632.5588368978, 17094.5996143697]
tol = 2 if oracle else 7
for i, z in enumerate(SouthTexasZipcode.objects.order_by('name').perimeter()):
self.assertAlmostEqual(perim_m[i], z.perimeter.m, tol)
# Running on points; should return 0.
for i, c in enumerate(SouthTexasCity.objects.perimeter(model_att='perim')):
self.assertEqual(0, c.perim.m)
@skipUnlessDBFeature("has_area_method", "has_distance_method")
@ignore_warnings(category=RemovedInDjango20Warning)
def test_measurement_null_fields(self):
"""
Test the measurement GeoQuerySet methods on fields with NULL values.
"""
# Creating SouthTexasZipcode w/NULL value.
SouthTexasZipcode.objects.create(name='78212')
# Performing distance/area queries against the NULL PolygonField,
# and ensuring the result of the operations is None.
htown = SouthTexasCity.objects.get(name='Downtown Houston')
z = SouthTexasZipcode.objects.distance(htown.point).area().get(name='78212')
self.assertIsNone(z.distance)
self.assertIsNone(z.area)
@skipUnlessDBFeature("has_distance_method")
@ignore_warnings(category=RemovedInDjango20Warning)
def test_distance_order_by(self):
qs = SouthTexasCity.objects.distance(Point(3, 3)).order_by(
'distance'
).values_list('name', flat=True).filter(name__in=('San Antonio', 'Pearland'))
self.assertQuerysetEqual(qs, ['San Antonio', 'Pearland'], lambda x: x)
'''
=============================
Distance functions on PostGIS
=============================
| Projected Geometry | Lon/lat Geometry | Geography (4326)
ST_Distance(geom1, geom2) | OK (meters) | :-( (degrees) | OK (meters)
ST_Distance(geom1, geom2, use_spheroid=False) | N/A | N/A | OK (meters), less accurate, quick
Distance_Sphere(geom1, geom2) | N/A | OK (meters) | N/A
Distance_Spheroid(geom1, geom2, spheroid) | N/A | OK (meters) | N/A
================================
Distance functions on Spatialite
================================
| Projected Geometry | Lon/lat Geometry
ST_Distance(geom1, geom2) | OK (meters) | N/A
ST_Distance(geom1, geom2, use_ellipsoid=True) | N/A | OK (meters)
ST_Distance(geom1, geom2, use_ellipsoid=False) | N/A | OK (meters), less accurate, quick
''' # NOQA
@skipUnlessDBFeature("gis_enabled")
class DistanceFunctionsTests(TestCase):
fixtures = ['initial']
@skipUnlessDBFeature("has_Area_function")
def test_area(self):
# Reference queries:
# SELECT ST_Area(poly) FROM distapp_southtexaszipcode;
area_sq_m = [5437908.90234375, 10183031.4389648, 11254471.0073242, 9881708.91772461]
# Tolerance has to be lower for Oracle
tol = 2
for i, z in enumerate(SouthTexasZipcode.objects.annotate(area=Area('poly')).order_by('name')):
# MySQL is returning a raw float value
self.assertAlmostEqual(area_sq_m[i], z.area.sq_m if hasattr(z.area, 'sq_m') else z.area, tol)
@skipUnlessDBFeature("has_Distance_function")
def test_distance_simple(self):
"""
Test a simple distance query, with projected coordinates and without
transformation.
"""
lagrange = GEOSGeometry('POINT(805066.295722839 4231496.29461335)', 32140)
houston = SouthTexasCity.objects.annotate(dist=Distance('point', lagrange)).order_by('id').first()
tol = 2 if oracle else 5
self.assertAlmostEqual(
houston.dist.m if hasattr(houston.dist, 'm') else houston.dist,
147075.069813,
tol
)
@skipUnlessDBFeature("has_Distance_function", "has_Transform_function")
def test_distance_projected(self):
"""
Test the `Distance` function on projected coordinate systems.
"""
# The point for La Grange, TX
lagrange = GEOSGeometry('POINT(-96.876369 29.905320)', 4326)
# Reference distances in feet and in meters. Got these values from
# using the provided raw SQL statements.
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 32140))
# FROM distapp_southtexascity;
m_distances = [147075.069813, 139630.198056, 140888.552826,
138809.684197, 158309.246259, 212183.594374,
70870.188967, 165337.758878, 139196.085105]
# SELECT ST_Distance(point, ST_Transform(ST_GeomFromText('POINT(-96.876369 29.905320)', 4326), 2278))
# FROM distapp_southtexascityft;
# Oracle 11 thinks this is not a projected coordinate system, so it's
# not tested.
ft_distances = [482528.79154625, 458103.408123001, 462231.860397575,
455411.438904354, 519386.252102563, 696139.009211594,
232513.278304279, 542445.630586414, 456679.155883207]
# Testing using different variations of parameters and using models
# with different projected coordinate systems.
dist1 = SouthTexasCity.objects.annotate(distance=Distance('point', lagrange)).order_by('id')
if spatialite or oracle:
dist_qs = [dist1]
else:
dist2 = SouthTexasCityFt.objects.annotate(distance=Distance('point', lagrange)).order_by('id')
# Using EWKT string parameter.
dist3 = SouthTexasCityFt.objects.annotate(distance=Distance('point', lagrange.ewkt)).order_by('id')
dist_qs = [dist1, dist2, dist3]
# Original query done on PostGIS, have to adjust AlmostEqual tolerance
# for Oracle.
tol = 2 if oracle else 5
# Ensuring expected distances are returned for each distance queryset.
for qs in dist_qs:
for i, c in enumerate(qs):
self.assertAlmostEqual(m_distances[i], c.distance.m, tol)
self.assertAlmostEqual(ft_distances[i], c.distance.survey_ft, tol)
@skipUnlessDBFeature("has_Distance_function", "supports_distance_geodetic")
def test_distance_geodetic(self):
"""
Test the `Distance` function on geodetic coordinate systems.
"""
# Testing geodetic distance calculation with a non-point geometry
# (a LineString of Wollongong and Shellharbour coords).
ls = LineString(((150.902, -34.4245), (150.87, -34.5789)), srid=4326)
# Reference query:
# SELECT ST_distance_sphere(point, ST_GeomFromText('LINESTRING(150.9020 -34.4245,150.8700 -34.5789)', 4326))
# FROM distapp_australiacity ORDER BY name;
distances = [1120954.92533513, 140575.720018241, 640396.662906304,
60580.9693849269, 972807.955955075, 568451.8357838,
40435.4335201384, 0, 68272.3896586844, 12375.0643697706, 0]
qs = AustraliaCity.objects.annotate(distance=Distance('point', ls)).order_by('name')
for city, distance in zip(qs, distances):
# Testing equivalence to within a meter.
self.assertAlmostEqual(distance, city.distance.m, 0)
@skipUnlessDBFeature("has_Distance_function", "supports_distance_geodetic")
def test_distance_geodetic_spheroid(self):
tol = 2 if oracle else 5
# Got the reference distances using the raw SQL statements:
# SELECT ST_distance_spheroid(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326),
# 'SPHEROID["WGS 84",6378137.0,298.257223563]') FROM distapp_australiacity WHERE (NOT (id = 11));
# SELECT ST_distance_sphere(point, ST_GeomFromText('POINT(151.231341 -33.952685)', 4326))
# FROM distapp_australiacity WHERE (NOT (id = 11)); st_distance_sphere
if connection.ops.postgis and connection.ops.proj_version_tuple() >= (4, 7, 0):
# PROJ.4 versions 4.7+ have updated datums, and thus different
# distance values.
spheroid_distances = [60504.0628957201, 77023.9489850262, 49154.8867574404,
90847.4358768573, 217402.811919332, 709599.234564757,
640011.483550888, 7772.00667991925, 1047861.78619339,
1165126.55236034]
sphere_distances = [60580.9693849267, 77144.0435286473, 49199.4415344719,
90804.7533823494, 217713.384600405, 709134.127242793,
639828.157159169, 7786.82949717788, 1049204.06569028,
1162623.7238134]
else:
spheroid_distances = [60504.0628825298, 77023.948962654, 49154.8867507115,
90847.435881812, 217402.811862568, 709599.234619957,
640011.483583758, 7772.00667666425, 1047861.7859506,
1165126.55237647]
sphere_distances = [60580.7612632291, 77143.7785056615, 49199.2725132184,
90804.4414289463, 217712.63666124, 709131.691061906,
639825.959074112, 7786.80274606706, 1049200.46122281,
1162619.7297006]
# Testing with spheroid distances first.
hillsdale = AustraliaCity.objects.get(name='Hillsdale')
qs = AustraliaCity.objects.exclude(id=hillsdale.id).annotate(
distance=Distance('point', hillsdale.point, spheroid=True)
).order_by('id')
for i, c in enumerate(qs):
self.assertAlmostEqual(spheroid_distances[i], c.distance.m, tol)
if postgis:
# PostGIS uses sphere-only distances by default, testing these as well.
qs = AustraliaCity.objects.exclude(id=hillsdale.id).annotate(
distance=Distance('point', hillsdale.point)
).order_by('id')
for i, c in enumerate(qs):
self.assertAlmostEqual(sphere_distances[i], c.distance.m, tol)
@no_oracle # Oracle already handles geographic distance calculation.
@skipUnlessDBFeature("has_Distance_function", 'has_Transform_function')
def test_distance_transform(self):
"""
Test the `Distance` function used with `Transform` on a geographic field.
"""
# We'll be using a Polygon (created by buffering the centroid
# of 77005 to 100m) -- which aren't allowed in geographic distance
# queries normally, however our field has been transformed to
# a non-geographic system.
z = SouthTexasZipcode.objects.get(name='77005')
# Reference query:
# SELECT ST_Distance(ST_Transform("distapp_censuszipcode"."poly", 32140),
# ST_GeomFromText('<buffer_wkt>', 32140))
# FROM "distapp_censuszipcode";
dists_m = [3553.30384972258, 1243.18391525602, 2186.15439472242]
# Having our buffer in the SRID of the transformation and of the field
# -- should get the same results. The first buffer has no need for
# transformation SQL because it is the same SRID as what was given
# to `transform()`. The second buffer will need to be transformed,
# however.
buf1 = z.poly.centroid.buffer(100)
buf2 = buf1.transform(4269, clone=True)
ref_zips = ['77002', '77025', '77401']
for buf in [buf1, buf2]:
qs = CensusZipcode.objects.exclude(name='77005').annotate(
distance=Distance(Transform('poly', 32140), buf)
).order_by('name')
self.assertEqual(ref_zips, sorted([c.name for c in qs]))
for i, z in enumerate(qs):
self.assertAlmostEqual(z.distance.m, dists_m[i], 5)
@skipUnlessDBFeature("has_Distance_function")
def test_distance_order_by(self):
qs = SouthTexasCity.objects.annotate(distance=Distance('point', Point(3, 3, srid=32140))).order_by(
'distance'
).values_list('name', flat=True).filter(name__in=('San Antonio', 'Pearland'))
self.assertQuerysetEqual(qs, ['San Antonio', 'Pearland'], lambda x: x)
@skipUnlessDBFeature("has_Length_function")
def test_length(self):
"""
Test the `Length` function.
"""
# Reference query (should use `length_spheroid`).
# SELECT ST_length_spheroid(ST_GeomFromText('<wkt>', 4326) 'SPHEROID["WGS 84",6378137,298.257223563,
# AUTHORITY["EPSG","7030"]]');
len_m1 = 473504.769553813
len_m2 = 4617.668
if connection.features.supports_length_geodetic:
qs = Interstate.objects.annotate(length=Length('path'))
tol = 2 if oracle else 3
self.assertAlmostEqual(len_m1, qs[0].length.m, tol)
# TODO: test with spheroid argument (True and False)
else:
# Does not support geodetic coordinate systems.
with self.assertRaises(NotImplementedError):
list(Interstate.objects.annotate(length=Length('path')))
# Now doing length on a projected coordinate system.
i10 = SouthTexasInterstate.objects.annotate(length=Length('path')).get(name='I-10')
self.assertAlmostEqual(len_m2, i10.length.m if isinstance(i10.length, D) else i10.length, 2)
self.assertTrue(
SouthTexasInterstate.objects.annotate(length=Length('path')).filter(length__gt=4000).exists()
)
@skipUnlessDBFeature("has_Perimeter_function")
def test_perimeter(self):
"""
Test the `Perimeter` function.
"""
# Reference query:
# SELECT ST_Perimeter(distapp_southtexaszipcode.poly) FROM distapp_southtexaszipcode;
perim_m = [18404.3550889361, 15627.2108551001, 20632.5588368978, 17094.5996143697]
tol = 2 if oracle else 7
qs = SouthTexasZipcode.objects.annotate(perimeter=Perimeter('poly')).order_by('name')
for i, z in enumerate(qs):
self.assertAlmostEqual(perim_m[i], z.perimeter.m, tol)
# Running on points; should return 0.
qs = SouthTexasCity.objects.annotate(perim=Perimeter('point'))
for city in qs:
self.assertEqual(0, city.perim.m)
@skipUnlessDBFeature("supports_null_geometries", "has_Area_function", "has_Distance_function")
def test_measurement_null_fields(self):
"""
Test the measurement functions on fields with NULL values.
"""
# Creating SouthTexasZipcode w/NULL value.
SouthTexasZipcode.objects.create(name='78212')
# Performing distance/area queries against the NULL PolygonField,
# and ensuring the result of the operations is None.
htown = SouthTexasCity.objects.get(name='Downtown Houston')
z = SouthTexasZipcode.objects.annotate(
distance=Distance('poly', htown.point), area=Area('poly')
).get(name='78212')
self.assertIsNone(z.distance)
self.assertIsNone(z.area)
| bsd-3-clause |
archetipo/stock-logistics-workflow | stock_picking_backorder_strategy/models/stock.py | 15 | 1995 | # -*- coding: utf-8 -*-
##############################################################################
#
# Authors: Laetitia Gangloff
# Copyright (c) 2015 Acsone SA/NV (http://www.acsone.eu)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import api, fields, models
from openerp.tools.translate import _
class StockPicking(models.Model):
    """stock.picking extension that honours the picking type's
    backorder strategy (create / no_create / cancel)."""

    _inherit = "stock.picking"

    @api.model
    def _create_backorder(self, picking, backorder_moves=None):
        """Create the backorder for ``picking`` per the configured strategy.

        * 'no_create': skip creation entirely and return True so callers
          treat the leftover moves as handled.
        * 'create' (default) and others: delegate to the standard
          implementation.
        * 'cancel': create the backorder normally, then cancel it at once.
        """
        if backorder_moves is None:
            backorder_moves = []
        res = False
        if picking.picking_type_id.backorder_strategy == 'no_create':
            res = True
        else:
            res = super(StockPicking, self)._create_backorder(
                picking, backorder_moves=backorder_moves)
        # res is presumably the created backorder's id here -- TODO confirm
        # against the upstream _create_backorder return value. With strategy
        # 'no_create', res is True but the strategy check below fails, so
        # nothing is browsed.
        if res and picking.picking_type_id.backorder_strategy == 'cancel':
            self.browse(res).action_cancel()
        return res
class StockPickingType(models.Model):
    """Add the backorder-strategy configuration field to picking types."""
    _inherit = "stock.picking.type"

    # Drives StockPicking._create_backorder above; 'create' preserves the
    # stock module's default behaviour.
    backorder_strategy = fields.Selection(
        [('create', _('Create')), ('no_create', _('No create')),
         ('cancel', _('Cancel'))], string='Backorder Strategy',
        default='create', help="Define what to do with backorder",
        required=True)
| agpl-3.0 |
aliyun/oss-ftp | python27/win32/Lib/email/iterators.py | 415 | 2202 | # Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: email-sig@python.org
"""Various types of useful iterators and generators."""
__all__ = [
'body_line_iterator',
'typed_subpart_iterator',
'walk',
# Do not include _structure() since it's part of the debugging API.
]
import sys
from cStringIO import StringIO
# This function will become a method of the Message class
def walk(self):
    """Yield this message and then every nested subpart, depth-first.

    Intended to be attached to the Message class as a generator method.
    """
    yield self
    if not self.is_multipart():
        return
    for part in self.get_payload():
        # Recurse through each subpart's own walk() method.
        for nested in part.walk():
            yield nested
# These two functions are imported into the Iterators.py interface module.
def body_line_iterator(msg, decode=False):
    """Iterate over the parts, returning string payloads line-by-line.

    Optional decode (default False) is passed through to .get_payload().
    """
    for subpart in msg.walk():
        payload = subpart.get_payload(decode=decode)
        # Multipart containers return a list payload; only string payloads
        # are split into lines (Python 2 basestring covers str and unicode).
        if isinstance(payload, basestring):
            for line in StringIO(payload):
                yield line
def typed_subpart_iterator(msg, maintype='text', subtype=None):
    """Iterate over the subparts with a given MIME type.

    `maintype' is the main MIME type to match against (default "text");
    optional `subtype' narrows the match further, otherwise only the main
    type is compared.
    """
    for part in msg.walk():
        if part.get_content_maintype() != maintype:
            continue
        if subtype is not None and part.get_content_subtype() != subtype:
            continue
        yield part
def _structure(msg, fp=None, level=0, include_default=False):
    """A handy debugging aid: print the MIME content-type tree of *msg*.

    fp defaults to stdout; level controls indentation of the recursion;
    include_default appends each part's default content type in brackets.
    """
    if fp is None:
        fp = sys.stdout
    tab = ' ' * (level * 4)
    # Trailing comma keeps the cursor on this line so the optional
    # default-type suffix lands beside it (Python 2 print statement).
    print >> fp, tab + msg.get_content_type(),
    if include_default:
        print >> fp, '[%s]' % msg.get_default_type()
    else:
        print >> fp
    if msg.is_multipart():
        for subpart in msg.get_payload():
            _structure(subpart, fp, level+1, include_default)
| mit |
LIS/lis-tempest | tempest/tests/lib/services/compute/test_server_groups_client.py | 3 | 3290 | # Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import httplib2
from oslotest import mockpatch
from tempest.tests.lib import fake_auth_provider
from tempest.lib.services.compute import server_groups_client
from tempest.tests.lib.services.compute import base
class TestServerGroupsClient(base.BaseComputeServiceTest):
    """Unit tests for ServerGroupsClient against a mocked REST layer.

    Each CRUD operation is exercised twice: once with a str body and once
    with a bytes body, via the shared check_service_client_function helper.
    """

    # Canned API response payload reused by the create/list/show tests.
    server_group = {
        "id": "5bbcc3c4-1da2-4437-a48a-66f15b1b13f9",
        "name": "test",
        "policies": ["anti-affinity"],
        "members": [],
        "metadata": {}}

    def setUp(self):
        super(TestServerGroupsClient, self).setUp()
        fake_auth = fake_auth_provider.FakeAuthProvider()
        self.client = server_groups_client.ServerGroupsClient(
            fake_auth, 'compute', 'regionOne')

    def _test_create_server_group(self, bytes_body=False):
        # POST is patched by the helper; we only assert the decoded body.
        expected = {"server_group": TestServerGroupsClient.server_group}
        self.check_service_client_function(
            self.client.create_server_group,
            'tempest.lib.common.rest_client.RestClient.post', expected,
            bytes_body, name='fake-group', policies=['affinity'])

    def test_create_server_group_str_body(self):
        self._test_create_server_group(bytes_body=False)

    def test_create_server_group_byte_body(self):
        self._test_create_server_group(bytes_body=True)

    def test_delete_server_group(self):
        # DELETE returns no body, so patch directly rather than using the
        # body-checking helper.
        response = (httplib2.Response({'status': 204}), None)
        self.useFixture(mockpatch.Patch(
            'tempest.lib.common.rest_client.RestClient.delete',
            return_value=response))
        self.client.delete_server_group('fake-group')

    def _test_list_server_groups(self, bytes_body=False):
        expected = {"server_groups": [TestServerGroupsClient.server_group]}
        self.check_service_client_function(
            self.client.list_server_groups,
            'tempest.lib.common.rest_client.RestClient.get',
            expected, bytes_body)

    def test_list_server_groups_str_body(self):
        self._test_list_server_groups(bytes_body=False)

    def test_list_server_groups_byte_body(self):
        self._test_list_server_groups(bytes_body=True)

    def _test_show_server_group(self, bytes_body=False):
        expected = {"server_group": TestServerGroupsClient.server_group}
        self.check_service_client_function(
            self.client.show_server_group,
            'tempest.lib.common.rest_client.RestClient.get',
            expected, bytes_body,
            server_group_id='5bbcc3c4-1da2-4437-a48a-66f15b1b13f9')

    def test_show_server_group_str_body(self):
        self._test_show_server_group(bytes_body=False)

    def test_show_server_group_byte_body(self):
        self._test_show_server_group(bytes_body=True)
| apache-2.0 |
ericbaze/continuum_code_2012 | pydata/moin/pythonenv/local/lib/python2.7/encodings/iso8859_11.py | 593 | 12591 | """ Python Character Mapping Codec iso8859_11 generated from 'MAPPINGS/ISO8859/8859-11.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless codec mapping iso8859-11 bytes to/from Unicode via the
    module-level charmap tables (this file is generated by gencodec.py)."""

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Charmap encoding is stateless, so `final` needs no special handling;
        # [0] drops the consumed-length part of the (output, length) tuple.
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Single-byte charmap decoding needs no cross-call state.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Encoding comes from Codec; no stream-specific state is required.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Decoding comes from Codec; no stream-specific state is required.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo used by the codec registry for 'iso8859-11'."""
    return codecs.CodecInfo(
        name='iso8859-11',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0e01' # 0xA1 -> THAI CHARACTER KO KAI
u'\u0e02' # 0xA2 -> THAI CHARACTER KHO KHAI
u'\u0e03' # 0xA3 -> THAI CHARACTER KHO KHUAT
u'\u0e04' # 0xA4 -> THAI CHARACTER KHO KHWAI
u'\u0e05' # 0xA5 -> THAI CHARACTER KHO KHON
u'\u0e06' # 0xA6 -> THAI CHARACTER KHO RAKHANG
u'\u0e07' # 0xA7 -> THAI CHARACTER NGO NGU
u'\u0e08' # 0xA8 -> THAI CHARACTER CHO CHAN
u'\u0e09' # 0xA9 -> THAI CHARACTER CHO CHING
u'\u0e0a' # 0xAA -> THAI CHARACTER CHO CHANG
u'\u0e0b' # 0xAB -> THAI CHARACTER SO SO
u'\u0e0c' # 0xAC -> THAI CHARACTER CHO CHOE
u'\u0e0d' # 0xAD -> THAI CHARACTER YO YING
u'\u0e0e' # 0xAE -> THAI CHARACTER DO CHADA
u'\u0e0f' # 0xAF -> THAI CHARACTER TO PATAK
u'\u0e10' # 0xB0 -> THAI CHARACTER THO THAN
u'\u0e11' # 0xB1 -> THAI CHARACTER THO NANGMONTHO
u'\u0e12' # 0xB2 -> THAI CHARACTER THO PHUTHAO
u'\u0e13' # 0xB3 -> THAI CHARACTER NO NEN
u'\u0e14' # 0xB4 -> THAI CHARACTER DO DEK
u'\u0e15' # 0xB5 -> THAI CHARACTER TO TAO
u'\u0e16' # 0xB6 -> THAI CHARACTER THO THUNG
u'\u0e17' # 0xB7 -> THAI CHARACTER THO THAHAN
u'\u0e18' # 0xB8 -> THAI CHARACTER THO THONG
u'\u0e19' # 0xB9 -> THAI CHARACTER NO NU
u'\u0e1a' # 0xBA -> THAI CHARACTER BO BAIMAI
u'\u0e1b' # 0xBB -> THAI CHARACTER PO PLA
u'\u0e1c' # 0xBC -> THAI CHARACTER PHO PHUNG
u'\u0e1d' # 0xBD -> THAI CHARACTER FO FA
u'\u0e1e' # 0xBE -> THAI CHARACTER PHO PHAN
u'\u0e1f' # 0xBF -> THAI CHARACTER FO FAN
u'\u0e20' # 0xC0 -> THAI CHARACTER PHO SAMPHAO
u'\u0e21' # 0xC1 -> THAI CHARACTER MO MA
u'\u0e22' # 0xC2 -> THAI CHARACTER YO YAK
u'\u0e23' # 0xC3 -> THAI CHARACTER RO RUA
u'\u0e24' # 0xC4 -> THAI CHARACTER RU
u'\u0e25' # 0xC5 -> THAI CHARACTER LO LING
u'\u0e26' # 0xC6 -> THAI CHARACTER LU
u'\u0e27' # 0xC7 -> THAI CHARACTER WO WAEN
u'\u0e28' # 0xC8 -> THAI CHARACTER SO SALA
u'\u0e29' # 0xC9 -> THAI CHARACTER SO RUSI
u'\u0e2a' # 0xCA -> THAI CHARACTER SO SUA
u'\u0e2b' # 0xCB -> THAI CHARACTER HO HIP
u'\u0e2c' # 0xCC -> THAI CHARACTER LO CHULA
u'\u0e2d' # 0xCD -> THAI CHARACTER O ANG
u'\u0e2e' # 0xCE -> THAI CHARACTER HO NOKHUK
u'\u0e2f' # 0xCF -> THAI CHARACTER PAIYANNOI
u'\u0e30' # 0xD0 -> THAI CHARACTER SARA A
u'\u0e31' # 0xD1 -> THAI CHARACTER MAI HAN-AKAT
u'\u0e32' # 0xD2 -> THAI CHARACTER SARA AA
u'\u0e33' # 0xD3 -> THAI CHARACTER SARA AM
u'\u0e34' # 0xD4 -> THAI CHARACTER SARA I
u'\u0e35' # 0xD5 -> THAI CHARACTER SARA II
u'\u0e36' # 0xD6 -> THAI CHARACTER SARA UE
u'\u0e37' # 0xD7 -> THAI CHARACTER SARA UEE
u'\u0e38' # 0xD8 -> THAI CHARACTER SARA U
u'\u0e39' # 0xD9 -> THAI CHARACTER SARA UU
u'\u0e3a' # 0xDA -> THAI CHARACTER PHINTHU
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u0e3f' # 0xDF -> THAI CURRENCY SYMBOL BAHT
u'\u0e40' # 0xE0 -> THAI CHARACTER SARA E
u'\u0e41' # 0xE1 -> THAI CHARACTER SARA AE
u'\u0e42' # 0xE2 -> THAI CHARACTER SARA O
u'\u0e43' # 0xE3 -> THAI CHARACTER SARA AI MAIMUAN
u'\u0e44' # 0xE4 -> THAI CHARACTER SARA AI MAIMALAI
u'\u0e45' # 0xE5 -> THAI CHARACTER LAKKHANGYAO
u'\u0e46' # 0xE6 -> THAI CHARACTER MAIYAMOK
u'\u0e47' # 0xE7 -> THAI CHARACTER MAITAIKHU
u'\u0e48' # 0xE8 -> THAI CHARACTER MAI EK
u'\u0e49' # 0xE9 -> THAI CHARACTER MAI THO
u'\u0e4a' # 0xEA -> THAI CHARACTER MAI TRI
u'\u0e4b' # 0xEB -> THAI CHARACTER MAI CHATTAWA
u'\u0e4c' # 0xEC -> THAI CHARACTER THANTHAKHAT
u'\u0e4d' # 0xED -> THAI CHARACTER NIKHAHIT
u'\u0e4e' # 0xEE -> THAI CHARACTER YAMAKKAN
u'\u0e4f' # 0xEF -> THAI CHARACTER FONGMAN
u'\u0e50' # 0xF0 -> THAI DIGIT ZERO
u'\u0e51' # 0xF1 -> THAI DIGIT ONE
u'\u0e52' # 0xF2 -> THAI DIGIT TWO
u'\u0e53' # 0xF3 -> THAI DIGIT THREE
u'\u0e54' # 0xF4 -> THAI DIGIT FOUR
u'\u0e55' # 0xF5 -> THAI DIGIT FIVE
u'\u0e56' # 0xF6 -> THAI DIGIT SIX
u'\u0e57' # 0xF7 -> THAI DIGIT SEVEN
u'\u0e58' # 0xF8 -> THAI DIGIT EIGHT
u'\u0e59' # 0xF9 -> THAI DIGIT NINE
u'\u0e5a' # 0xFA -> THAI CHARACTER ANGKHANKHU
u'\u0e5b' # 0xFB -> THAI CHARACTER KHOMUT
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| gpl-2.0 |
bdoner/SickRage | sickbeard/subtitles.py | 3 | 10951 | # Author: Nyaran <nyayukko@gmail.com>, based on Antoine Bertin <diaoulael@gmail.com> work
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
import datetime
import sickbeard
from sickbeard.common import *
from sickbeard.exceptions import ex
from sickbeard import logger
from sickbeard import encodingKludge as ek
from sickbeard import db
import subliminal
import babelfish
import subprocess
subliminal.cache_region.configure('dogpile.cache.memory')
provider_urls = {'addic7ed': 'http://www.addic7ed.com',
'opensubtitles': 'http://www.opensubtitles.org',
'podnapisi': 'http://www.podnapisi.net',
'thesubdb': 'http://www.thesubdb.com',
'tvsubtitles': 'http://www.tvsubtitles.net'
}
SINGLE = 'und'
def sortedServiceList():
    """Return a dict per subtitle provider, user-configured ones first.

    Each dict has 'name', 'url' (provider homepage, or a lmgtfy search for
    unknown providers), 'image' (icon filename) and 'enabled'.  Providers
    subliminal knows about but the user has not configured are appended
    with enabled=False.
    """
    lmgtfy = 'http://lmgtfy.com/?q=%s'

    newList = []
    # Configured providers first, preserving the user's ordering; the
    # parallel SUBTITLES_SERVICES_ENABLED list is indexed by position
    # (enumerate replaces the original hand-maintained counter).
    for curIndex, curService in enumerate(sickbeard.SUBTITLES_SERVICES_LIST):
        if curService in subliminal.provider_manager.available_providers:
            newList.append({'name': curService,
                            'url': provider_urls.get(curService, lmgtfy % curService),
                            'image': curService + '.png',
                            'enabled': sickbeard.SUBTITLES_SERVICES_ENABLED[curIndex] == 1,
                            })

    # Then any remaining providers subliminal supports, disabled by default.
    # A set avoids re-scanning newList for every candidate (was O(n^2)).
    known = set(entry['name'] for entry in newList)
    for curService in subliminal.provider_manager.available_providers:
        if curService not in known:
            known.add(curService)
            newList.append({'name': curService,
                            'url': provider_urls.get(curService, lmgtfy % curService),
                            'image': curService + '.png',
                            'enabled': False,
                            })

    return newList
def getEnabledServiceList():
    """Return the names of subtitle services the user has switched on."""
    enabled = []
    for service in sortedServiceList():
        if service['enabled']:
            enabled.append(service['name'])
    return enabled
#Hack around this for now.
def fromietf(language):
    # NOTE: despite the name, this converts an *opensubtitles* code, not an
    # IETF tag (see the "Hack around this for now" comment above).  Raises
    # if babelfish cannot convert the code.
    return babelfish.Language.fromopensubtitles(language)
def isValidLanguage(language):
    """Return True if *language* is a code babelfish can convert.

    Deliberately best-effort: any conversion failure means "not valid".
    """
    try:
        # Result unused; only whether conversion raises matters
        # (drops the original's dead `langObj` local).
        fromietf(language)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        return False
    return True
def getLanguageName(language):
    """Human-readable language name for an opensubtitles code."""
    lang = fromietf(language)
    return lang.name
# TODO: Filter here for non-languages in sickbeard.SUBTITLES_LANGUAGES
def wantedLanguages(sqlLike=False):
    """Return the configured subtitle language codes babelfish understands.

    With sqlLike=True the list is rendered as a '%a,b,c%' pattern for use
    in SQL LIKE clauses.
    """
    # Renamed local: the original list shadowed this function's own name,
    # making recursive/self-references inside the body error-prone.
    wanted = [x for x in sorted(sickbeard.SUBTITLES_LANGUAGES)
              if x in babelfish.language_converters['opensubtitles'].codes]
    if sqlLike:
        return '%' + ','.join(wanted) + '%'
    return wanted
def subtitlesLanguages(video_path):
    """Return a sorted list of detected subtitle language codes for the video."""
    resultList = []

    # Search subtitles in the absolute subtitles dir, if configured...
    if sickbeard.SUBTITLES_DIR and ek.ek(os.path.exists, sickbeard.SUBTITLES_DIR):
        video_path = ek.ek(os.path.join, sickbeard.SUBTITLES_DIR, ek.ek(os.path.basename, video_path))
    # ...otherwise in the relative path.  `elif` (was a second `if`) so the
    # path is not rewritten twice when SUBTITLES_DIR is an existing dir.
    elif sickbeard.SUBTITLES_DIR:
        video_path = ek.ek(os.path.join, ek.ek(os.path.dirname, video_path), sickbeard.SUBTITLES_DIR, ek.ek(os.path.basename, video_path))

    languages = subliminal.video.scan_subtitle_languages(video_path)

    # Prefer opensubtitles codes, then alpha3, then alpha2.
    for language in languages:
        if hasattr(language, 'opensubtitles') and language.opensubtitles:
            resultList.append(language.opensubtitles)
        elif hasattr(language, 'alpha3') and language.alpha3:
            resultList.append(language.alpha3)
        elif hasattr(language, 'alpha2') and language.alpha2:
            resultList.append(language.alpha2)

    defaultLang = wantedLanguages()

    # `== 1` (was `is 1`): `is` compares object identity and only works for
    # small ints by CPython accident.
    if len(resultList) == 1 and len(defaultLang) == 1:
        return defaultLang

    # Map Portuguese codes onto Brazilian Portuguese when only 'pob'/'pb'
    # is wanted.
    if ('pob' in defaultLang or 'pb' in defaultLang) and ('pt' not in defaultLang and 'por' not in defaultLang):
        resultList = [x if x not in ['por', 'pt'] else u'pob' for x in resultList]

    return sorted(resultList)
# TODO: Return only languages our providers allow
def subtitleLanguageFilter():
    """babelfish Language objects for every 3-letter opensubtitles code."""
    codes = babelfish.language_converters['opensubtitles'].codes
    return [babelfish.Language.fromopensubtitles(code)
            for code in codes if len(code) == 3]
class SubtitlesFinder():
    """
    The SubtitlesFinder will be executed every hour but will not necessarly search
    and download subtitles. Only if the defined rule is true
    """
    def __init__(self):
        # Flag read by the scheduler UI to show whether a scan is running.
        self.amActive = False

    def run(self, force=False):
        # NOTE(review): `force` is accepted for scheduler compatibility but
        # is never consulted below — confirm whether it should bypass rules.
        self.amActive = True

        if not sickbeard.USE_SUBTITLES:
            return

        if len(sickbeard.subtitles.getEnabledServiceList()) < 1:
            logger.log(u'Not enough services selected. At least 1 service is required to search subtitles in the background', logger.ERROR)
            return

        logger.log(u'Checking for subtitles', logger.INFO)

        # get episodes on which we want subtitles
        # criteria is:
        #  - show subtitles = 1
        #  - episode subtitles != config wanted languages or SINGLE (depends on config multi)
        #  - search count < 2 and diff(airdate, now) > 1 week : now -> 1d
        #  - search count < 7 and diff(airdate, now) <= 1 week : now -> 4h -> 8h -> 16h -> 1d -> 1d -> 1d
        today = datetime.date.today().toordinal()

        # you have 5 minutes to understand that one. Good luck
        myDB = db.DBConnection()
        sqlResults = myDB.select('SELECT s.show_name, e.showid, e.season, e.episode, e.status, e.subtitles, ' +
            'e.subtitles_searchcount AS searchcount, e.subtitles_lastsearch AS lastsearch, e.location, (? - e.airdate) AS airdate_daydiff ' +
            'FROM tv_episodes AS e INNER JOIN tv_shows AS s ON (e.showid = s.indexer_id) ' +
            'WHERE s.subtitles = 1 AND e.subtitles NOT LIKE (?) ' +
            'AND (e.subtitles_searchcount <= 2 OR (e.subtitles_searchcount <= 7 AND airdate_daydiff <= 7)) ' +
            'AND e.location != ""', [today, wantedLanguages(True)])

        if len(sqlResults) == 0:
            logger.log('No subtitles to download', logger.INFO)
            return

        rules = self._getRules()
        now = datetime.datetime.now()
        for epToSub in sqlResults:
            if not ek.ek(os.path.isfile, epToSub['location']):
                logger.log('Episode file does not exist, cannot download subtitles for episode %dx%d of show %s' % (epToSub['season'], epToSub['episode'], epToSub['show_name']), logger.DEBUG)
                continue

            # Old shows rule
            # NOTE(review): `throwaway` is never used — looks like leftover code.
            throwaway = datetime.datetime.strptime('20110101', '%Y%m%d')
            # Retry schedule: the search count indexes into the hour table
            # returned by _getRules, with separate tables for old (>1 week
            # since airdate) and recently-aired episodes.
            if ((epToSub['airdate_daydiff'] > 7 and epToSub['searchcount'] < 2 and now - datetime.datetime.strptime(epToSub['lastsearch'], '%Y-%m-%d %H:%M:%S') > datetime.timedelta(hours=rules['old'][epToSub['searchcount']])) or
                    # Recent shows rule
                    (epToSub['airdate_daydiff'] <= 7 and epToSub['searchcount'] < 7 and now - datetime.datetime.strptime(epToSub['lastsearch'], '%Y-%m-%d %H:%M:%S') > datetime.timedelta(hours=rules['new'][epToSub['searchcount']]))):
                logger.log('Downloading subtitles for episode %dx%d of show %s' % (epToSub['season'], epToSub['episode'], epToSub['show_name']), logger.DEBUG)

                showObj = sickbeard.helpers.findCertainShow(sickbeard.showList, int(epToSub['showid']))
                if not showObj:
                    # NOTE(review): these `return`s abort the whole scan (and
                    # leave amActive True) instead of `continue`-ing to the
                    # next episode — confirm this is intended.
                    logger.log(u'Show not found', logger.DEBUG)
                    return

                epObj = showObj.getEpisode(int(epToSub["season"]), int(epToSub["episode"]))
                if isinstance(epObj, str):
                    logger.log(u'Episode not found', logger.DEBUG)
                    return

                previous_subtitles = epObj.subtitles

                try:
                    epObj.downloadSubtitles()
                except Exception as e:
                    logger.log(u'Unable to find subtitles', logger.DEBUG)
                    logger.log(str(e), logger.DEBUG)
                    return

                # Log only the languages added by this pass.
                newSubtitles = frozenset(epObj.subtitles).difference(previous_subtitles)
                if newSubtitles:
                    logger.log(u'Downloaded subtitles for S%02dE%02d in %s' % (epToSub["season"], epToSub["episode"], ', '.join(newSubtitles)))

        self.amActive = False

    def _getRules(self):
        """
        Define the hours to wait between 2 subtitles search depending on:
        - the episode: new or old
        - the number of searches done so far (searchcount), represented by the index of the list
        """
        return {'old': [0, 24], 'new': [0, 4, 8, 4, 16, 24, 24]}
def run_subs_extra_scripts(epObj, foundSubs):
    """Run each user-configured post-subtitle script once per downloaded sub.

    Scripts receive: video path, subtitle path, language code, show name,
    season, episode, episode name and indexer id as arguments.
    """
    for curScriptName in sickbeard.SUBTITLES_EXTRA_SCRIPTS:
        # Split the command line while keeping quoted segments intact.
        # NOTE(review): `re` is not imported in this module's visible import
        # block — presumably it arrives via `from sickbeard.common import *`;
        # verify.
        script_cmd = [piece for piece in re.split("( |\\\".*?\\\"|'.*?')", curScriptName) if piece.strip()]
        script_cmd[0] = ek.ek(os.path.abspath, script_cmd[0])
        logger.log(u"Absolute path to script: " + script_cmd[0], logger.DEBUG)

        for video, subs in foundSubs.iteritems():
            # NOTE(review): `subpaths` is populated nowhere and never read.
            subpaths = []
            for sub in subs:
                subpath = subliminal.subtitle.get_subtitle_path(video.name, sub.language)
                # Mirror the subtitle-directory relocation logic used when
                # the subtitles were saved.
                if sickbeard.SUBTITLES_DIR and ek.ek(os.path.exists, sickbeard.SUBTITLES_DIR):
                    subpath = ek.ek(os.path.join, sickbeard.SUBTITLES_DIR, ek.ek(os.path.basename, subpath))
                elif sickbeard.SUBTITLES_DIR:
                    subpath = ek.ek(os.path.join, ek.ek(os.path.dirname, subpath), sickbeard.SUBTITLES_DIR, ek.ek(os.path.basename, subpath))

                inner_cmd = script_cmd + [video.name, subpath, sub.language.opensubtitles, epObj.show.name,
                                          str(epObj.season), str(epObj.episode), epObj.name, str(epObj.show.indexerid)]

                # use subprocess to run the command and capture output
                logger.log(u"Executing command: %s" % inner_cmd)
                try:
                    # shell=False (default) with an argument list avoids
                    # shell-injection via episode/show names.
                    p = subprocess.Popen(inner_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                         stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
                    out, err = p.communicate()  # @UnusedVariable
                    logger.log(u"Script result: %s" % out, logger.DEBUG)
                except Exception as e:
                    logger.log(u"Unable to run subs_extra_script: " + ex(e))
| gpl-3.0 |
shahar-stratoscale/nova | nova/tests/objects/test_network.py | 10 | 8757 | # Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import netaddr
from nova.objects import network as network_obj
from nova.tests.objects import test_objects
fake_network = {
'deleted': False,
'created_at': None,
'updated_at': None,
'deleted_at': None,
'id': 1,
'label': 'Fake Network',
'injected': False,
'cidr': '192.168.1.0/24',
'cidr_v6': '1234::/64',
'multi_host': False,
'netmask': '255.255.255.0',
'gateway': '192.168.1.1',
'broadcast': '192.168.1.255',
'netmask_v6': 64,
'gateway_v6': '1234::1',
'bridge': 'br100',
'bridge_interface': 'eth0',
'dns1': '8.8.8.8',
'dns2': '8.8.4.4',
'vlan': None,
'vpn_public_address': None,
'vpn_public_port': None,
'vpn_private_address': None,
'dhcp_start': '192.168.1.10',
'rxtx_base': None,
'project_id': None,
'priority': None,
'host': None,
'uuid': 'fake-uuid',
}
class _TestNetworkObject(object):
    """Shared test cases for the Network object, mixed into local and
    remote (RPC-backed) concrete test classes below."""

    def _compare(self, obj, db_obj):
        # Field-by-field comparison of an object against its fake DB row:
        # netaddr values are stringified, and the DB's integer v6 prefix is
        # rendered as a netmask string before comparing.
        for field in obj.fields:
            db_val = db_obj[field]
            obj_val = obj[field]
            if isinstance(obj_val, netaddr.IPAddress):
                obj_val = str(obj_val)
            if isinstance(obj_val, netaddr.IPNetwork):
                obj_val = str(obj_val)
            if field == 'netmask_v6':
                db_val = str(netaddr.IPNetwork('1::/%i' % db_val).netmask)
            self.assertEqual(db_val, obj_val)

    @mock.patch('nova.db.network_get')
    def test_get_by_id(self, get):
        get.return_value = fake_network
        network = network_obj.Network.get_by_id(self.context, 'foo')
        self._compare(network, fake_network)
        get.assert_called_once_with(self.context, 'foo',
                                    project_only='allow_none')

    @mock.patch('nova.db.network_get_by_uuid')
    def test_get_by_uuid(self, get):
        get.return_value = fake_network
        network = network_obj.Network.get_by_uuid(self.context, 'foo')
        self._compare(network, fake_network)
        get.assert_called_once_with(self.context, 'foo')

    @mock.patch('nova.db.network_get_by_cidr')
    def test_get_by_cidr(self, get):
        get.return_value = fake_network
        network = network_obj.Network.get_by_cidr(self.context,
                                                  '192.168.1.0/24')
        self._compare(network, fake_network)
        get.assert_called_once_with(self.context, '192.168.1.0/24')

    @mock.patch('nova.db.network_update')
    @mock.patch('nova.db.network_set_host')
    def test_save(self, set_host, update):
        # Plain field change: only network_update should be called.
        result = dict(fake_network, injected=True)
        network = network_obj.Network._from_db_object(self.context,
                                                      network_obj.Network(),
                                                      fake_network)
        network.obj_reset_changes()
        # First save has no pending changes and must be a no-op.
        network.save()
        network.label = 'bar'
        update.return_value = result
        network.save()
        update.assert_called_once_with(self.context, network.id,
                                       {'label': 'bar'})
        self.assertFalse(set_host.called)
        self._compare(network, result)

    @mock.patch('nova.db.network_update')
    @mock.patch('nova.db.network_set_host')
    @mock.patch('nova.db.network_get')
    def test_save_with_host(self, get, set_host, update):
        # Host-only change goes through network_set_host, not network_update.
        result = dict(fake_network, injected=True)
        network = network_obj.Network._from_db_object(self.context,
                                                      network_obj.Network(),
                                                      fake_network)
        network.obj_reset_changes()
        network.host = 'foo'
        get.return_value = result
        network.save()
        set_host.assert_called_once_with(self.context, network.id, 'foo')
        self.assertFalse(update.called)
        self._compare(network, result)

    @mock.patch('nova.db.network_update')
    @mock.patch('nova.db.network_set_host')
    def test_save_with_host_and_other(self, set_host, update):
        # Mixed change: both DB calls are expected.
        result = dict(fake_network, injected=True)
        network = network_obj.Network._from_db_object(self.context,
                                                      network_obj.Network(),
                                                      fake_network)
        network.obj_reset_changes()
        network.host = 'foo'
        network.label = 'bar'
        update.return_value = result
        network.save()
        set_host.assert_called_once_with(self.context, network.id, 'foo')
        update.assert_called_once_with(self.context, network.id,
                                       {'label': 'bar'})
        self._compare(network, result)

    @mock.patch('nova.db.network_associate')
    def test_associate(self, associate):
        network_obj.Network.associate(self.context, 'project',
                                      network_id=123)
        associate.assert_called_once_with(self.context, 'project',
                                          network_id=123, force=False)

    @mock.patch('nova.db.network_disassociate')
    def test_disassociate(self, disassociate):
        network_obj.Network.disassociate(self.context, 123,
                                         host=True, project=True)
        disassociate.assert_called_once_with(self.context, 123, True, True)

    @mock.patch('nova.db.network_create_safe')
    def test_create(self, create):
        create.return_value = fake_network
        network = network_obj.Network(context=self.context, label='foo')
        network.create()
        create.assert_called_once_with(self.context, {'label': 'foo'})
        self._compare(network, fake_network)

    @mock.patch('nova.db.network_delete_safe')
    def test_destroy(self, delete):
        network = network_obj.Network(context=self.context, id=123)
        network.destroy()
        delete.assert_called_once_with(self.context, 123)
        # destroy() marks the object deleted without flagging the field dirty.
        self.assertTrue(network.deleted)
        self.assertNotIn('deleted', network.obj_what_changed())

    @mock.patch('nova.db.network_get_all')
    def test_get_all(self, get_all):
        get_all.return_value = [fake_network]
        networks = network_obj.NetworkList.get_all(self.context)
        self.assertEqual(1, len(networks))
        get_all.assert_called_once_with(self.context, 'allow_none')
        self._compare(networks[0], fake_network)

    @mock.patch('nova.db.network_get_all_by_uuids')
    def test_get_all_by_uuids(self, get_all):
        get_all.return_value = [fake_network]
        networks = network_obj.NetworkList.get_by_uuids(self.context,
                                                        ['foo'])
        self.assertEqual(1, len(networks))
        get_all.assert_called_once_with(self.context, ['foo'], 'allow_none')
        self._compare(networks[0], fake_network)

    @mock.patch('nova.db.network_get_all_by_host')
    def test_get_all_by_host(self, get_all):
        get_all.return_value = [fake_network]
        networks = network_obj.NetworkList.get_by_host(self.context, 'host')
        self.assertEqual(1, len(networks))
        get_all.assert_called_once_with(self.context, 'host')
        self._compare(networks[0], fake_network)

    @mock.patch('nova.db.network_in_use_on_host')
    def test_in_use_on_host(self, in_use):
        in_use.return_value = True
        self.assertTrue(network_obj.Network.in_use_on_host(self.context,
                                                           123, 'foo'))
        in_use.assert_called_once_with(self.context, 123, 'foo')

    @mock.patch('nova.db.project_get_networks')
    def test_get_all_by_project(self, get_nets):
        get_nets.return_value = [fake_network]
        networks = network_obj.NetworkList.get_by_project(self.context, 123)
        self.assertEqual(1, len(networks))
        get_nets.assert_called_once_with(self.context, 123, associate=True)
        self._compare(networks[0], fake_network)
class TestNetworkObject(test_objects._LocalTest,
                        _TestNetworkObject):
    # Runs the shared cases against the in-process object implementation.
    pass
class TestRemoteNetworkObject(test_objects._RemoteTest,
                              _TestNetworkObject):
    # Runs the same cases through the remote (RPC-serialized) object path.
    pass
| apache-2.0 |
zchking/odoo | addons/payment_buckaroo/controllers/main.py | 325 | 1270 | # -*- coding: utf-8 -*-
try:
import simplejson as json
except ImportError:
import json
import logging
import pprint
import werkzeug
from openerp import http, SUPERUSER_ID
from openerp.http import request
_logger = logging.getLogger(__name__)
class BuckarooController(http.Controller):
    """HTTP endpoints that receive Buckaroo payment feedback redirects."""

    _return_url = '/payment/buckaroo/return'
    _cancel_url = '/payment/buckaroo/cancel'
    _exception_url = '/payment/buckaroo/error'
    _reject_url = '/payment/buckaroo/reject'

    @http.route([
        '/payment/buckaroo/return',
        '/payment/buckaroo/cancel',
        '/payment/buckaroo/error',
        '/payment/buckaroo/reject',
    ], type='http', auth='none')
    def buckaroo_return(self, **post):
        """ Buckaroo."""
        _logger.info('Buckaroo: entering form_feedback with post data %s', pprint.pformat(post))  # debug
        # Record the transaction feedback regardless of which of the four
        # routes was hit; the payment module decides the outcome.
        request.registry['payment.transaction'].form_feedback(
            request.cr, SUPERUSER_ID, post, 'buckaroo', context=request.context)
        return_url = post.pop('return_url', '')
        if not return_url:
            # Fall back to the JSON blob Buckaroo echoes back to us; it is
            # single-quoted, so normalize quotes before parsing.
            raw = post.pop('ADD_RETURNDATA', '{}').replace("'", "\"")
            custom = json.loads(raw)
            return_url = custom.pop('return_url', '/')
        return werkzeug.utils.redirect(return_url)
| agpl-3.0 |
RogerRueegg/lvw-young-talents | src/profiles/views.py | 1 | 2796 | from __future__ import unicode_literals
from django.views import generic
from django.shortcuts import get_object_or_404, redirect
from django.contrib import messages
from django.contrib.auth.mixins import LoginRequiredMixin
from . import forms
from . import models
import datetime
class ShowProfile(LoginRequiredMixin, generic.TemplateView):
    """Render a profile page: another user's when a slug is given, else our own."""
    template_name = "profiles/show_profile.html"
    http_method_names = ['get']

    def get(self, request, *args, **kwargs):
        slug = self.kwargs.get('slug')
        if slug:
            user = get_object_or_404(models.Profile, slug=slug).user
        else:
            user = self.request.user
        # Only the owner of the displayed profile may edit it.
        if user == self.request.user:
            kwargs["editable"] = True
        kwargs["show_user"] = user
        return super(ShowProfile, self).get(request, *args, **kwargs)
class EditProfile(LoginRequiredMixin, generic.TemplateView):
    """Display and process the profile edit form for the logged-in user."""
    template_name = "profiles/edit_profile.html"
    http_method_names = ['get', 'post']

    def get(self, request, *args, **kwargs):
        """Render the edit page, building unbound forms unless the caller
        already passed forms in via kwargs (as the failed-POST path does)."""
        user = self.request.user
        if "user_form" not in kwargs:
            kwargs["user_form"] = forms.UserForm(instance=user)
        if "profile_form" not in kwargs:
            kwargs["profile_form"] = forms.ProfileForm(instance=user.profile)
        return super(EditProfile, self).get(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        """Validate and save both the user and profile forms.

        On validation failure a (German) hint message is flashed and the
        page is re-rendered; on success both objects are saved and the
        user is redirected to their own profile.
        """
        user = self.request.user
        user_form = forms.UserForm(request.POST, instance=user)
        profile_form = forms.ProfileForm(request.POST,
                                         request.FILES,
                                         instance=user.profile)
        if not (user_form.is_valid() and profile_form.is_valid()):
            message = ""
            if profile_form.errors:
                if 'phone_number' in profile_form.errors.keys():
                    message += "Bitte gibt Deine Natelnummer wie folgt ein: +41791234567. "
                if 'bdate' in profile_form.errors.keys():
                    message += "Bitte gibt das Geburtsdatum wie folgt ein: 2002-01-15 für 15. Januar 2002"
            messages.error(request, message)
            # NOTE(review): the bound (invalid) forms are thrown away and
            # fresh unbound forms are rendered, so the user's typed input is
            # lost on error -- looks deliberate, worth confirming.
            user_form = forms.UserForm(instance=user)
            profile_form = forms.ProfileForm(instance=user.profile)
            return super(EditProfile, self).get(request,
                                                user_form=user_form,
                                                profile_form=profile_form)
        # Both forms are fine. Time to save!
        user_form.save()
        profile = profile_form.save(commit=False)
        profile.user = user
        profile.save()
        messages.success(request, "Profile details saved!")
        return redirect("profiles:show_self")
| mit |
hsluo/youtube-dl | youtube_dl/extractor/rtvnh.py | 84 | 1589 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import ExtractorError
class RTVNHIE(InfoExtractor):
    """Extractor for videos hosted on rtvnh.nl."""
    _VALID_URL = r'https?://(?:www\.)?rtvnh\.nl/video/(?P<id>[0-9]+)'
    _TEST = {
        'url': 'http://www.rtvnh.nl/video/131946',
        'md5': '6e1d0ab079e2a00b6161442d3ceacfc1',
        'info_dict': {
            'id': '131946',
            'ext': 'mp4',
            'title': 'Grote zoektocht in zee bij Zandvoort naar vermiste vrouw',
            'thumbnail': 're:^https?:.*\.jpg$'
        }
    }

    def _real_extract(self, url):
        """Fetch the site's JSON metadata for the id and build the info dict."""
        video_id = self._match_id(url)

        meta = self._parse_json(self._download_webpage(
            'http://www.rtvnh.nl/video/json?m=' + video_id, video_id), video_id)

        status = meta.get('status')
        if status != 200:
            # NOTE(review): if 'status' is absent, status is None and the
            # '%d' below raises TypeError instead of the intended
            # ExtractorError -- confirm the feed always includes it.
            raise ExtractorError(
                '%s returned error code %d' % (self.IE_NAME, status), expected=True)

        # SMIL formats first; fatal=False keeps extraction going if absent.
        formats = self._extract_smil_formats(
            'http://www.rtvnh.nl/video/smil?m=' + video_id, video_id, fatal=False)

        for item in meta['source']['fb']:
            if item.get('type') == 'hls':
                formats.extend(self._extract_m3u8_formats(
                    item['file'], video_id, ext='mp4', entry_protocol='m3u8_native'))
            elif item.get('type') == '':
                # Entries with an empty type are passed through as plain URLs.
                formats.append({'url': item['file']})

        return {
            'id': video_id,
            'title': meta['title'].strip(),
            'thumbnail': meta.get('image'),
            'formats': formats
        }
| unlicense |
wcb2/wcb2 | docs/ekg2book/txt2docbook.py | 2 | 7088 | #!/usr/bin/python
# -*- encoding: iso-8859-2 -*-
import re
import sys
import getopt
# Localized output strings (Polish, iso-8859-2 encoded) used in the generated
# DocBook, plus the raw header keys (str_*) expected in the input text file
# and their rendered titles (msg_*).
msg_session_vars = "Zmienne sesyjne"
msg_vars = "Zmienne"
msg_commands = "Polecenia"
str_type = "typ"                 # input header key for variable type
msg_type = "Typ"                 # rendered title for variable type
str_def_val = "domy¶lna warto¶æ"  # input header key for default value
msg_def_val = "Domy¶lna warto¶æ"  # rendered title for default value
str_params = "parametry"         # input header key for command parameters
msg_params = "Parametry"         # rendered title for command parameters
str_short_desc = "krotki opis"   # input header key for short description
msg_short_desc = "Krótki opis"   # rendered title for short description
# Verbosity switch for debug(); when False, debug messages are suppressed.
# Fix: this flag used to be named ``debug`` and was therefore shadowed by the
# debug() function below, so the ``if`` test inside the function always saw
# the (truthy) function object and the flag had no effect.  Renamed to DEBUG.
DEBUG = True


def usage():
    """Print a usage message (and leave sys.stdout redirected to stderr)."""
    sys.stdout = sys.stderr
    # Single-argument parenthesized print works identically on Python 2 and 3.
    print("Usage: " + sys.argv[0] + " [-c|-v|-s] <filename.txt>")
    print("\t-c\tparse command file (default)")
    print("\t-v\tparse vars file")
    print("\t-s\tparse session vars file")


def debug(str):
    """Write message to stderr, if debugging is enabled."""
    if DEBUG:
        sys.stderr.write("[debug] {" + str + "}\n")
def warn(str):
    """Write a warning message to stderr (always shown, unlike debug())."""
    message = "[warn] {" + str + "}\n"
    sys.stderr.write(message)
def die(str):
    """Write a fatal message to stderr and terminate with exit status 1."""
    message = "[FATAL] {" + str + "}\n"
    sys.stderr.write(message)
    sys.exit(1)
def strip_indent_amount(line):
    """Return a number after which char to cut, to get a visual 8-char unindent.

    Accepted indents and their cut lengths:
      * a single leading tab            -> 1
      * eight leading spaces            -> 8
      * i spaces followed by a tab
        (1 <= i <= 7)                   -> i + 1
    Anything else is not a valid indent -> 0
    """
    if line.startswith('\t'):
        return 1
    if line.startswith(' ' * 8):
        return 8
    # 1-7 spaces followed by a tab: cut the spaces plus the tab.
    for i in range(1, 8):
        if line.startswith(' ' * i + '\t'):
            return i + 1
    return 0
def is_indented(line):
    """Whether the line carries a valid 8-char visual indent.

    Simplified: strip_indent_amount() already encodes the validity rule,
    so just test its result instead of branching on it.
    """
    return strip_indent_amount(line) != 0
def strip_indent(fname, linenum, line):
    """Unindent the line by 8 visual chars, or raise an exception."""
    ret = strip_indent_amount(line)
    if ret == 0:
        raise Exception('Invalid indent %s:%d' % (fname, linenum))
    elif ret >= 2 and ret < 8:
        # 1-6 spaces followed by a tab: accepted, but flagged as unclean.
        warn('Unclean indent %s:%d' % (fname, linenum))
    elif ret == 8 and line[7] == '\t':
        # Seven spaces plus a tab also yields 8; warn about the mixture
        # (eight pure spaces has line[7] == ' ' and passes silently).
        warn('Unclean indent %s:%d' % (fname, linenum))
    return line[ret:]
def parse_header(fname, linenum, vars, session, commands, line):
    """Parse an unindented ``key:value`` header, returning an XML snippet.

    The key is matched against the known header keys for the selected mode
    (vars/session accept type and default-value headers; commands accept
    parameter and short-description headers).  Raises Exception for lines
    without a colon or with an unknown key.
    """
    if line.find(':') < 0:
        raise Exception('Header expected (%s:%d)' % (fname, linenum))
    debug('header on line %d: %s' % (linenum, line))
    data = line.split(':')
    # header name
    if vars or session:
        if data[0] == str_type:
            title = msg_type
        elif data[0] == str_def_val:
            title = msg_def_val
        else:
            raise Exception("Unknown header [%s] (%s:%d)" % (data[0], fname, linenum))
    elif commands:
        if data[0] == str_params:
            title = msg_params
        elif data[0] == str_short_desc:
            title = msg_short_desc
        else:
            raise Exception("Unknown header [%s] (%s:%d)" % (data[0], fname, linenum))
    # Escape XML special characters in the value.  Fix: these replacements
    # had degraded into no-ops (e.g. replace('&', '&')); restored to the
    # proper XML entities, ampersand first so entities are not double-escaped.
    para = data[1].replace('&', '&amp;')
    para = para.replace('<', '&lt;')
    para = para.replace('>', '&gt;')
    return "<formalpara><title>%s</title><para>%s</para></formalpara>\n" % (title, para)
def print_entry(record):
    """Print an XML snippet of the supplied record.

    ``record`` must provide the keys ``term``, ``header`` and ``desc``;
    ``desc`` is emitted verbatim inside a CDATA section.
    """
    # Parenthesized print keeps this valid under both Python 2 and 3
    # (a single parenthesized argument prints identically in each).
    print("""<varlistentry>
	<term>
		%(term)s
	</term>
	<listitem>
		%(header)s
		<screen>
		<![CDATA[%(desc)s]]>
		</screen>
	</listitem>
</varlistentry>
""" % record)
def main():
    """Convert the text file named on the command line to DocBook XML.

    A small state machine walks the file:
      * top_level       -- between entries; a non-indented, non-comment line
                           starts a new entry (its text becomes the term)
      * entry_start     -- the first indented line must be a key:value header
      * header          -- further headers until an indented-empty separator
                           line (which strips to '') switches to contents;
                           a fully empty line ends the entry
      * entry_contents  -- indented lines are accumulated verbatim as desc
    """
    if len(sys.argv) < 2:
        usage()
        sys.exit(1)
    try:
        opts, args = getopt.getopt(sys.argv[1:], "cvs")
    except getopt.GetoptError:
        usage()
        sys.exit(1)
    # Mode flags: exactly one of vars / commands / session ends up truthy.
    vars = None
    commands = True
    session = True
    for o, a in opts:
        if o == "-v":
            vars = True
            commands = None
            session = None
        if o == "-c":
            vars = None
            commands = True
            session = None
        if o == "-s":
            vars = None
            commands = None
            session = True
    fname = args[0]
    try:
        file = open(fname, "r")
    except Exception, e:
        sys.stdout = sys.stderr
        print "Error: can't open file for reading"
        print str(e)
        sys.exit(1)
    debug('Reading file %s' % (fname))
    # begin output
    output = "<sect2><title>";
    if session:
        output += msg_session_vars
    if vars:
        output += msg_vars
    elif commands:
        output += msg_commands
    output += "</title>\n<variablelist>\n"
    print(output)
    record = None
    state_was = 'top_level'
    linenum = 0
    # %T...%n markup is unwrapped, keeping only the enclosed text.
    r = re.compile("%T(.+)%n")
    line = file.readline()
    while line:
        linenum += 1
        line = line[:-1]
        line = r.sub("\\1", line)
        if state_was == 'top_level':
            if line[0:2] == "//" or line == "":
                # still top level
                state_was = 'top_level'
            elif len(line) > 0 and (line[0] == ' ' or line[0] == "\t"):
                raise Exception('Unexpected input on top level (%s:%d)' % (fname, linenum))
            else:
                debug('entry start on line %d: %s' % (linenum, line))
                state_was = 'entry_start'
                record = {'term': line, 'desc' : '', 'header': ''}
        elif state_was == 'entry_start':
            # this must be a header
            if is_indented(line):
                line = strip_indent(fname, linenum, line)
            else:
                raise Exception('Header expected (%s:%d)' % (fname, linenum))
            record['header'] += parse_header(fname, linenum, vars, session, commands, line)
            state_was = 'header'
        elif state_was == 'header':
            if line == '':
                # A completely empty line ends the whole entry.
                debug('entry ended on line %d' % (linenum - 1))
                state_was = 'top_level'
                if record:
                    print_entry(record)
                    record = {'term': line, 'desc' : '', 'header': ''}
                # so it doesn't match later
                line = None
            elif is_indented(line):
                # May strip down to '' (indented-empty separator line).
                line = strip_indent(fname, linenum, line)
            else:
                raise Exception('Header, separator, or empty line expected (%s:%d)' % (fname, linenum))
            if line == None:
                pass
            elif line == '':
                # separator
                debug('entry headers ended on line %d' % (linenum - 1))
                state_was = 'entry_contents'
            elif line.find(':') >= 0:
                record['header'] += parse_header(fname, linenum, vars, session, commands, line)
            else:
                raise Exception('Unparseable header or extra whitespace in separator (%s:%d)' % (fname, linenum))
        elif state_was == 'entry_contents':
            if line == '':
                state_was = 'top_level'
                print_entry(record)
                record = {'term': line, 'desc' : '', 'header': ''}
            elif is_indented(line):
                debug('entry contents on line %d' % (linenum))
                record['desc'] += strip_indent(fname, linenum, line) + "\n"
            else:
                raise Exception('Expected entry contents, separator, or empty line (%s:%d)' % (fname, linenum))
        else:
            raise Exception('Unknown state (%s:%d)' % (fname, linenum))
        line = file.readline()
    # Flush the final entry if the file did not end with a blank line.
    if record:
        print_entry(record)
    print('</variablelist></sect2>')
if __name__ == "__main__":
    # Top-level error handler: any failure is reported via die(), which
    # prints a [FATAL] message and exits with status 1.
    try:
        main()
    except Exception, e:
        die(e.args[0])
| gpl-2.0 |
adoosii/edx-platform | cms/djangoapps/xblock_config/migrations/0001_initial.py | 110 | 4856 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration creating the ``xblock_config_studioconfig`` table."""

    def forwards(self, orm):
        # Adding model 'StudioConfig'
        db.create_table('xblock_config_studioconfig', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('change_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('changed_by', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, on_delete=models.PROTECT)),
            ('enabled', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('disabled_blocks', self.gf('django.db.models.fields.TextField')(default='about course_info static_tab')),
        ))
        db.send_create_signal('xblock_config', ['StudioConfig'])

    def backwards(self, orm):
        # Deleting model 'StudioConfig'
        db.delete_table('xblock_config_studioconfig')

    # Frozen ORM definitions (auto-generated by South; do not edit by hand).
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'xblock_config.studioconfig': {
            'Meta': {'object_name': 'StudioConfig'},
            'change_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'changed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
            'disabled_blocks': ('django.db.models.fields.TextField', [], {'default': "'about course_info static_tab'"}),
            'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        }
    }

    complete_apps = ['xblock_config']
Erotemic/ibeis | super_setup.py | 1 | 26677 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Requirements:
pip install gitpython click ubelt
"""
import re
from os.path import exists
from os.path import join
from os.path import dirname
from os.path import abspath
import ubelt as ub
import functools
class ShellException(Exception):
    """
    Raised when a shell command (run via ``Repo._cmd``) returns a non-zero
    error code.
    """
class DirtyRepoError(Exception):
    """
    If the repo is in an unexpected state, its very easy to break things using
    automated scripts. To be safe, we don't do anything. We ensure this by
    raising this error.  (Raised by ``Repo._assert_clean`` when the working
    tree is dirty.)
    """
def parse_version(package):
    """
    Statically parse the version number from __init__.py

    Reads ``<package>/__init__.py`` relative to this file's directory (or the
    package path itself, if absolute) and returns the string assigned to
    ``__version__`` without importing the package.

    CommandLine:
        python -c "import setup; print(setup.parse_version('ovharn'))"
    """
    from os.path import dirname, join
    import ast
    init_fpath = join(dirname(__file__), package, '__init__.py')
    with open(init_fpath) as file_:
        sourcecode = file_.read()
    pt = ast.parse(sourcecode)

    class VersionVisitor(ast.NodeVisitor):
        """Record the (last) value assigned to ``__version__``."""
        def visit_Assign(self, node):
            for target in node.targets:
                # Fix: only ast.Name targets have an ``.id``; guarding avoids
                # an AttributeError when the scanned file contains attribute
                # or tuple assignments (e.g. ``obj.attr = x``).
                if isinstance(target, ast.Name) and target.id == '__version__':
                    self.version = node.value.s
    visitor = VersionVisitor()
    visitor.visit(pt)
    return visitor.version
class GitURL(object):
    """
    Represent and transform git urls between protocols defined in [3]_.

    The code in GitURL is largely derived from [1]_ and [2]_.
    Credit to @coala and @FriendCode.

    Note:
        while this code aims to suport protocols defined in [3]_, it is only
        tested for specific use cases and therefore might need to be improved.

    References:
        .. [1] https://github.com/coala/git-url-parse
        .. [2] https://github.com/FriendCode/giturlparse.py
        .. [3] https://git-scm.com/docs/git-clone#URLS

    Example:
        >>> self = GitURL('git@gitlab.kitware.com:computer-vision/netharn.git')
        >>> print(self.format('https'))
        https://gitlab.kitware.com/computer-vision/netharn.git
        >>> self = GitURL('https://gitlab.kitware.com/computer-vision/netharn.git')
        >>> print(self.format('ssh'))
        git@gitlab.kitware.com:computer-vision/netharn.git
    """
    SYNTAX_PATTERNS = {
        # git allows for a url style syntax
        'url': re.compile(r'(?P<transport>\w+://)'
                          r'((?P<user>\w+[^@]*@))?'
                          r'(?P<host>[a-z0-9_.-]+)'
                          r'((?P<port>:[0-9]+))?'
                          r'/(?P<path>.*\.git)'),
        # git allows for ssh style syntax
        'ssh': re.compile(r'(?P<user>\w+[^@]*@)'
                          r'(?P<host>[a-z0-9_.-]+)'
                          r':(?P<path>.*\.git)'),
    }

    def __init__(self, url):
        self._url = url
        self._parts = None

    def parts(self):
        """
        Parse the stored URL into its components.

        Returns:
            dict: with keys syntax, host, user, port, path and transport
                (empty string for any component that is absent).

        Raises:
            Exception: if no known syntax matches the URL.
        """
        info = {
            'syntax': '',
            'host': '',
            'user': '',
            'port': '',
            'path': None,
            'transport': '',
        }
        match = None
        for syntax, regex in self.SYNTAX_PATTERNS.items():
            match = regex.search(self._url)
            if match:
                info['syntax'] = syntax
                info.update(match.groupdict())
                break
        if not match:
            raise Exception('Invalid URL {!r}'.format(self._url))
        # Normalize unmatched optional groups from None to ''.
        return {key: ('' if val is None else val)
                for key, val in info.items()}

    def format(self, protocol):
        """
        Return this URL re-expressed under a different protocol.

        Args:
            protocol (str): 'ssh' for scp-like syntax, otherwise the
                transport scheme to use (e.g. 'https').
        """
        fields = self.parts()
        if protocol == 'ssh':
            fields['user'] = 'git@'
            return '{user}{host}:{path}'.format(**fields)
        fields['transport'] = protocol + '://'
        fields['port'] = ''
        fields['user'] = ''
        return '{transport}{user}{host}{port}/{path}'.format(**fields)
class Repo(ub.NiceRepr):
    """
    Abstraction that references a git repository, and is able to manipulate it.

    A common use case is to define a `remote` and a `code_dpath`, which lets
    you check and ensure that the repo is cloned and on a particular branch.
    You can also query its status, and pull, and perform custom git commands.

    Args:
        *args: name, dpath, code_dpath, remotes, remote, branch

    Attributes:
        All names listed in args are attributse. In addition, the class also
        exposes these derived attributes.

        url (URI): where the primary location is

    Example:
        >>> # Here is a simple example referencing ubelt
        >>> from super_setup import *
        >>> import ubelt as ub
        >>> repo = Repo(
        >>>     remote='https://github.com/Erotemic/ubelt.git',
        >>>     code_dpath=ub.ensuredir(ub.expandpath('~/tmp/demo-repos')),
        >>> )
        >>> print('repo = {}'.format(repo))
        >>> repo.check()
        >>> repo.ensure()
        >>> repo.check()
        >>> repo.status()
        >>> repo._cmd('python setup.py build')
        >>> repo._cmd('./run_doctests.sh')
        repo = <Repo('ubelt')>

        >>> # Here is a less simple example referencing ubelt
        >>> from super_setup import *
        >>> import ubelt as ub
        >>> repo = Repo(
        >>>     name='ubelt-local',
        >>>     remote='github',
        >>>     branch='master',
        >>>     remotes={
        >>>         'github': 'https://github.com/Erotemic/ubelt.git',
        >>>         'fakemirror': 'https://gitlab.com/Erotemic/ubelt.git',
        >>>     },
        >>>     code_dpath=ub.ensuredir(ub.expandpath('~/tmp/demo-repos')),
        >>> )
        >>> print('repo = {}'.format(repo))
        >>> repo.ensure()
        >>> repo._cmd('python setup.py build')
        >>> repo._cmd('./run_doctests.sh')
    """
    def __init__(repo, **kwargs):
        """All configuration is keyword-only; unknown keys raise ValueError."""
        repo.name = kwargs.pop('name', None)
        repo.dpath = kwargs.pop('dpath', None)
        repo.code_dpath = kwargs.pop('code_dpath', None)
        repo.remotes = kwargs.pop('remotes', None)
        repo.remote = kwargs.pop('remote', None)
        repo.branch = kwargs.pop('branch', 'master')

        # In-memory logs of messages and shell commands for later replay.
        repo._logged_lines = []
        repo._logged_cmds = []

        # Reconcile the `remote` (primary name) and `remotes` (name->url map).
        if repo.remote is None:
            if repo.remotes is None:
                raise ValueError('must specify some remote')
            else:
                if len(repo.remotes) > 1:
                    raise ValueError('remotes are ambiguous, specify one')
                else:
                    repo.remote = ub.peek(repo.remotes)
        else:
            if repo.remotes is None:
                _default_remote = 'origin'
                repo.remotes = {
                    _default_remote: repo.remote
                }
                repo.remote = _default_remote

        repo.url = repo.remotes[repo.remote]

        # Derive the name from the url tail when not given explicitly.
        if repo.name is None:
            suffix = repo.url.split('/')[-1]
            repo.name = suffix.split('.git')[0]

        if repo.dpath is None:
            repo.dpath = join(repo.code_dpath, repo.name)
        repo.pkg_dpath = join(repo.dpath, repo.name)

        for path_attr in ['dpath', 'code_dpath']:
            path = getattr(repo, path_attr)
            if path is not None:
                setattr(repo, path_attr, ub.expandpath(path))

        repo.verbose = kwargs.pop('verbose', 3)
        if kwargs:
            raise ValueError('unknown kwargs = {}'.format(kwargs.keys()))

        # Lazily-constructed gitpython handle (see the `pygit` property).
        repo._pygit = None

    def set_protocol(self, protocol):
        """
        Changes the url protocol to either ssh or https

        Args:
            protocol (str): can be ssh or https
        """
        gurl = GitURL(self.url)
        self.url = gurl.format(protocol)

    def info(repo, msg):
        """Log an INFO-level message and echo it when verbose."""
        repo._logged_lines.append(('INFO', 'INFO: ' + msg))
        if repo.verbose >= 1:
            print(msg)

    def debug(repo, msg):
        """Log a DEBUG-level message and echo it when verbose."""
        repo._logged_lines.append(('DEBUG', 'DEBUG: ' + msg))
        if repo.verbose >= 1:
            print(msg)

    def _getlogs(repo):
        """Return all logged messages joined into one string."""
        return '\n'.join([t[1] for t in repo._logged_lines])

    def __nice__(repo):
        return '{}, branch={}'.format(repo.name, repo.branch)

    def _cmd(repo, command, cwd=ub.NoParam, verbose=ub.NoParam):
        """Run a shell command (default cwd: the repo checkout), logging it.

        Raises:
            ShellException: if the command exits non-zero.
        """
        if verbose is ub.NoParam:
            verbose = repo.verbose
        if cwd is ub.NoParam:
            cwd = repo.dpath
        repo._logged_cmds.append((command, cwd))
        repo.debug('Run {!r} in {!r}'.format(command, cwd))

        info = ub.cmd(command, cwd=cwd, verbose=verbose)

        if verbose:
            if info['out'].strip():
                repo.info(info['out'])
            if info['err'].strip():
                repo.debug(info['err'])

        if info['ret'] != 0:
            raise ShellException(ub.repr2(info))
        return info

    @property
    # @ub.memoize_property
    def pygit(repo):
        """ pip install gitpython """
        import git as gitpython
        if repo._pygit is None:
            repo._pygit = gitpython.Repo(repo.dpath)
        return repo._pygit

    def develop(repo):
        """Run the repo's run_developer_setup.sh script."""
        devsetup_script_fpath = join(repo.dpath, 'run_developer_setup.sh')
        if not exists(devsetup_script_fpath):
            raise AssertionError('Assume we always have run_developer_setup.sh: repo={!r}'.format(repo))
        repo._cmd(devsetup_script_fpath, cwd=repo.dpath)

    def doctest(repo):
        """Run the repo's run_doctests.sh script."""
        devsetup_script_fpath = join(repo.dpath, 'run_doctests.sh')
        if not exists(devsetup_script_fpath):
            raise AssertionError('Assume we always have run_doctests.sh: repo={!r}'.format(repo))
        repo._cmd(devsetup_script_fpath, cwd=repo.dpath)

    def clone(repo):
        """Recursively clone the repo (and requested branch) into repo.dpath."""
        if exists(repo.dpath):
            raise ValueError('cannot clone into non-empty directory')
        args = '--recursive'
        if repo.branch is not None:
            args += ' -b {}'.format(repo.branch)
        command = 'git clone {args} {url} {dpath}'.format(args=args, url=repo.url, dpath=repo.dpath)
        repo._cmd(command, cwd=repo.code_dpath)

    def _assert_clean(repo):
        """Raise DirtyRepoError when the working tree has uncommitted changes."""
        if repo.pygit.is_dirty():
            raise DirtyRepoError('The repo={} is dirty'.format(repo))

    def check(repo):
        """Dry-run version of ensure(): report what would change."""
        repo.ensure(dry=True)

    def versions(repo):
        """
        Print current version information
        """
        fmtkw = {}
        # NOTE(review): pkg_dpath is absolute, so os.path.join discards
        # dirname(__file__) inside parse_version -- relies on join semantics.
        fmtkw['pkg'] = parse_version(repo.pkg_dpath) + ','
        fmtkw['sha1'] = repo._cmd('git rev-parse HEAD', verbose=0)['out'].strip()
        try:
            fmtkw['tag'] = repo._cmd('git describe --tags', verbose=0)['out'].strip() + ','
        except ShellException:
            fmtkw['tag'] = '<None>,'
        fmtkw['branch'] = repo.pygit.active_branch.name + ','
        fmtkw['repo'] = repo.name + ','
        repo.info('repo={repo:<14} pkg={pkg:<12} tag={tag:<18} branch={branch:<10} sha1={sha1}'.format(
            **fmtkw))

    def ensure_clone(repo):
        """Clone the repo unless a checkout already exists."""
        if exists(repo.dpath):
            repo.debug('No need to clone existing repo={}'.format(repo))
        else:
            repo.debug('Clone non-existing repo={}'.format(repo))
            repo.clone()

    def ensure(repo, dry=False):
        """
        Ensure that the repo is checked out on your local machine, that the
        correct branch is checked out, and the upstreams are targeting the
        correct remotes.
        """
        if repo.verbose > 0:
            if dry:
                repo.debug(ub.color_text('Checking {}'.format(repo), 'blue'))
            else:
                repo.debug(ub.color_text('Ensuring {}'.format(repo), 'blue'))

        if not exists(repo.dpath):
            repo.debug('NEED TO CLONE {}'.format(repo))
            if dry:
                return
        repo.ensure_clone()

        repo._assert_clean()

        # Ensure all registered remotes exist
        for remote_name, remote_url in repo.remotes.items():
            try:
                remote = repo.pygit.remotes[remote_name]
                have_urls = list(remote.urls)
                if remote_url not in have_urls:
                    print('WARNING: REMOTE NAME EXIST BUT URL IS NOT {}. '
                          'INSTEAD GOT: {}'.format(remote_url, have_urls))
            except (IndexError):
                try:
                    print('NEED TO ADD REMOTE {}->{} FOR {}'.format(
                        remote_name, remote_url, repo))
                    if not dry:
                        repo._cmd('git remote add {} {}'.format(remote_name, remote_url))
                except ShellException:
                    if remote_name == repo.remote:
                        # Only error if the main remote is not available
                        raise

        # Ensure we have the right remote
        try:
            remote = repo.pygit.remotes[repo.remote]
        except IndexError:
            if not dry:
                raise AssertionError('Something went wrong')
            else:
                remote = None

        if remote is not None:
            try:
                if not remote.exists():
                    raise IndexError
                else:
                    repo.debug('The requested remote={} name exists'.format(remote))
            except IndexError:
                repo.debug('WARNING: remote={} does not exist'.format(remote))
            else:
                if remote.exists():
                    repo.debug('Requested remote does exists')
                    remote_branchnames = [ref.remote_head for ref in remote.refs]
                    if repo.branch not in remote_branchnames:
                        repo.info('Branch name not found in local remote. Attempting to fetch')
                        if dry:
                            repo.info('dry run, not fetching')
                        else:
                            repo._cmd('git fetch {}'.format(remote.name))
                            repo.info('Fetch was successful')
                else:
                    repo.debug('Requested remote does NOT exist')

            # Ensure the remote points to the right place
            if repo.url not in list(remote.urls):
                repo.debug('WARNING: The requested url={} disagrees with remote urls={}'.format(repo.url, list(remote.urls)))
                if dry:
                    repo.info('Dry run, not updating remote url')
                else:
                    repo.info('Updating remote url')
                    repo._cmd('git remote set-url {} {}'.format(repo.remote, repo.url))

        # Ensure we are on the right branch
        if repo.branch != repo.pygit.active_branch.name:
            repo.debug('NEED TO SET BRANCH TO {} for {}'.format(repo.branch, repo))
            try:
                repo._cmd('git checkout {}'.format(repo.branch))
            except ShellException:
                repo.debug('Checkout failed. Branch name might be ambiguous. Trying again')
                try:
                    repo._cmd('git checkout -b {} {}/{}'.format(repo.branch, repo.remote, repo.branch))
                except ShellException:
                    raise Exception('does the branch exist on the remote?')

        tracking_branch = repo.pygit.active_branch.tracking_branch()
        if tracking_branch is None or tracking_branch.remote_name != repo.remote:
            repo.debug('NEED TO SET UPSTREAM FOR FOR {}'.format(repo))
            try:
                remote = repo.pygit.remotes[repo.remote]
                if not remote.exists():
                    raise IndexError
            except IndexError:
                repo.debug('WARNING: remote={} does not exist'.format(remote))
            else:
                if remote.exists():
                    remote_branchnames = [ref.remote_head for ref in remote.refs]
                    if repo.branch not in remote_branchnames:
                        if dry:
                            repo.info('Branch name not found in local remote. Dry run, use ensure to attempt to fetch')
                        else:
                            repo.info('Branch name not found in local remote. Attempting to fetch')
                            repo._cmd('git fetch {}'.format(repo.remote))

                            remote_branchnames = [ref.remote_head for ref in remote.refs]
                            if repo.branch not in remote_branchnames:
                                raise Exception('Branch name still does not exist')

                    if not dry:
                        repo._cmd('git branch --set-upstream-to={remote}/{branch} {branch}'.format(
                            remote=repo.remote, branch=repo.branch
                        ))
                    else:
                        repo.info('Would attempt to set upstream')

        # Print some status
        repo.debug(' * branch = {} -> {}'.format(
            repo.pygit.active_branch.name,
            repo.pygit.active_branch.tracking_branch(),
        ))

    def pull(repo):
        """git pull, refusing to act on a dirty working tree."""
        repo._assert_clean()
        repo._cmd('git pull')

    def status(repo):
        """Print `git status` for this repo."""
        repo._cmd('git status')
def worker(repo, funcname, kwargs):
    """Run ``repo.<funcname>(**kwargs)`` quietly and hand the repo back.

    Module-level so it can be pickled into a ProcessPoolExecutor.
    """
    repo.verbose = 0
    getattr(repo, funcname)(**kwargs)
    return repo
class RepoRegistry(ub.NiceRepr):
    """Holds an ordered collection of Repo objects and applies batch actions."""

    def __init__(registery, repos):
        registery.repos = repos

    def __nice__(registery):
        return ub.repr2(registery.repos, si=1, nl=1)

    def apply(registery, funcname, num_workers=0, **kwargs):
        """Invoke ``repo.<funcname>(**kwargs)`` on every registered repo.

        Args:
            funcname (str): name of the Repo method to call.
            num_workers (int): 0 runs serially; otherwise a process pool of
                this size is used (each repo is pickled through ``worker``).

        Repos that raise DirtyRepoError are skipped with a warning.  After
        all repos are processed, the shell commands each repo logged are
        replayed as a copy-pastable script.
        """
        print(ub.color_text('--- APPLY {} ---'.format(funcname), 'white'))
        print(' * num_workers = {!r}'.format(num_workers))

        if num_workers == 0:
            processed_repos = []
            for repo in registery.repos:
                print(ub.color_text('--- REPO = {} ---'.format(repo), 'blue'))
                try:
                    getattr(repo, funcname)(**kwargs)
                except DirtyRepoError:
                    print(ub.color_text('Ignoring dirty repo={}'.format(repo), 'red'))
                processed_repos.append(repo)
        else:
            from concurrent import futures
            # with futures.ThreadPoolExecutor(max_workers=num_workers) as pool:
            with futures.ProcessPoolExecutor(max_workers=num_workers) as pool:
                tasks = []
                for i, repo in enumerate(registery.repos):
                    future = pool.submit(worker, repo, funcname, kwargs)
                    future.repo = repo
                    tasks.append(future)
                processed_repos = []
                for future in futures.as_completed(tasks):
                    repo = future.repo
                    print(ub.color_text('--- REPO = {} ---'.format(repo), 'blue'))
                    try:
                        # The worker returns the (mutated) repo copy; its
                        # logs replace the local repo's empty ones.
                        repo = future.result()
                    except DirtyRepoError:
                        print(ub.color_text('Ignoring dirty repo={}'.format(repo), 'red'))
                    else:
                        print(repo._getlogs())
                    processed_repos.append(repo)

        print(ub.color_text('--- FINISHED APPLY {} ---'.format(funcname), 'white'))

        SHOW_CMDLOG = 1

        if SHOW_CMDLOG:
            print('LOGGED COMMANDS')
            import os
            ORIG_CWD = MY_CWD = os.getcwd()
            for repo in processed_repos:
                print('# --- For repo = {!r} --- '.format(repo))
                for t in repo._logged_cmds:
                    cmd, cwd = t
                    if cwd is None:
                        # Fix: was ``os.get_cwd()``, which does not exist and
                        # raised AttributeError whenever a command had no cwd.
                        cwd = os.getcwd()
                    if cwd != MY_CWD:
                        print('cd ' + ub.shrinkuser(cwd))
                        MY_CWD = cwd
                    print(cmd)
            print('cd ' + ub.shrinkuser(ORIG_CWD))
def determine_code_dpath():
    """
    Returns a good place to put the code for the internal dependencies.

    Returns:
        PathLike: the directory where you want to store your code

    In order, the methods used for determing this are:
        * the `--codedpath` command line flag (may be undocumented in the CLI)
        * the `--codedir` command line flag (may be undocumented in the CLI)
        * the CODE_DPATH environment variable
        * the CODE_DIR environment variable
        * the directory above this script (e.g. if this is in ~/code/repo/super_setup.py then code dir resolves to ~/code)
        * the user's ~/code directory.
    """
    import os
    # NOTE(review): the list below checks --codedir before --codedpath,
    # which disagrees with the priority order stated in the docstring.
    candidates = [
        ub.argval('--codedir', default=''),
        ub.argval('--codedpath', default=''),
        os.environ.get('CODE_DPATH', ''),
        os.environ.get('CODE_DIR', ''),
    ]
    # First non-empty candidate wins.
    valid = [c for c in candidates if c != '']
    if len(valid) > 0:
        code_dpath = valid[0]
    else:
        try:
            # This file should be in the top level of a repo, the directory from
            # this file should be the code directory.
            this_fpath = abspath(__file__)
            code_dpath = abspath(dirname(dirname(this_fpath)))
        except NameError:
            code_dpath = ub.expandpath('~/code')

    if not exists(code_dpath):
        # Maybe the user gave a path with ~ or env vars in it.
        code_dpath = ub.expandpath(code_dpath)

    # if CODE_DIR and not exists(CODE_DIR):
    #     import warnings
    #     warnings.warn('environment variable CODE_DIR={!r} was defined, but does not exist'.format(CODE_DIR))

    if not exists(code_dpath):
        raise Exception(ub.codeblock(
            '''
            Please specify a correct code_dir using the CLI or ENV.
            code_dpath={!r} does not exist.
            '''.format(code_dpath)))
    return code_dpath
def make_netharn_registry():
    """Build the registry of repos managed by this super-setup script.

    Every tracked repo lives on the 'Erotemic' GitHub remote and follows
    its master branch, so the per-repo configuration is generated from the
    repo name alone.
    """
    code_dpath = determine_code_dpath()
    CommonRepo = functools.partial(Repo, code_dpath=code_dpath)
    # The util libs
    repo_names = [
        'utool',
        'vtool_ibeis',
        'dtool_ibeis',
        'plottool_ibeis',
        'guitool_ibeis',
        'ibeis',
    ]
    repos = [
        CommonRepo(
            name=name, branch='master', remote='Erotemic',
            remotes={'Erotemic': 'git@github.com:Erotemic/{}.git'.format(name)},
        )
        for name in repo_names
    ]
    registery = RepoRegistry(repos)
    return registery
def main():
    """
    CLI entry point: builds the repo registry, applies global flags
    (--only, --workers, protocol selection), then dispatches one of the
    click subcommands (pull/ensure/check/status/develop/doctest/versions).
    """
    import click
    registery = make_netharn_registry()
    only = ub.argval('--only', default=None)
    if only is not None:
        # Restrict every operation to a comma-separated subset of repo names.
        only = only.split(',')
        registery.repos = [repo for repo in registery.repos if repo.name in only]
    num_workers = int(ub.argval('--workers', default=8))
    if ub.argflag('--serial'):
        num_workers = 0
    protocol = ub.argval('--protocol', None)
    # Convenience flags override --protocol; a later flag wins (ssh > http > https).
    if ub.argflag('--https'):
        protocol = 'https'
    if ub.argflag('--http'):
        protocol = 'http'
    if ub.argflag('--ssh'):
        protocol = 'ssh'
    if protocol is not None:
        for repo in registery.repos:
            repo.set_protocol(protocol)
    # Shared click settings: pass unknown args through to subcommands.
    default_context_settings = {
        'help_option_names': ['-h', '--help'],
        'allow_extra_args': True,
        'ignore_unknown_options': True}

    @click.group(context_settings=default_context_settings)
    def cli_group():
        pass

    @cli_group.add_command
    @click.command('pull', context_settings=default_context_settings)
    def pull():
        registery.apply('pull', num_workers=num_workers)

    @cli_group.add_command
    @click.command('ensure', context_settings=default_context_settings)
    def ensure():
        """
        Ensure is the live run of "check".
        """
        registery.apply('ensure', num_workers=num_workers)

    @cli_group.add_command
    @click.command('ensure_clone', context_settings=default_context_settings)
    def ensure_clone():
        registery.apply('ensure_clone', num_workers=num_workers)

    @cli_group.add_command
    @click.command('check', context_settings=default_context_settings)
    def check():
        """
        Check is just a dry run of "ensure".
        """
        registery.apply('check', num_workers=num_workers)

    @cli_group.add_command
    @click.command('status', context_settings=default_context_settings)
    def status():
        registery.apply('status', num_workers=num_workers)

    @cli_group.add_command
    @click.command('develop', context_settings=default_context_settings)
    def develop():
        # NOTE(review): develop is forced serial (num_workers=0) — presumably
        # because in-place installs cannot run in parallel; confirm.
        registery.apply('develop', num_workers=0)

    @cli_group.add_command
    @click.command('doctest', context_settings=default_context_settings)
    def doctest():
        registery.apply('doctest')

    @cli_group.add_command
    @click.command('versions', context_settings=default_context_settings)
    def versions():
        registery.apply('versions')

    # Dispatch to the selected subcommand.
    cli_group()


if __name__ == '__main__':
    main()
| apache-2.0 |
Zlash65/erpnext | erpnext/accounts/doctype/accounting_dimension/test_accounting_dimension.py | 6 | 3500 | # -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from erpnext.accounts.doctype.sales_invoice.test_sales_invoice import create_sales_invoice
from erpnext.accounts.doctype.journal_entry.test_journal_entry import make_journal_entry
from erpnext.accounts.doctype.accounting_dimension.accounting_dimension import delete_accounting_dimension
class TestAccountingDimension(unittest.TestCase):
    """Integration tests for user-defined accounting dimensions.

    Verifies that dimension values (Department, Location) set on vouchers
    propagate to the corresponding GL Entries, and that a dimension marked
    mandatory-for-balance-sheet blocks submission when left empty.
    Requires a frappe site with the standard _Test fixtures.
    """

    def setUp(self):
        # Create (or re-enable) the Department and Location dimensions so
        # every test starts from the same state; tearDown disables them again.
        frappe.set_user("Administrator")
        if not frappe.db.exists("Accounting Dimension", {"document_type": "Department"}):
            dimension = frappe.get_doc({
                "doctype": "Accounting Dimension",
                "document_type": "Department",
            }).insert()
        else:
            dimension1 = frappe.get_doc("Accounting Dimension", "Department")
            dimension1.disabled = 0
            dimension1.save()
        if not frappe.db.exists("Accounting Dimension", {"document_type": "Location"}):
            dimension1 = frappe.get_doc({
                "doctype": "Accounting Dimension",
                "document_type": "Location",
            })
            # Location defaults to "Block 1" and is mandatory for
            # balance-sheet accounts (exercised by test_mandatory).
            dimension1.append("dimension_defaults", {
                "company": "_Test Company",
                "reference_document": "Location",
                "default_dimension": "Block 1",
                "mandatory_for_bs": 1
            })
            dimension1.insert()
            dimension1.save()
        else:
            dimension1 = frappe.get_doc("Accounting Dimension", "Location")
            dimension1.disabled = 0
            dimension1.save()

    def test_dimension_against_sales_invoice(self):
        """Dimension set on an invoice item must appear on its GL Entry."""
        si = create_sales_invoice(do_not_save=1)
        si.location = "Block 1"
        si.append("items", {
            "item_code": "_Test Item",
            "warehouse": "_Test Warehouse - _TC",
            "qty": 1,
            "rate": 100,
            "income_account": "Sales - _TC",
            "expense_account": "Cost of Goods Sold - _TC",
            "cost_center": "_Test Cost Center - _TC",
            "department": "_Test Department - _TC",
            "location": "Block 1"
        })
        si.save()
        si.submit()
        gle = frappe.get_doc("GL Entry", {"voucher_no": si.name, "account": "Sales - _TC"})
        self.assertEqual(gle.get('department'), "_Test Department - _TC")

    def test_dimension_against_journal_entry(self):
        """Dimensions set on JE rows must appear on both GL Entries."""
        je = make_journal_entry("Sales - _TC", "Sales Expenses - _TC", 500, save=False)
        je.accounts[0].update({"department": "_Test Department - _TC"})
        je.accounts[1].update({"department": "_Test Department - _TC"})
        je.accounts[0].update({"location": "Block 1"})
        je.accounts[1].update({"location": "Block 1"})
        je.save()
        je.submit()
        gle = frappe.get_doc("GL Entry", {"voucher_no": je.name, "account": "Sales - _TC"})
        gle1 = frappe.get_doc("GL Entry", {"voucher_no": je.name, "account": "Sales Expenses - _TC"})
        self.assertEqual(gle.get('department'), "_Test Department - _TC")
        self.assertEqual(gle1.get('department'), "_Test Department - _TC")

    def test_mandatory(self):
        """Submitting with an empty mandatory dimension must raise."""
        si = create_sales_invoice(do_not_save=1)
        si.append("items", {
            "item_code": "_Test Item",
            "warehouse": "_Test Warehouse - _TC",
            "qty": 1,
            "rate": 100,
            "income_account": "Sales - _TC",
            "expense_account": "Cost of Goods Sold - _TC",
            "cost_center": "_Test Cost Center - _TC",
            "location": ""
        })
        si.save()
        self.assertRaises(frappe.ValidationError, si.submit)

    def tearDown(self):
        disable_dimension()
def disable_dimension():
    """Disable the Department and Location accounting dimensions.

    Used by tearDown so the dimensions created in setUp do not leak into
    other test modules.
    """
    for dimension_name in ("Department", "Location"):
        dimension = frappe.get_doc("Accounting Dimension", dimension_name)
        dimension.disabled = 1
        dimension.save()
| gpl-3.0 |
caseylucas/ansible-modules-core | commands/command.py | 6 | 8459 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>, and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import copy
import sys
import datetime
import glob
import traceback
import re
import shlex
import os
# NOTE(review): the YAML indentation inside DOCUMENTATION and EXAMPLES appears
# to have been stripped by whatever produced this file; as written these
# strings will not parse as the ansible doc YAML. Restore from upstream
# ansible-modules-core before shipping — the string content is runtime data,
# so it is left byte-identical here rather than guessed at.
DOCUMENTATION = '''
---
module: command
short_description: Executes a command on a remote node
version_added: historical
description:
- The M(command) module takes the command name followed by a list of space-delimited arguments.
- The given command will be executed on all selected nodes. It will not be
processed through the shell, so variables like C($HOME) and operations
like C("<"), C(">"), C("|"), and C("&") will not work (use the M(shell)
module if you need these features).
options:
free_form:
description:
- the command module takes a free form command to run. There is no parameter actually named 'free form'.
See the examples!
required: true
default: null
creates:
description:
- a filename or (since 2.0) glob pattern, when it already exists, this step will B(not) be run.
required: no
default: null
removes:
description:
- a filename or (since 2.0) glob pattern, when it does not exist, this step will B(not) be run.
version_added: "0.8"
required: no
default: null
chdir:
description:
- cd into this directory before running the command
version_added: "0.6"
required: false
default: null
executable:
description:
- change the shell used to execute the command. Should be an absolute path to the executable.
required: false
default: null
version_added: "0.9"
warn:
version_added: "1.8"
default: yes
description:
- if command warnings are on in ansible.cfg, do not warn about this particular line if set to no/false.
required: false
notes:
- If you want to run a command through the shell (say you are using C(<),
C(>), C(|), etc), you actually want the M(shell) module instead. The
M(command) module is much more secure as it's not affected by the user's
environment.
- " C(creates), C(removes), and C(chdir) can be specified after the command. For instance, if you only want to run a command if a certain file does not exist, use this."
author:
- Ansible Core Team
- Michael DeHaan
'''
EXAMPLES = '''
# Example from Ansible Playbooks.
- command: /sbin/shutdown -t now
# Run the command if the specified file does not exist.
- command: /usr/bin/make_database.sh arg1 arg2 creates=/path/to/database
# You can also use the 'args' form to provide the options. This command
# will change the working directory to somedir/ and will only run when
# /path/to/database doesn't exist.
- command: /usr/bin/make_database.sh arg1 arg2
args:
chdir: somedir/
creates: /path/to/database
'''
# Dict of options and their defaults
OPTIONS = {'chdir': None,
           'creates': None,
           'executable': None,
           'NO_LOG': None,
           'removes': None,
           'warn': True,
           }
# This is a pretty complex regex, which functions as follows:
#
# 1. (^|\s)
# ^ look for a space or the beginning of the line
# 2. ({options_list})=
# ^ expanded to (chdir|creates|executable...)=
# look for a valid param, followed by an '='
# 3. (?P<quote>[\'"])?
# ^ look for an optional quote character, which can either be
# a single or double quote character, and store it for later
# 4. (.*?)
# ^ match everything in a non-greedy manner until...
# 5. (?(quote)(?<!\\)(?P=quote))((?<!\\)(?=\s)|$)
# ^ a non-escaped space or a non-escaped quote of the same kind
# that was matched in the first 'quote' is found, or the end of
# the line is reached
OPTIONS_REGEX = '|'.join(OPTIONS.keys())
PARAM_REGEX = re.compile(
    r'(^|\s)(' + OPTIONS_REGEX +
    r')=(?P<quote>[\'"])?(.*?)(?(quote)(?<!\\)(?P=quote))((?<!\\)(?=\s)|$)'
)
def check_command(commandline):
    """Return a list of warning strings for a raw command line.

    Inspects only the basename of the first word of *commandline* and
    suggests a dedicated Ansible module (file, get_url, yum, ...) or the
    'become' keywords when one would be a better fit than a raw command.
    """
    file_module_args = {'chown': 'owner', 'chmod': 'mode', 'chgrp': 'group',
                        'ln': 'state=link', 'mkdir': 'state=directory',
                        'rmdir': 'state=absent', 'rm': 'state=absent', 'touch': 'state=touch'}
    module_for_command = {'hg': 'hg', 'curl': 'get_url or uri', 'wget': 'get_url or uri',
                          'svn': 'subversion', 'service': 'service',
                          'mount': 'mount', 'rpm': 'yum, dnf or zypper', 'yum': 'yum', 'apt-get': 'apt',
                          'tar': 'unarchive', 'unzip': 'unarchive', 'sed': 'template or lineinfile',
                          'dnf': 'dnf', 'zypper': 'zypper'}
    become_methods = ('sudo', 'su', 'pbrun', 'pfexec', 'runas')
    warnings = []
    executable = os.path.basename(commandline.split()[0])
    if executable in file_module_args:
        warnings.append("Consider using file module with %s rather than running %s"
                        % (file_module_args[executable], executable))
    if executable in module_for_command:
        warnings.append("Consider using %s module rather than running %s"
                        % (module_for_command[executable], executable))
    if executable in become_methods:
        warnings.append("Consider using 'become', 'become_method', and 'become_user' rather than running %s" % (executable,))
    return warnings
def main():
    """Ansible 'command' module entry point.

    Parses the raw (non key=value) parameters, honors the creates/removes
    idempotence guards and chdir, runs the command, and exits via
    module.exit_json / fail_json (this function never returns normally).
    """
    # the command module is the one ansible module that does not take key=value args
    # hence don't copy this one if you are looking to build others!
    module = AnsibleModule(
        argument_spec=dict(
            _raw_params = dict(),
            _uses_shell = dict(type='bool', default=False),
            chdir = dict(),
            executable = dict(),
            creates = dict(),
            removes = dict(),
            warn = dict(type='bool', default=True),
        )
    )
    shell = module.params['_uses_shell']
    chdir = module.params['chdir']
    executable = module.params['executable']
    args = module.params['_raw_params']
    creates = module.params['creates']
    removes = module.params['removes']
    warn = module.params['warn']
    if args.strip() == '':
        module.fail_json(rc=256, msg="no command given")
    if chdir:
        chdir = os.path.abspath(os.path.expanduser(chdir))
        os.chdir(chdir)
    if creates:
        # do not run the command if the line contains creates=filename
        # and the filename already exists. This allows idempotence
        # of command executions.
        v = os.path.expanduser(creates)
        if glob.glob(v):
            module.exit_json(
                cmd=args,
                stdout="skipped, since %s exists" % v,
                changed=False,
                stderr=False,
                rc=0
            )
    if removes:
        # do not run the command if the line contains removes=filename
        # and the filename does not exist. This allows idempotence
        # of command executions.
        v = os.path.expanduser(removes)
        if not glob.glob(v):
            module.exit_json(
                cmd=args,
                stdout="skipped, since %s does not exist" % v,
                changed=False,
                stderr=False,
                rc=0
            )
    warnings = list()
    if warn:
        warnings = check_command(args)
    if not shell:
        # Without a shell, split the raw string into an argv list ourselves.
        args = shlex.split(args)
    startd = datetime.datetime.now()
    rc, out, err = module.run_command(args, executable=executable, use_unsafe_shell=shell)
    endd = datetime.datetime.now()
    delta = endd - startd
    if out is None:
        out = ''
    if err is None:
        err = ''
    module.exit_json(
        cmd = args,
        stdout = out.rstrip("\r\n"),
        stderr = err.rstrip("\r\n"),
        rc = rc,
        start = str(startd),
        end = str(endd),
        delta = str(delta),
        changed = True,
        warnings = warnings
    )

# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.splitter import *

main()
| gpl-3.0 |
xbmc/atv2 | xbmc/lib/libPython/Python/Lib/plat-mac/lib-scriptpackages/Netscape/Required_suite.py | 8 | 3414 | """Suite Required suite:
Level 0, version 0
Generated from /Volumes/Sap/Applications (Mac OS 9)/Netscape Communicator\xe2\x84\xa2 Folder/Netscape Communicator\xe2\x84\xa2
AETE/AEUT resource version 1/0, language 0, script 0
"""
import aetools
import MacOS
_code = 'reqd'
from StdSuites.Required_Suite import *
class Required_suite_Events(Required_Suite_Events):
    """AppleEvent handlers for the classic 'Required suite' (open, print,
    quit, run).

    NOTE(review): this is machine-generated Python 2 code (py2-only
    `raise X, y` syntax, dict.has_key, mutable default args) — left verbatim
    apart from comments; it cannot run under Python 3.
    Each handler packs its arguments, sends the four-char event code via
    self.send, raises aetools.Error on an 'errn' reply, and returns the
    direct ('----') result if present.
    """

    def open(self, _object, _attributes={}, **_arguments):
        """open: Open the specified object(s)
        Required argument: list of objects to open
        Keyword argument _attributes: AppleEvent attribute dictionary
        """
        _code = 'aevt'
        _subcode = 'odoc'

        if _arguments: raise TypeError, 'No optional args expected'
        _arguments['----'] = _object

        _reply, _arguments, _attributes = self.send(_code, _subcode,
                _arguments, _attributes)
        if _arguments.get('errn', 0):
            raise aetools.Error, aetools.decodeerror(_arguments)
        # XXXX Optionally decode result
        if _arguments.has_key('----'):
            return _arguments['----']

    def print_(self, _object, _attributes={}, **_arguments):
        """print: Print the specified object(s)
        Required argument: list of objects to print
        Keyword argument _attributes: AppleEvent attribute dictionary
        """
        _code = 'aevt'
        _subcode = 'pdoc'

        if _arguments: raise TypeError, 'No optional args expected'
        _arguments['----'] = _object

        _reply, _arguments, _attributes = self.send(_code, _subcode,
                _arguments, _attributes)
        if _arguments.get('errn', 0):
            raise aetools.Error, aetools.decodeerror(_arguments)
        # XXXX Optionally decode result
        if _arguments.has_key('----'):
            return _arguments['----']

    def quit(self, _no_object=None, _attributes={}, **_arguments):
        """quit: Quit Navigator
        Keyword argument _attributes: AppleEvent attribute dictionary
        """
        _code = 'aevt'
        _subcode = 'quit'

        if _arguments: raise TypeError, 'No optional args expected'
        if _no_object != None: raise TypeError, 'No direct arg expected'

        _reply, _arguments, _attributes = self.send(_code, _subcode,
                _arguments, _attributes)
        if _arguments.get('errn', 0):
            raise aetools.Error, aetools.decodeerror(_arguments)
        # XXXX Optionally decode result
        if _arguments.has_key('----'):
            return _arguments['----']

    def run(self, _no_object=None, _attributes={}, **_arguments):
        """run: Sent to an application when it is double-clicked
        Keyword argument _attributes: AppleEvent attribute dictionary
        """
        _code = 'aevt'
        _subcode = 'oapp'

        if _arguments: raise TypeError, 'No optional args expected'
        if _no_object != None: raise TypeError, 'No direct arg expected'

        _reply, _arguments, _attributes = self.send(_code, _subcode,
                _arguments, _attributes)
        if _arguments.get('errn', 0):
            raise aetools.Error, aetools.decodeerror(_arguments)
        # XXXX Optionally decode result
        if _arguments.has_key('----'):
            return _arguments['----']
#
# Indices of types declared in this module
#
# (All empty: this generated suite declares no classes, properties,
# comparators, or enumerations of its own.)
_classdeclarations = {
}
_propdeclarations = {
}
_compdeclarations = {
}
_enumdeclarations = {
}
| gpl-2.0 |
ToonTownInfiniteRepo/ToontownInfinite | toontown/toon/GroupPanel.py | 1 | 18189 | from direct.directnotify import DirectNotifyGlobal
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from otp.nametag import NametagGlobals
from direct.gui.DirectGui import *
from pandac.PandaModules import *
from direct.showbase import DirectObject
from toontown.toon import ToonAvatarPanel
from toontown.toontowngui import TTDialog
class GroupPanel(DirectObject.DirectObject):
    """On-screen panel for an elevator boarding group.

    Shows the member list, a destination selector (scrollable list + Go
    button for the leader, a read-only frame for everyone else), and a
    quit/hide control. Listens for group-status and battle events to keep
    the member list current.

    NOTE(review): the source arrived with all indentation stripped; block
    structure below was reconstructed from control-flow semantics and should
    be diffed against the original Toontown source before shipping.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('GroupPanel')

    def __init__(self, boardingParty):
        self.boardingParty = boardingParty
        self.leaderId = self.boardingParty.getGroupLeader(localAvatar.doId)
        self.elevatorIdList = self.boardingParty.getElevatorIdList()
        self.frame = None
        self.confirmQuitDialog = None
        self.goButton = None
        self.destScrollList = None
        self.destFrame = None
        self.goingToLabel = None
        self.destIndexSelected = 0
        self.__load()
        # Hide the panel while the sticker book is open, restore after.
        self.ignore('stickerBookEntered')
        self.accept('stickerBookEntered', self.__forceHide)
        self.ignore('stickerBookExited')
        self.accept('stickerBookExited', self.__forceShow)
        return

    def cleanup(self):
        """Destroy all GUI elements and release event hooks."""
        base.setCellsAvailable(base.leftCells, 1)
        self.quitButton.destroy()
        self.hideButton.destroy()
        self.showButton.destroy()
        self.scrollList.destroy()
        if self.goButton:
            self.goButton.destroy()
            self.goButton = None
        if self.destScrollList:
            self.destScrollList.destroy()
            self.destScrollList = None
        if self.destFrame:
            self.destFrame.destroy()
            self.destFrame = None
        if self.goingToLabel:
            self.goingToLabel.destroy()
            self.goingToLabel = None
        if self.frame:
            self.frame.destroy()
            self.frame = None
        self.leaveButton = None
        self.boardingParty = None
        self.ignoreAll()
        return

    def __load(self):
        """Build all the DirectGui widgets; layout depends on group size."""
        self.guiBg = loader.loadModel('phase_9/models/gui/tt_m_gui_brd_groupListBg')
        self.__defineConstants()
        # 4-person groups use the half-height background and shifted layout.
        if self.boardingParty.maxSize == 4:
            bgImage = self.guiBg.find('**/tt_t_gui_brd_memberListTop_half')
            bgImageZPos = 0.14
            frameZPos = -0.121442
            quitButtonZPos = -0.019958
        else:
            bgImage = self.guiBg.find('**/tt_t_gui_brd_memberListTop')
            bgImageZPos = 0
            frameZPos = 0.0278943
            quitButtonZPos = -0.30366
        guiButtons = loader.loadModel('phase_9/models/gui/tt_m_gui_brd_status')
        self.frame = DirectFrame(parent=base.a2dLeftCenter, relief=None, image=bgImage, image_scale=(0.5, 1, 0.5), image_pos=(0, 0, bgImageZPos), textMayChange=1, pos=(0.32, 0, 0))
        self.frameBounds = self.frame.getBounds()
        leaveButtonGui = loader.loadModel('phase_3.5/models/gui/tt_m_gui_brd_leaveBtn')
        leaveImageList = (leaveButtonGui.find('**/tt_t_gui_brd_leaveUp'),
         leaveButtonGui.find('**/tt_t_gui_brd_leaveDown'),
         leaveButtonGui.find('**/tt_t_gui_brd_leaveHover'),
         leaveButtonGui.find('**/tt_t_gui_brd_leaveUp'))
        self.leaderButtonImage = guiButtons.find('**/tt_t_gui_brd_statusLeader')
        self.availableButtonImage = guiButtons.find('**/tt_t_gui_brd_statusOn')
        self.battleButtonImage = guiButtons.find('**/tt_t_gui_brd_statusBattle')
        if localAvatar.doId == self.leaderId:
            quitText = TTLocalizer.QuitBoardingPartyLeader
        else:
            quitText = TTLocalizer.QuitBoardingPartyNonLeader
        self.disabledOrangeColor = Vec4(1, 0.5, 0.25, 0.9)
        self.quitButton = DirectButton(parent=self.frame, relief=None, image=leaveImageList, image_scale=0.065, command=self.__handleLeaveButton, text=('',
         quitText,
         quitText,
         ''), text_scale=0.06, text_fg=Vec4(1, 1, 1, 1), text_shadow=Vec4(0, 0, 0, 1), text_pos=(0.045, 0.0), text_align=TextNode.ALeft, pos=(0.223, 0, quitButtonZPos), image3_color=self.disabledOrangeColor)
        arrowGui = loader.loadModel('phase_9/models/gui/tt_m_gui_brd_arrow')
        hideImageList = (arrowGui.find('**/tt_t_gui_brd_arrow_up'), arrowGui.find('**/tt_t_gui_brd_arrow_down'), arrowGui.find('**/tt_t_gui_brd_arrow_hover'))
        showImageList = (arrowGui.find('**/tt_t_gui_brd_arrow_up'), arrowGui.find('**/tt_t_gui_brd_arrow_down'), arrowGui.find('**/tt_t_gui_brd_arrow_hover'))
        self.hideButton = DirectButton(parent=base.a2dLeftCenter, relief=None, text_pos=(0, 0.15), text_scale=0.06, text_align=TextNode.ALeft, text_fg=Vec4(0, 0, 0, 1), text_shadow=Vec4(1, 1, 1, 1), image=hideImageList, image_scale=(-0.35, 1, 0.5), pos=(0.04, 0, 0.03), scale=1.05, command=self.hide)
        self.showButton = DirectButton(parent=base.a2dLeftCenter, relief=None, text=('', TTLocalizer.BoardingGroupShow, TTLocalizer.BoardingGroupShow), text_pos=(0.03, 0), text_scale=0.06, text_align=TextNode.ALeft, text_fg=Vec4(1, 1, 1, 1), text_shadow=Vec4(0, 0, 0, 1), image=showImageList, image_scale=(0.35, 1, 0.5), pos=(0.04, 0, 0.03), scale=1.05, command=self.show)
        self.showButton.hide()
        self.frame.show()
        self.__makeAvatarNameScrolledList()
        # Only the leader can pick the destination; members just see it.
        if localAvatar.doId == self.leaderId:
            self.__makeDestinationScrolledList()
        else:
            self.__makeDestinationFrame()
        self.__makeGoingToLabel()
        self.accept('updateGroupStatus', self.__checkGroupStatus)
        self.accept('ToonBattleIdUpdate', self.__possibleGroupUpdate)
        base.setCellsAvailable([base.leftCells[1], base.leftCells[2]], 0)
        if self.boardingParty.isGroupLeader(localAvatar.doId):
            base.setCellsAvailable([base.leftCells[0]], 0)
        self.__addTestNames(self.boardingParty.maxSize)
        self.guiBg.removeNode()
        guiButtons.removeNode()
        leaveButtonGui.removeNode()
        arrowGui.removeNode()
        return

    def __defineConstants(self):
        # Colors for member-name buttons in their various GUI states.
        self.forcedHidden = False
        self.textFgcolor = Vec4(0.0, 0.6, 0.2, 1.0)
        self.textBgRolloverColor = Vec4(1, 1, 0, 1)
        self.textBgDownColor = Vec4(0.5, 0.9, 1, 1)
        self.textBgDisabledColor = Vec4(0.4, 0.8, 0.4, 1)

    def __handleLeaveButton(self):
        # Ask for confirmation unless we are already boarding an elevator.
        messenger.send('wakeup')
        if not base.cr.playGame.getPlace().getState() == 'elevator':
            self.confirmQuitDialog = TTDialog.TTDialog(style=TTDialog.YesNo, text=TTLocalizer.QuitBoardingPartyConfirm, command=self.__confirmQuitCallback)
            self.confirmQuitDialog.show()

    def __confirmQuitCallback(self, value):
        # value > 0 means the player confirmed leaving the group.
        if self.confirmQuitDialog:
            self.confirmQuitDialog.destroy()
        self.confirmQuitDialog = None
        if value > 0:
            if self.boardingParty:
                self.boardingParty.requestLeave()
        return

    def __handleGoButton(self):
        offset = self.destScrollList.getSelectedIndex()
        elevatorId = self.elevatorIdList[offset]
        self.boardingParty.requestGoToFirstTime(elevatorId)

    def __handleCancelGoButton(self):
        self.boardingParty.cancelGoToElvatorDest()

    def __checkGroupStatus(self):
        """Rebuild the member-name list from the boarding party state."""
        if not self.boardingParty:
            return
        self.notify.debug('__checkGroupStatus %s' % self.boardingParty.getGroupMemberList(localAvatar.doId))
        myMemberList = self.boardingParty.getGroupMemberList(localAvatar.doId)
        self.scrollList.removeAndDestroyAllItems(refresh=0)
        if myMemberList:
            for avId in myMemberList:
                avatarButton = self.__getAvatarButton(avId)
                if avatarButton:
                    self.scrollList.addItem(avatarButton, refresh=0)
            self.scrollList.refresh()

    def __possibleGroupUpdate(self, avId):
        """Refresh the list if a battle-state change concerns a group member."""
        self.notify.debug('GroupPanel __possibleGroupUpdate')
        if not self.boardingParty:
            return
        myMemberList = self.boardingParty.getGroupMemberList(localAvatar.doId)
        if avId in myMemberList:
            self.__checkGroupStatus()

    def __makeAvatarNameScrolledList(self):
        """Scrolled list of member names, clipped on the right edge."""
        friendsListGui = loader.loadModel('phase_3.5/models/gui/friendslist_gui')
        self.scrollList = DirectScrolledList(parent=self.frame, relief=None, incButton_image=(friendsListGui.find('**/FndsLst_ScrollUp'),
         friendsListGui.find('**/FndsLst_ScrollDN'),
         friendsListGui.find('**/FndsLst_ScrollUp_Rllvr'),
         friendsListGui.find('**/FndsLst_ScrollUp')), incButton_pos=(0.0, 0.0, -0.35), incButton_image1_color=Vec4(1.0, 0.9, 0.4, 0), incButton_image3_color=Vec4(1.0, 1.0, 0.6, 0), incButton_scale=(1.0, 1.0, -1.0), incButton_relief=None, decButton_image=(friendsListGui.find('**/FndsLst_ScrollUp'),
         friendsListGui.find('**/FndsLst_ScrollDN'),
         friendsListGui.find('**/FndsLst_ScrollUp_Rllvr'),
         friendsListGui.find('**/FndsLst_ScrollUp')), decButton_pos=(0.0, 0.0, 0.1), decButton_image1_color=Vec4(1.0, 1.0, 0.6, 0), decButton_image3_color=Vec4(1.0, 1.0, 0.6, 0), decButton_relief=None, itemFrame_pos=(-0.195, 0.0, 0.185), itemFrame_borderWidth=(0.1, 0.1), numItemsVisible=8, itemFrame_scale=1.0, forceHeight=0.07, items=[], pos=(0, 0, 0.075))
        # Clip long names at the panel's right edge.
        clipper = PlaneNode('clipper')
        clipper.setPlane(Plane(Vec3(-1, 0, 0), Point3(0.235, 0, 0)))
        clipNP = self.scrollList.attachNewNode(clipper)
        self.scrollList.setClipPlane(clipNP)
        friendsListGui.removeNode()
        return

    def __makeDestinationScrolledList(self):
        """Leader-only: scrollable destination picker plus Go button."""
        arrowGui = loader.loadModel('phase_9/models/gui/tt_m_gui_brd_gotoArrow')
        incrementImageList = (arrowGui.find('**/tt_t_gui_brd_arrowL_gotoUp'),
         arrowGui.find('**/tt_t_gui_brd_arrowL_gotoDown'),
         arrowGui.find('**/tt_t_gui_brd_arrowL_gotoHover'),
         arrowGui.find('**/tt_t_gui_brd_arrowL_gotoUp'))
        if self.boardingParty.maxSize == 4:
            zPos = -0.177083
        else:
            zPos = -0.463843
        bottomImage = self.guiBg.find('**/tt_t_gui_brd_memberListBtm_leader')
        self.destScrollList = DirectScrolledList(
            parent=self.frame,
            relief=None,
            image=bottomImage,
            image_scale=(0.5, 1, 0.5),
            incButton_image=incrementImageList,
            incButton_pos=(0.217302, 0, 0.07),
            incButton_image3_color=Vec4(1.0, 1.0, 0.6, 0.5),
            incButton_scale=(-0.5, 1, 0.5),
            incButton_relief=None,
            incButtonCallback=self.__informDestChange,
            decButton_image=incrementImageList,
            decButton_pos=(-0.217302, 0, 0.07),
            decButton_scale=(0.5, 1, 0.5),
            decButton_image3_color=Vec4(1.0, 1.0, 0.6, 0.5),
            decButton_relief=None,
            decButtonCallback=self.__informDestChange,
            itemFrame_pos=(0, 0, 0.06),
            itemFrame_borderWidth=(0.1, 0.1),
            numItemsVisible=1,
            itemFrame_scale=TTLocalizer.GPdestScrollList,
            forceHeight=0.07,
            items=[],
            pos=(0, 0, zPos),
            scrollSpeed=0.1)
        arrowGui.removeNode()
        self.__addDestNames()
        self.__makeGoButton()
        return

    def __addDestNames(self):
        for i in xrange(len(self.elevatorIdList)):
            destName = self.__getDestName(i)
            self.destScrollList.addItem(destName, refresh=0)
        self.destScrollList.refresh()

    def __getDestName(self, offset):
        # NOTE(review): if the elevator is not in doId2do, destName is never
        # assigned and this raises UnboundLocalError — preserved as-is.
        elevatorId = self.elevatorIdList[offset]
        elevator = base.cr.doId2do.get(elevatorId)
        if elevator:
            destName = elevator.getDestName()
        return destName

    def __makeDestinationFrame(self):
        """Non-leader: read-only display of the current destination."""
        destName = self.__getDestName(self.destIndexSelected)
        if self.boardingParty.maxSize == 4:
            zPos = -0.12
        else:
            zPos = -0.404267
        bottomImage = self.guiBg.find('**/tt_t_gui_brd_memberListBtm_nonLeader')
        self.destFrame = DirectFrame(parent=self.frame, relief=None, image=bottomImage, image_scale=(0.5, 1, 0.5), text=destName, text_align=TextNode.ACenter, text_scale=TTLocalizer.GPdestFrame, pos=(0, 0, zPos))
        return

    def __makeGoButton(self):
        """Leader-only Go / Cancel-Go button below the destination list."""
        goGui = loader.loadModel('phase_9/models/gui/tt_m_gui_brd_gotoBtn')
        self.goImageList = (goGui.find('**/tt_t_gui_brd_gotoUp'),
         goGui.find('**/tt_t_gui_brd_gotoDown'),
         goGui.find('**/tt_t_gui_brd_gotoHover'),
         goGui.find('**/tt_t_gui_brd_gotoUp'))
        self.cancelGoImageList = (goGui.find('**/tt_t_gui_brd_cancelGotoUp'),
         goGui.find('**/tt_t_gui_brd_cancelGotoDown'),
         goGui.find('**/tt_t_gui_brd_cancelGotoHover'),
         goGui.find('**/tt_t_gui_brd_cancelGotoUp'))
        if self.boardingParty.maxSize == 4:
            # NOTE(review): the first assignment is immediately overwritten —
            # looks like a decompilation artifact; preserved as-is.
            zPos = -0.028
            zPos = -0.0360483
        else:
            zPos = -0.0353787
        self.goButton = DirectButton(parent=self.destScrollList, relief=None, image=self.goImageList, image_scale=(0.48, 1, 0.48), command=self.__handleGoButton, text=('',
         TTLocalizer.BoardingGo,
         TTLocalizer.BoardingGo,
         ''), text_scale=TTLocalizer.GPgoButton, text_fg=Vec4(1, 1, 1, 1), text_shadow=Vec4(0, 0, 0, 1), text_pos=(0, -0.12), pos=(-0.003, 0, zPos))
        goGui.removeNode()
        return

    def __getAvatarButton(self, avId):
        """Return a name button for avId with a status icon, or None.

        Status icon: battle > leader > available. Also force-hides the panel
        while the local toon is in battle and restores it afterwards.
        """
        toon = base.cr.doId2do.get(avId)
        if not toon:
            return None
        toonName = toon.getName()
        inBattle = 0
        buttonImage = self.availableButtonImage
        if toon.battleId:
            inBattle = 1
            buttonImage = self.battleButtonImage
            if avId == localAvatar.doId:
                self.__forceHide()
        else:
            if avId == self.leaderId:
                buttonImage = self.leaderButtonImage
            if avId == localAvatar.doId:
                self.__forceShow()
        return DirectButton(parent=self.frame, relief=None, image=buttonImage, image_scale=(0.06, 1.0, 0.06), text=toonName, text_align=TextNode.ALeft, text_wordwrap=16, text_scale=0.04, text_pos=(0.05, -0.015), text_fg=self.textFgcolor, text1_bg=self.textBgDownColor, text2_bg=self.textBgRolloverColor, text3_fg=self.textBgDisabledColor, pos=(0, 0, 0.2), command=self.__openToonAvatarPanel, extraArgs=[toon, avId])

    def __openToonAvatarPanel(self, avatar, avId):
        if avId != localAvatar.doId and avatar:
            messenger.send('clickedNametag', [avatar])

    def __addTestNames(self, num):
        for i in xrange(num):
            avatarButton = self.__getAvatarButton(localAvatar.doId)
            self.scrollList.addItem(avatarButton, refresh=0)
        self.scrollList.refresh()

    def __isForcedHidden(self):
        # True only when the panel was hidden automatically (not by the user).
        if self.forcedHidden and self.frame.isHidden():
            return True
        else:
            return False

    def hide(self):
        self.frame.hide()
        self.hideButton.hide()
        self.showButton.show()

    def show(self):
        self.frame.show()
        self.forcedHidden = False
        self.showButton.hide()
        self.hideButton.show()

    def __forceHide(self):
        if not self.frame.isHidden():
            self.forcedHidden = True
            self.hide()

    def __forceShow(self):
        if self.__isForcedHidden():
            self.show()

    def __informDestChange(self):
        # Leader broadcasts the newly-selected destination to the party.
        self.boardingParty.informDestChange(self.destScrollList.getSelectedIndex())

    def changeDestination(self, offset):
        # Non-leader: update the read-only destination display.
        if localAvatar.doId != self.leaderId:
            self.destIndexSelected = offset
            if self.destFrame:
                self.destFrame['text'] = self.__getDestName(self.destIndexSelected)

    def scrollToDestination(self, offset):
        # Leader: move the destination scroll list to the given index.
        if localAvatar.doId == self.leaderId:
            if self.destScrollList:
                self.destIndexSelected = offset
                self.destScrollList.scrollTo(offset)

    def __makeGoingToLabel(self):
        if self.boardingParty.maxSize == 4:
            zPos = -0.0466546
        else:
            zPos = -0.331731
        self.goingToLabel = DirectLabel(parent=self.frame, relief=None, text=TTLocalizer.BoardingGoingTo, text_scale=0.045, text_align=TextNode.ALeft, text_fg=Vec4(0, 0, 0, 1), pos=(-0.1966, 0, zPos))
        return

    def disableQuitButton(self):
        if self.quitButton and not self.quitButton.isEmpty():
            self.quitButton['state'] = DGG.DISABLED

    def enableQuitButton(self):
        if self.quitButton and not self.quitButton.isEmpty():
            self.quitButton['state'] = DGG.NORMAL

    def disableGoButton(self):
        if self.goButton and not self.goButton.isEmpty():
            self.goButton['state'] = DGG.DISABLED
            self.goButton['image_color'] = Vec4(1, 1, 1, 0.4)

    def enableGoButton(self):
        if self.goButton and not self.goButton.isEmpty():
            self.goButton['state'] = DGG.NORMAL
            self.goButton['image_color'] = Vec4(1, 1, 1, 1)

    def disableDestinationScrolledList(self):
        if self.destScrollList and not self.destScrollList.isEmpty():
            self.destScrollList.incButton['state'] = DGG.DISABLED
            self.destScrollList.decButton['state'] = DGG.DISABLED

    def enableDestinationScrolledList(self):
        if self.destScrollList and not self.destScrollList.isEmpty():
            self.destScrollList.incButton['state'] = DGG.NORMAL
            self.destScrollList.decButton['state'] = DGG.NORMAL

    def changeGoToCancel(self):
        # Swap the Go button into its "cancel" appearance/behavior.
        if self.goButton and not self.goButton.isEmpty():
            self.goButton['image'] = self.cancelGoImageList
            self.goButton['text'] = (TTLocalizer.BoardingCancelGo,
             TTLocalizer.BoardingCancelGo,
             TTLocalizer.BoardingCancelGo,
             '')
            self.goButton['command'] = self.__handleCancelGoButton

    def changeCancelToGo(self):
        # Swap the Go button back to its normal appearance/behavior.
        if self.goButton and not self.goButton.isEmpty():
            self.goButton['image'] = self.goImageList
            self.goButton['text'] = ('',
             TTLocalizer.BoardingGo,
             TTLocalizer.BoardingGo,
             '')
            self.goButton['command'] = self.__handleGoButton
| mit |
carsonmcdonald/selenium | py/test/selenium/webdriver/common/form_handling_tests.py | 65 | 9908 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
import unittest
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import WebDriverException
class FormHandlingTests(unittest.TestCase):
def testShouldClickOnSubmitInputElements(self):
self._loadPage("formPage")
self.driver.find_element_by_id("submitButton").click()
self.driver.implicitly_wait(5)
self.assertEqual(self.driver.title, "We Arrive Here")
def testClickingOnUnclickableElementsDoesNothing(self):
self._loadPage("formPage")
self.driver.find_element_by_xpath("//body").click()
def testShouldBeAbleToClickImageButtons(self):
self._loadPage("formPage")
self.driver.find_element_by_id("imageButton").click()
self.driver.implicitly_wait(5)
self.assertEqual(self.driver.title, "We Arrive Here")
def testShouldBeAbleToSubmitForms(self):
self._loadPage("formPage")
self.driver.find_element_by_name("login").submit()
self.driver.implicitly_wait(5)
self.assertEqual(self.driver.title, "We Arrive Here")
def testShouldSubmitAFormWhenAnyInputElementWithinThatFormIsSubmitted(self):
self._loadPage("formPage")
self.driver.find_element_by_id("checky").submit()
self.driver.implicitly_wait(5)
self.assertEqual(self.driver.title, "We Arrive Here")
def testShouldSubmitAFormWhenAnyElementWihinThatFormIsSubmitted(self):
self._loadPage("formPage")
self.driver.find_element_by_xpath("//form/p").submit()
self.driver.implicitly_wait(5)
self.assertEqual(self.driver.title, "We Arrive Here")
def testShouldNotBeAbleToSubmitAFormThatDoesNotExist(self):
self._loadPage("formPage")
try:
self.driver.find_element_by_name("there is no spoon").submit()
self.fail("Expected NoSuchElementException to have been thrown")
except NoSuchElementException as e:
pass
except Exception as e:
self.fail("Expected NoSuchElementException but got " + str(e))
def testShouldBeAbleToEnterTextIntoATextAreaBySettingItsValue(self):
self._loadPage("javascriptPage")
textarea = self.driver.find_element_by_id("keyUpArea")
cheesey = "Brie and cheddar"
textarea.send_keys(cheesey)
self.assertEqual(textarea.get_attribute("value"), cheesey)
def testShouldEnterDataIntoFormFields(self):
self._loadPage("xhtmlTest")
element = self.driver.find_element_by_xpath("//form[@name='someForm']/input[@id='username']")
originalValue = element.get_attribute("value")
self.assertEqual(originalValue, "change")
element.clear()
element.send_keys("some text")
element = self.driver.find_element_by_xpath("//form[@name='someForm']/input[@id='username']")
newFormValue = element.get_attribute("value")
self.assertEqual(newFormValue, "some text")
def testShouldBeAbleToSelectACheckBox(self):
self._loadPage("formPage")
checkbox = self.driver.find_element_by_id("checky")
self.assertEqual(checkbox.is_selected(), False)
checkbox.click()
self.assertEqual(checkbox.is_selected(), True)
checkbox.click()
self.assertEqual(checkbox.is_selected(), False)
def testShouldToggleTheCheckedStateOfACheckbox(self):
self._loadPage("formPage")
checkbox = self.driver.find_element_by_id("checky")
self.assertEqual(checkbox.is_selected(), False)
checkbox.click()
self.assertEqual(checkbox.is_selected(), True)
checkbox.click()
self.assertEqual(checkbox.is_selected(), False)
def testTogglingACheckboxShouldReturnItsCurrentState(self):
self._loadPage("formPage")
checkbox = self.driver.find_element_by_id("checky")
self.assertEqual(checkbox.is_selected(), False)
checkbox.click()
self.assertEqual(checkbox.is_selected(), True)
checkbox.click()
self.assertEqual(checkbox.is_selected(), False)
def testShouldBeAbleToSelectARadioButton(self):
self._loadPage("formPage")
radioButton = self.driver.find_element_by_id("peas")
self.assertEqual(radioButton.is_selected(), False)
radioButton.click()
self.assertEqual(radioButton.is_selected(), True)
def testShouldBeAbleToSelectARadioButtonByClickingOnIt(self):
self._loadPage("formPage")
radioButton = self.driver.find_element_by_id("peas")
self.assertEqual(radioButton.is_selected(), False)
radioButton.click()
self.assertEqual(radioButton.is_selected(), True)
def testShouldReturnStateOfRadioButtonsBeforeInteration(self):
self._loadPage("formPage")
radioButton = self.driver.find_element_by_id("cheese_and_peas")
self.assertEqual(radioButton.is_selected(), True)
radioButton = self.driver.find_element_by_id("cheese")
self.assertEqual(radioButton.is_selected(), False)
# [ExpectedException(typeof(NotImplementedException))]
#def testShouldThrowAnExceptionWhenTogglingTheStateOfARadioButton(self):
# self._loadPage("formPage")
# radioButton = self.driver.find_element_by_id("cheese"))
# radioButton.click()
# [IgnoreBrowser(Browser.IE, "IE allows toggling of an option not in a multiselect")]
# [ExpectedException(typeof(NotImplementedException))]
#def testTogglingAnOptionShouldThrowAnExceptionIfTheOptionIsNotInAMultiSelect(self):
# self._loadPage("formPage")
#
# select = self.driver.find_element_by_name("selectomatic"))
# option = select.find_elements_by_tag_name("option"))[0]
# option.click()
def testTogglingAnOptionShouldToggleOptionsInAMultiSelect(self):
if self.driver.capabilities['browserName'] == 'chrome' and int(self.driver.capabilities['version'].split('.')[0]) < 16:
pytest.skip("deselecting preselected values only works on chrome >= 16")
self._loadPage("formPage")
select = self.driver.find_element_by_name("multi")
option = select.find_elements_by_tag_name("option")[0]
selected = option.is_selected()
option.click()
self.assertFalse(selected == option.is_selected())
option.click()
self.assertTrue(selected == option.is_selected())
def testShouldThrowAnExceptionWhenSelectingAnUnselectableElement(self):
self._loadPage("formPage")
element = self.driver.find_element_by_xpath("//title")
try:
element.click()
self.fail("Expected WebDriverException to have been thrown")
except WebDriverException as e:
pass
except Exception as e:
self.fail("Expected WebDriverException but got " + str(type(e)))
def testSendingKeyboardEventsShouldAppendTextInInputs(self):
self._loadPage("formPage")
element = self.driver.find_element_by_id("working")
element.send_keys("Some")
value = element.get_attribute("value")
self.assertEqual(value, "Some")
element.send_keys(" text")
value = element.get_attribute("value")
self.assertEqual(value, "Some text")
def testShouldBeAbleToClearTextFromInputElements(self):
self._loadPage("formPage")
element = self.driver.find_element_by_id("working")
element.send_keys("Some text")
value = element.get_attribute("value")
self.assertTrue(len(value) > 0)
element.clear()
value = element.get_attribute("value")
self.assertEqual(len(value), 0)
def testEmptyTextBoxesShouldReturnAnEmptyStringNotNull(self):
self._loadPage("formPage")
emptyTextBox = self.driver.find_element_by_id("working")
self.assertEqual(emptyTextBox.get_attribute("value"), "")
emptyTextArea = self.driver.find_element_by_id("emptyTextArea")
self.assertEqual(emptyTextBox.get_attribute("value"), "")
def testShouldBeAbleToClearTextFromTextAreas(self):
self._loadPage("formPage")
element = self.driver.find_element_by_id("withText")
element.send_keys("Some text")
value = element.get_attribute("value")
self.assertTrue(len(value) > 0)
element.clear()
value = element.get_attribute("value")
self.assertEqual(len(value), 0)
def testRadioShouldNotBeSelectedAfterSelectingSibling(self):
self._loadPage("formPage")
cheese = self.driver.find_element_by_id("cheese")
peas = self.driver.find_element_by_id("peas")
cheese.click()
self.assertEqual(True, cheese.is_selected())
self.assertEqual(False, peas.is_selected())
peas.click()
self.assertEqual(False, cheese.is_selected())
self.assertEqual(True, peas.is_selected())
def _pageURL(self, name):
return self.webserver.where_is(name + '.html')
def _loadSimplePage(self):
self._loadPage("simpleTest")
def _loadPage(self, name):
self.driver.get(self._pageURL(name))
| apache-2.0 |
asadziach/tensorflow | tensorflow/python/debug/__init__.py | 15 | 2169 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Public Python API of TensorFlow Debugger (tfdbg).
See the @{$python/tfdbg} guide.
@@add_debug_tensor_watch
@@watch_graph
@@watch_graph_with_blacklists
@@DebugTensorDatum
@@DebugDumpDir
@@load_tensor_from_event_file
@@has_inf_or_nan
@@DumpingDebugHook
@@DumpingDebugWrapperSession
@@LocalCLIDebugHook
@@LocalCLIDebugWrapperSession
@@WatchOptions
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-imports
from tensorflow.python.debug.lib.debug_data import DebugDumpDir
from tensorflow.python.debug.lib.debug_data import DebugTensorDatum
from tensorflow.python.debug.lib.debug_data import has_inf_or_nan
from tensorflow.python.debug.lib.debug_data import load_tensor_from_event_file
from tensorflow.python.debug.lib.debug_utils import add_debug_tensor_watch
from tensorflow.python.debug.lib.debug_utils import watch_graph
from tensorflow.python.debug.lib.debug_utils import watch_graph_with_blacklists
from tensorflow.python.debug.wrappers.dumping_wrapper import DumpingDebugWrapperSession
from tensorflow.python.debug.wrappers.framework import WatchOptions
from tensorflow.python.debug.wrappers.hooks import DumpingDebugHook
from tensorflow.python.debug.wrappers.hooks import LocalCLIDebugHook
from tensorflow.python.debug.wrappers.local_cli_wrapper import LocalCLIDebugWrapperSession
from tensorflow.python.util import all_util as _all_util

# Prune module members not listed in the @@-exports of the module docstring
# above, keeping the public tfdbg API surface explicit.
_all_util.remove_undocumented(__name__)
| apache-2.0 |
futurulus/scipy | scipy/sparse/tests/test_extract.py | 122 | 1388 | """test sparse matrix construction functions"""
from __future__ import division, print_function, absolute_import
from numpy.testing import TestCase, assert_equal
from scipy.sparse import csr_matrix
import numpy as np
from scipy.sparse import extract
class TestExtract(TestCase):
    """Tests for the sparse extraction helpers find(), tril() and triu()."""

    def setUp(self):
        # A mix of shapes and sparsity patterns, including all-zero matrices.
        self.cases = [
            csr_matrix([[1,2]]),
            csr_matrix([[1,0]]),
            csr_matrix([[0,0]]),
            csr_matrix([[1],[2]]),
            csr_matrix([[1],[0]]),
            csr_matrix([[0],[0]]),
            csr_matrix([[1,2],[3,4]]),
            csr_matrix([[0,1],[0,0]]),
            csr_matrix([[0,0],[1,0]]),
            csr_matrix([[0,0],[0,0]]),
            csr_matrix([[1,2,0,0,3],[4,5,0,6,7],[0,0,8,9,0]]),
            csr_matrix([[1,2,0,0,3],[4,5,0,6,7],[0,0,8,9,0]]).T,
        ]

    def test_find(self):
        # Bug fix: this was named 'find' (never collected as a test), passed
        # the csr_matrix constructor arguments in the wrong order
        # (((I,J),V) instead of (V,(I,J))), and compared a dense array
        # against a sparse matrix.  find() must round-trip: rebuilding from
        # (rows, cols, values) reproduces A.
        for A in self.cases:
            I,J,V = extract.find(A)
            assert_equal(A.toarray(),
                         csr_matrix((V, (I, J)), shape=A.shape).toarray())

    def test_tril(self):
        for A in self.cases:
            B = A.toarray()
            for k in [-3,-2,-1,0,1,2,3]:
                assert_equal(extract.tril(A,k=k).toarray(), np.tril(B,k=k))

    def test_triu(self):
        for A in self.cases:
            B = A.toarray()
            for k in [-3,-2,-1,0,1,2,3]:
                assert_equal(extract.triu(A,k=k).toarray(), np.triu(B,k=k))
| bsd-3-clause |
kenorb/BitTorrent | twisted/conch/ui/ansi.py | 2 | 7259 | # Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
#
"""Module to parse ANSI escape sequences
Maintainer: U(Jean-Paul Calderone <exarkun@twistedmatrix.com>
"""
import string
# Twisted imports
from twisted.python import log
class ColorText:
    """
    A run of text together with its foreground/background colors and
    display attributes (visibility, bold, underline, flash, reverse).
    """

    # Single-letter color codes; bold variants are the uppercase letters.
    COLORS = ('b', 'r', 'g', 'y', 'l', 'm', 'c', 'w')
    BOLD_COLORS = tuple([x.upper() for x in COLORS])
    BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(len(COLORS))

    # Human-readable color names, indexed like COLORS.
    COLOR_NAMES = (
        'Black', 'Red', 'Green', 'Yellow', 'Blue', 'Magenta', 'Cyan', 'White'
    )

    def __init__(self, text, fg, bg, display, bold, underline, flash, reverse):
        self.text = text
        self.display = display
        self.bold = bold
        self.underline = underline
        self.flash = flash
        self.reverse = reverse
        # Reverse video swaps foreground and background colors.
        if reverse:
            self.fg, self.bg = bg, fg
        else:
            self.fg, self.bg = fg, bg
class AnsiParser:
    """
    Parser class for ANSI escape sequences.

    Splits incoming text on ESC (0x1B), interprets SGR color sequences
    (terminator 'm'), recognizes but ignores cursor/erase/mode sequences,
    and hands the resulting runs to writeString() as ColorText elements.
    A partial escape sequence at the end of the input is buffered in
    self.prepend and consumed by the next parseString() call.
    """
    # Terminators for cursor movement ansi controls - unsupported
    CURSOR_SET = ('H', 'f', 'A', 'B', 'C', 'D', 'R', 's', 'u', 'd', 'G')
    # Terminators for erasure ansi controls - unsupported
    ERASE_SET = ('J', 'K', 'P')
    # Terminators for mode change ansi controls - unsupported
    MODE_SET = ('h', 'l')
    # Terminators for keyboard assignment ansi controls - unsupported
    ASSIGN_SET = ('p',)
    # Terminators for color change ansi controls - supported
    COLOR_SET = ('m',)

    SETS = (CURSOR_SET, ERASE_SET, MODE_SET, ASSIGN_SET, COLOR_SET)

    def __init__(self, defaultFG, defaultBG):
        self.defaultFG, self.defaultBG = defaultFG, defaultBG
        self.currentFG, self.currentBG = self.defaultFG, self.defaultBG
        self.bold, self.flash, self.underline, self.reverse = 0, 0, 0, 0
        self.display = 1
        # Holds a partial escape sequence carried over between calls.
        self.prepend = ''

    def stripEscapes(self, string):
        """
        Remove all ANSI color escapes from the given string.
        """
        result = ''
        show = 1
        i = 0
        L = len(string)
        while i < L:
            if show == 0 and string[i] in _sets:
                # Hit the terminator of the current escape; resume output.
                show = 1
            elif show:
                n = string.find('\x1B', i)
                if n == -1:
                    return result + string[i:]
                else:
                    result = result + string[i:n]
                    i = n
                    show = 0
            i = i + 1
        return result

    def writeString(self, colorstr):
        # Output hook: subclasses override this to consume ColorText runs.
        pass

    def parseString(self, str):
        """
        Turn a string input into a list of ColorText elements.
        """
        if self.prepend:
            str = self.prepend + str
            self.prepend = ''
        parts = str.split('\x1B')

        if len(parts) == 1:
            self.writeString(self.formatText(parts[0]))
        else:
            self.writeString(self.formatText(parts[0]))
            for s in parts[1:]:
                L = len(s)
                i = 0
                type = None
                # Scan past the sequence body to its terminator character.
                while i < L:
                    if s[i] not in string.digits+'[;?':
                        break
                    i+=1
                if not s:
                    # Bare ESC at end of input; wait for more data.
                    self.prepend = '\x1b'
                    return
                if s[0]!='[':
                    self.writeString(self.formatText(s[i+1:]))
                    continue
                else:
                    s=s[1:]
                    i-=1
                if i==L-1:
                    # 'ESC[' with no terminator yet; buffer and wait.
                    self.prepend = '\x1b['
                    return
                type = _setmap.get(s[i], None)
                if type is None:
                    continue

                if type == AnsiParser.COLOR_SET:
                    self.parseColor(s[:i + 1])
                    s = s[i + 1:]
                    self.writeString(self.formatText(s))
                elif type == AnsiParser.CURSOR_SET:
                    cursor, s = s[:i+1], s[i+1:]
                    self.parseCursor(cursor)
                    self.writeString(self.formatText(s))
                elif type == AnsiParser.ERASE_SET:
                    erase, s = s[:i+1], s[i+1:]
                    self.parseErase(erase)
                    self.writeString(self.formatText(s))
                elif type == AnsiParser.MODE_SET:
                    mode, s = s[:i+1], s[i+1:]
                    #self.parseErase('2J')
                    self.writeString(self.formatText(s))
                elif i == L:
                    self.prepend = '\x1B[' + s
                else:
                    log.msg('Unhandled ANSI control type: %c' % (s[i],))
                    s = s[i + 1:]
                    self.writeString(self.formatText(s))

    def parseColor(self, str):
        """
        Handle a single ANSI color (SGR) sequence, updating current state.
        """
        # Drop the trailing 'm'
        str = str[:-1]
        if not str:
            str = '0'
        try:
            parts = map(int, str.split(';'))
        except ValueError:
            log.msg('Invalid ANSI color sequence (%d): %s' % (len(str), str))
            self.currentFG, self.currentBG = self.defaultFG, self.defaultBG
            return
        for x in parts:
            if x == 0:
                # Reset all attributes to defaults.
                self.currentFG, self.currentBG = self.defaultFG, self.defaultBG
                self.bold, self.flash, self.underline, self.reverse = 0, 0, 0, 0
                self.display = 1
            elif x == 1:
                self.bold = 1
            elif 30 <= x <= 37:
                self.currentFG = x - 30
            elif 40 <= x <= 47:
                self.currentBG = x - 40
            elif x == 39:
                self.currentFG = self.defaultFG
            elif x == 49:
                self.currentBG = self.defaultBG
            elif x == 4:
                self.underline = 1
            elif x == 5:
                self.flash = 1
            elif x == 7:
                self.reverse = 1
            elif x == 8:
                self.display = 0
            elif x == 22:
                self.bold = 0
            elif x == 24:
                self.underline = 0
            elif x == 25:
                # Bug fix: SGR 25 is "blink off"; the blink attribute here is
                # named 'flash' (set by SGR 5).  The original assigned the
                # never-read 'self.blink', so flash was never cleared.
                self.flash = 0
            elif x == 27:
                self.reverse = 0
            elif x == 28:
                self.display = 1
            else:
                log.msg('Unrecognised ANSI color command: %d' % (x,))

    def parseCursor(self, cursor):
        pass

    def parseErase(self, erase):
        pass

    def pickColor(self, value, mode, BOLD = ColorText.BOLD_COLORS):
        # mode is truthy for backgrounds (never bold); foregrounds use the
        # bold variant when the bold attribute is set.
        if mode:
            return ColorText.COLORS[value]
        else:
            return self.bold and BOLD[value] or ColorText.COLORS[value]

    def formatText(self, text):
        return ColorText(
            text,
            self.pickColor(self.currentFG, 0),
            self.pickColor(self.currentBG, 1),
            self.display, self.bold, self.underline, self.flash, self.reverse
        )
# Flat string of every recognized terminator character, and a lookup from
# each terminator character back to the SET tuple it belongs to.
_sets = ''.join(''.join(group) for group in AnsiParser.SETS)
_setmap = dict((char, group) for group in AnsiParser.SETS for char in group)
| gpl-3.0 |
nephomaniac/nephoria | nephoria/testcases/boto/ec2/network/net_tests_classic.py | 1 | 95035 | #!/usr/bin/python
# Software License Agreement (BSD License)
#
# Copyright (c) 2009-2011, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms, with or
# without modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author:
__author__ = 'matt.clark@eucalyptus.com'
'''
Test case class to test points of network security groups
See individual test descriptions for test objectives.
test1:
Definition:
Create test instances within each zone within security group1. This security group is authorized for
ssh access from 0.0.0.0/0.
This test attempts the following:
-To run an instance in each zone and confirm it reaches 'running' state.
-Confirm the instance is ping-able from the cc within a given timeout
-Establish and verify an ssh session directly from the local machine running this test.
-Place ssh key on instance for later use
-Add instance to global 'group1_instances'
test2:
Definition:
This test attempts to create an instance in each within security group2 which should not
be authorized for any remote access (outside of the CC).
The test attempts the following:
-To run an instance in each zone and confirm it reaches 'running' state.
-Confirm the instance is ping-able from the cc within a given timeout
-Establish and verify an ssh session using the cc as a proxy.
-Place ssh key on instance for later use
-Add instance to global 'group2_instances'
test3:
Definition:
This test attempts to set up security group rules between group1 and group2 to authorize group2 access
from group1. If use_cidr is True security groups will be setup using cidr notication ip/mask for each instance in
group1, otherwise the entire source group 1 will authorized.
the group will be
Test attempts to:
-Authorize security groups for inter group private ip access.
-Iterate through each zone and attempt to ssh from an instance in group1 to an instance in group2 over their
private ips.
test4:
Definition:
Test attempts to verify that the local machine cannot ssh to the instances within group2 which is not authorized
for ssh access from this source.
test5 (Multi-zone/cluster env):
Definition:
This test attempts to check connectivity for instances in the same security group, but in different zones.
Note: This test requires the CC have tunnelling enabled, or the CCs in each zone be on same
layer 2 network segment.
Test attempts to:
-Iterate through each zone and attempt to ssh from an instance in group1 to an instance in a separate zone
but same security group1 over their private ips.
test 6 (Multi-zone/cluster env):
Definition:
This test attempts to set up security group rules between group1 and group2 to authorize group2 access
from group1 across different zones.
If no_cidr is True security groups will be setup using cidr notication ip/mask for each instance in
group1, otherwise the entire source group 1 will authorized.
the group will be
Note: This test requires the CC have tunnelling enabled, or the CCs in each zone be on same
layer 2 network segment.
Test attempts to:
-Authorize security groups for inter group private ip access.
-Iterate through each zone and attempt to ssh from an instance in group1 to an instance in group2 over their
private ips.
'''
#todo: Make use of CC optional so test can be run with only creds and non-sys_admin user.
# CC only provides additional point of debug so can be removed from test for non-euca testing
#todo: Allow test to run with an sys_admin and non-sys_admin account, so debug can be provided through sys_admin and test can
# be run under non-sys_admin if desired.
from boto.ec2.instance import Instance
from paramiko import SSHException
from nephoria.aws.ec2.ec2ops import EC2ops
from nephoria.testcase_utils.cli_test_runner import CliTestRunner, SkipTestException
from nephoria.testcase_utils import wait_for_result, WaitForResultException
from nephoria.testcontroller import TestController
from nephoria.aws.ec2.euinstance import EuInstance
from cloud_utils.net_utils.sshconnection import SshConnection
from cloud_utils.net_utils.sshconnection import CommandExitCodeException, CommandTimeoutException
from cloud_utils.log_utils import red
from cloud_admin.backends.network.midget import Midget
from boto.exception import EC2ResponseError
from cloud_utils.net_utils import test_port_status
from cloud_utils.log_utils import get_traceback
import copy
import socket
import time
import os
import re
import sys
class TestZone():
    """Per-availability-zone test context: tracks the zone name and the
    test instance launched in each of the two security groups."""

    def __init__(self, zonename):
        # 'name' and 'zone' are aliases for the same zone identifier.
        self.name = self.zone = zonename
        self.test_instance_group1 = None
        self.test_instance_group2 = None
class MidoError(Exception):
    """Raised for Midonet (VPC backend) debug/verification failures."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        # Show the repr of the wrapped value, matching the original display.
        return '%r' % (self.value,)
class NetTestsClassic(CliTestRunner):
'''
self._vpc_backend = None
'''
@property
def subnet_id(self):
    # Optional CLI argument; None when launched without --subnet-id.
    return getattr(self.args, 'subnet_id', None)
@property
def test_controller(self):
    """Lazily built TestController wired from the CLI arguments."""
    tc = getattr(self, '_test_controller', None)
    if not tc:
        tc = TestController(hostname=self.args.clc,
                            password=self.args.password,
                            log_level=getattr(self.args, 'log_level', 'DEBUG'),
                            clouduser_name=self.args.test_user,
                            clouduser_account=self.args.test_account)
        self._test_controller = tc
    return tc

@test_controller.setter
def test_controller(self, value):
    # Allow clearing (None) or replacing with another TestController only.
    if not (value is None or isinstance(value, TestController)):
        raise ValueError('Can only set testcontroller to type TestController or None, '
                         'got:"{0}/{1}"'.format(value, type(value)))
    self._test_controller = value
@property
def user(self):
    # Cloud-user context used for the API calls under test.
    tc = self.test_controller
    return tc.user

@property
def admin(self):
    # Cloud admin context (account-level administration).
    tc = self.test_controller
    return tc.admin

@property
def sysadmin(self):
    # System-level (machine/ssh) context.
    tc = self.test_controller
    return tc.sysadmin
@property
def keypair(self):
    """Lazily fetch or create the ssh keypair used for test instances.

    Reuses any keypair that already has a local key file in the working
    directory; otherwise creates a new one named after this test.  The
    result is cached on first access.
    """
    kp = getattr(self, '_keypair', None)
    if not kp:
        try:
            keys = self.user.ec2.get_all_current_local_keys()
            if keys:
                # Any locally backed key will do; take the first.
                kp = keys[0]
            else:
                kp = self.user.ec2.create_keypair_and_localcert(
                    "{0}_key_{1}".format(self.name, time.time()))
            setattr(self, '_keypair', kp)
        except Exception as ke:
            # Fix: use 'except ... as ...' (the original 'except Exception, ke'
            # is py2-only syntax and inconsistent with the rest of this module).
            raise Exception("Failed to find/create a keypair, error:" + str(ke))
    return kp
@property
def zones(self):
    """Zones under test: the --zone argument (comma/space separated) when
    given, otherwise every availability zone the cloud reports."""
    zones = getattr(self, '_zones', None)
    if not zones:
        if self.args.zone:
            zones = str(self.args.zone).replace(',', ' ').split()
        else:
            zones = self.user.ec2.get_zones()
        if not zones:
            raise RuntimeError('No zones found to run this test?')
        self.log.debug('Running test against zones:' + ",".join(zones))
        setattr(self, '_zones', zones)
    return zones
def setup_test_security_groups(self):
    # Refresh (or recreate) the two security groups used throughout the
    # test run. group1 allows ssh + icmp from anywhere; group2 gets only
    # icmp here, with ssh rules added later by individual test methods.
    ### Add and authorize security groups
    # NOTE(review): these locals are unused; accessing the properties
    # triggers lazy creation of the groups as a side effect.
    group1 = self.group1
    group2 = self.group2
    if self.group1:
        try:
            # Re-fetch to confirm the group still exists cloud-side.
            self.group1 = self.user.ec2.get_security_group(id=self.group1.id)
        except EC2ResponseError as ER:
            if ER.status == 400:
                # 400 -> the group no longer exists; recreate it below.
                self.group1 = None
            else:
                raise ER
    if self.group2:
        try:
            self.group2 = self.user.ec2.get_security_group(id=self.group2.id)
        except EC2ResponseError as ER:
            if ER.status == 400:
                self.group2 = None
            else:
                raise ER
    if not self.group1:
        self.log.debug("Creating group1..")
        self.group1 = self.user.ec2.add_group(str(self.name) + "_group1_" + str(time.time()))
        self.log.debug("Authorize ssh for group1 from '0.0.0.0/0'")
        self.user.ec2.authorize_group(self.group1, port=22, protocol='tcp',
                                      cidr_ip='0.0.0.0/0')
        self.user.ec2.authorize_group(self.group1, protocol='icmp', port='-1',
                                      cidr_ip='0.0.0.0/0')
    if not self.group2:
        self.log.debug("Creating group2, will authorize later from rules within test methods..")
        self.group2 = self.user.ec2.add_group(str(self.name) + "_group2_" + str(time.time()))
        self.user.ec2.authorize_group(self.group2, protocol='icmp', port='-1',
                                      cidr_ip='0.0.0.0/0')
@property
def group1(self):
    """Primary test security group, lazily created: ssh (tcp/22) and all
    icmp are opened to 0.0.0.0/0 so instances are reachable remotely."""
    g1 = getattr(self, '_group1', None)
    if not g1:
        self.log.debug("Creating group1...")
        g1 = self.user.ec2.add_group(str(self.name) + "_group1_" + str(time.time()))
        self.log.debug("Authorize ssh for group1 from '0.0.0.0/0'")
        self.user.ec2.authorize_group(g1, port=22, protocol='tcp', cidr_ip='0.0.0.0/0')
        self.user.ec2.authorize_group(g1, port=-1, protocol='icmp', cidr_ip='0.0.0.0/0')
        self._group1 = g1
    return g1

@group1.setter
def group1(self, value):
    self._group1 = value
@property
def group2(self):
    """Secondary test security group, lazily created: only icmp is opened
    here; ssh rules are added later from within the test methods."""
    g2 = getattr(self, '_group2', None)
    if not g2:
        self.log.debug("Creating group2, will authorize later from rules "
                       "within test methods...")
        g2 = self.user.ec2.add_group(str(self.name) + "_group2_" + str(time.time()))
        self.user.ec2.authorize_group(g2, port=-1, protocol='icmp', cidr_ip='0.0.0.0/0')
        self._group2 = g2
    return g2

@group2.setter
def group2(self, value):
    self._group2 = value
@property
def group1_instances(self):
    # Instances launched into group1; the list is created on first access.
    if getattr(self, '_group1_instances', None) is None:
        self._group1_instances = []
    return self._group1_instances

@group1_instances.setter
def group1_instances(self, value):
    self._group1_instances = value
@property
def group2_instances(self):
    # Instances launched into group2; the list is created on first access.
    if getattr(self, '_group2_instances', None) is None:
        self._group2_instances = []
    return self._group2_instances

@group2_instances.setter
def group2_instances(self, value):
    self._group2_instances = value
@property
def image(self):
    """Image used for all test instances: the --emi argument when provided,
    otherwise the first instance-store-backed basic image found."""
    image = getattr(self, '_image', None)
    if not image:
        if self.args.emi:
            image = self.user.ec2.get_emi(emi=str(self.args.emi))
        else:
            image = self.user.ec2.get_emi(root_device_type="instance-store",
                                          basic_image=True)
        if not image:
            raise RuntimeError('couldnt find instance store image')
        self._image = image
    return image
@property
def vpc_backend(self):
    # Lazy, best-effort handle to the Midonet VPC backend for extra debug
    # output.  Returns None on EC2-Classic clouds, or when the backend
    # interface cannot be constructed (missing libs, connection failure).
    if not self.is_vpc_mode():
        return None
    if not hasattr(self, '_vpc_backend'):
        self._vpc_backend = None
    if not self._vpc_backend:
        # The CLC currently hosts the VPC backend/namespace.
        vpc_backend_host = self.sysadmin.clc_machine.hostname
        try:
            self._vpc_backend = Midget(vpc_backend_host, systemconnection=self.sysadmin)
        except ImportError as IE:
            # Midonet client libraries not installed; debug unavailable.
            self._vpc_backend = None
            self.errormsg('Not Creating VPC backend DEBUG interface, err:"{0}"'.format(str(IE)))
        except Exception as VBE:
            self._vpc_backend = None
            # NOTE(review): 'self.tester' is not defined anywhere visible in
            # this class; this error path would itself raise AttributeError.
            # Confirm whether this should be get_traceback() from log_utils.
            self.errormsg('FYI... Failed to create vpc backend interface, err:\n{0}'
                          '\nUnable to get VPC backend debug. Ignoring Error:"{1}"'
                          .format(self.tester.get_traceback(), str(VBE)))
            return None
    return self._vpc_backend
def errormsg(self, msg):
    # Log errors highlighted in red for visibility in the console output.
    text = red(msg)
    return self.log.error(text)
def authorize_group_for_instance_list(self, group, instances):
    """Open ssh (tcp/22) and all icmp in 'group' for each instance's
    private address (/32).  On failure, dumps the instance and group
    state for debugging before re-raising."""
    for instance in instances:
        assert isinstance(instance, EuInstance)
        cidr = instance.private_ip_address + "/32"
        try:
            self.user.ec2.authorize_group(group, protocol='tcp', port=22,
                                          cidr_ip=cidr)
            self.user.ec2.authorize_group(group, protocol='icmp', port='-1',
                                          cidr_ip=cidr)
        except:
            self.user.ec2.show_instance(instance)
            self.user.ec2.show_security_group(group)
            self.errormsg('Failed to authorize group:{0} to allow private ip for '
                          'instance:"{1}/{2}"'.format(group,
                                                      instance.id,
                                                      instance.private_ip_address))
            raise
def revoke_group_for_instance_list(self, group, instances):
    # Reverse of authorize_group_for_instance_list: drop the per-instance
    # ssh and icmp rules from the group.
    for instance in instances:
        assert isinstance(instance, EuInstance)
        cidr = instance.private_ip_address + "/32"
        self.user.ec2.revoke_security_group(group, from_port='22', protocol='tcp',
                                            cidr_ip=cidr)
        self.user.ec2.revoke_security_group(group, from_port='-1', protocol='icmp',
                                            cidr_ip=cidr)
def clean_method(self):
    # Tear down test resources (instances from both groups, then both
    # security groups) unless --no-clean was passed.  Generic failures
    # from each step are collected so one does not mask the others.
    if self.args.no_clean:
        self.status('No clean flag set, not cleaning test resources')
    else:
        errors = []
        ins = self.group1_instances
        ins.extend(self.group2_instances)
        try:
            self.user.ec2.terminate_instances(ins)
        except EC2ResponseError as ER:
            if ER.status == 400:
                # 400 -> resource already gone; nothing to clean.
                pass
            else:
                # NOTE(review): non-400 EC2 errors propagate immediately and
                # skip the remaining cleanup steps, while generic exceptions
                # below are only collected -- confirm this is intended.
                raise
        except Exception as E:
            errors.append(E)
            self.log.error("{0}\n{1}".format(get_traceback(), E))
        try:
            self.user.ec2.delete_group(self.group1)
        except EC2ResponseError as ER:
            if ER.status == 400:
                pass
            else:
                raise
        except Exception as E:
            errors.append(E)
            self.log.error("{0}\n{1}".format(get_traceback(), E))
        try:
            self.user.ec2.delete_group(self.group2)
        except EC2ResponseError as ER:
            if ER.status == 400:
                pass
            else:
                raise
        except Exception as E:
            errors.append(E)
            self.log.error("{0}\n{1}".format(get_traceback(), E))
        if errors:
            raise RuntimeError("Error in cleanup:{0}"
                               .format(", ".join(str(e) for e in errors)))
def is_vpc_mode(self):
    # VPC clouds advertise 'VPC' among their supported platforms.
    platforms = self.user.ec2.get_supported_platforms()
    return 'VPC' in platforms
def get_proxy_machine(self, instance, use_mido_gw=False):
    # Pick the backend machine that can reach the instance's private
    # network, to act as an ssh proxy: in VPC mode the CLC (or a Midonet
    # gateway when use_mido_gw is set); in classic modes the NC hosting
    # the instance (EDGE) or the CC (managed modes).
    if self.is_vpc_mode():
        if use_mido_gw:
            gw_hosts = self.user.ec2.get_backend_vpc_gateways()
            if not gw_hosts:
                raise ValueError('No backend VPC gateways were found?')
            # pick single gw host and ip for lookup purposes
            gw_host_ip = self.user.ec2.clc.ssh.get_ipv4_lookup(gw_hosts[0])
            if not gw_host_ip:
                raise RuntimeError('Failed to lookup ipv4 address for host:"{0}"'
                                   .format(gw_hosts[0]))
            gw_host_ip = gw_host_ip[0]
            gw_machine = self.sysadmin.get_host_by_hostname(gw_host_ip)
        else:
            gw_machine = self.sysadmin.clc_machine
        return gw_machine
    # Classic networking: route through NC for EDGE mode, otherwise the CC.
    prop = self.sysadmin.get_property('{0}.cluster.networkmode'.format(instance.placement))
    if prop.value.lower() == "edge":
        proxy_machine = self.get_active_nc_for_instance(instance)
    else:
        proxy_machine = self.get_active_cc_for_instance(instance)
    self.log.debug("Instance is running on: " + proxy_machine.hostname)
    return proxy_machine
def get_vpc_proxy_ssh_connection(self, instance):
    """
    Provides a means to communicate to instances within a VPC on their private interfaces
    from the VPC namespace (for now this is the CLC). This will act as a sudo proxy interface
    to the instances on their private network(s).
    :param instance: an instance object to connect to
    :returns: an SshConnection to the proxy machine whose cmd() wraps each
              command in an ssh hop to the instance's private address.
    """
    gw_machine = self.get_proxy_machine(instance=instance)
    self.log.debug('Using "{0}" as the internal proxy machine for instance:{1}'
                   .format(gw_machine.hostname, instance))
    if gw_machine:
        vpc_proxy_ssh = gw_machine.ssh
    else:
        raise ValueError('Could not find eutester machine for ip: "{0}"'
                         .format(gw_machine.hostname))
    keyname = None
    if instance.keypath:
        # Stage the instance's private key on the proxy so it can ssh onward.
        keyname = '{0}_{1}'.format(instance.id, os.path.basename(instance.keypath))
        try:
            vpc_proxy_ssh.sys('ls {0}'.format(keyname), code=0)
        except CommandExitCodeException:
            vpc_proxy_ssh.sftp_put(instance.keypath, keyname)
    if not hasattr(vpc_proxy_ssh, 'orig_cmd_method'):
        vpc_proxy_ssh.orig_cmd_method = vpc_proxy_ssh.cmd

    def newcmd(cmd, **kwargs):
        # Wrap the command in an ssh invocation executed inside the VPC's
        # network namespace on the proxy machine.
        # Bug fixes: the wrapped command string was previously built but
        # never executed (the raw cmd ran on the proxy itself), and the
        # namespace invocation was missing the 'exec' subcommand.
        ssh_cmd = ('ip netns exec {0} ssh -o StrictHostKeyChecking=no -n -i {1} {2}@{3} "{4}"'
                   .format(instance.vpc_id, keyname, instance.username,
                           instance.private_ip_address,
                           cmd))
        return vpc_proxy_ssh.orig_cmd_method(ssh_cmd, **kwargs)
    vpc_proxy_ssh.cmd = newcmd
    return vpc_proxy_ssh
    def create_proxy_ssh_connection_to_instance(self, instance, retry=10):
        """
        Create an SshConnection to the instance's private ip, tunneled through a
        proxy machine (CC/NC). In VPC mode this delegates to
        get_vpc_proxy_ssh_connection() instead.

        :param instance: instance object to connect to
        :param retry: maximum number of connection attempts
        :returns: established SshConnection object
        :raises Exception: if no connection could be made within 'retry' attempts
        """
        if self.is_vpc_mode():
            return self.get_vpc_proxy_ssh_connection(instance=instance)
        proxy_machine = self.get_proxy_machine(instance)
        ssh = None
        attempts = 0
        elapsed = 0
        next_retry_time = 10
        start = time.time()
        # Proxy key may be None; SshConnection is expected to fall back to password auth.
        proxy_keypath=proxy_machine.ssh.keypath or None
        while not ssh and attempts < retry:
            attempts += 1
            elapsed = int(time.time()-start)
            self.log.debug('Attempting to ssh to instances private ip:' + str(instance.private_ip_address) +
                           'through the cc ip:' + str(proxy_machine.hostname) + ', attempts:' +str(attempts) + "/" + str(retry) +
                           ", elapsed:" + str(elapsed))
            try:
                ssh = SshConnection(host=instance.private_ip_address,
                                    keypath=instance.keypath,
                                    proxy=proxy_machine.hostname,
                                    proxy_username=proxy_machine.ssh.username,
                                    proxy_password=proxy_machine.ssh.password,
                                    proxy_keypath=proxy_keypath)
            except Exception, ce:
                # Only dump the full traceback on the final failed attempt to keep logs readable.
                tb = get_traceback()
                if attempts >= retry:
                    self.log.debug("\n" + tb,linebyline=False)
                self.log.debug('Failed to connect error:' + str(ce))
            # NOTE(review): this sleep also runs after a *successful* connection
            # (whenever attempts < retry), adding an unnecessary delay — confirm
            # whether it was meant to be on the failure path only.
            if attempts < retry:
                time.sleep(next_retry_time)
        if not ssh:
            raise Exception('Could not ssh to instances private ip:' + str(instance.private_ip_address) +
                        ' through the cc ip:' + str(proxy_machine.hostname) + ', attempts:' +str(attempts) + "/" + str(retry) +
                        ", elapsed:" + str(elapsed))
        return ssh
def get_active_cc_for_instance(self,instance,refresh_active_cc=30):
elapsed = time.time()-self.cc_last_checked
self.cc_last_checked = time.time()
if elapsed > refresh_active_cc:
use_cached_list = False
else:
use_cached_list = True
cc = self.sysadmin.get_hosts_for_cluster_controllers(partition=instance.placement)[0]
return cc
def get_active_nc_for_instance(self,instance):
nc = self.sysadmin.get_hosts_for_node_controllers(instanceid=instance.id)[0]
return nc.machine
    def ping_instance_private_ip_from_euca_internal(self, instance, ping_timeout=120):
        """
        Verify the instance's private ip answers ping when pinged from the
        internal euca proxy host (CC/NC, or from the VPC's network namespace
        on the proxy in VPC mode). Dumps network debug info on failure.

        :param instance: EuInstance to ping
        :param ping_timeout: seconds to wait for a successful ping
        :raises WaitForResultException: if the ping never succeeds in time
        """
        assert isinstance(instance, EuInstance)
        proxy_machine = self.get_proxy_machine(instance)
        net_namespace = None
        if self.is_vpc_mode():
            # In VPC mode the ping must be issued from within the VPC's netns.
            net_namespace = instance.vpc_id
        vpc_backend_retries = 0
        max_retries = 1
        # NOTE(review): both failure branches below re-raise, so this loop never
        # actually retries after a failure; when a vpc_backend is present a
        # successful ping runs the wait twice (iterations 0 and 1) — confirm
        # whether a retry-after-backend-restart was intended here.
        while vpc_backend_retries <= max_retries:
            if not self.vpc_backend:
                # No VPC backend: force a single pass through the loop.
                vpc_backend_retries = max_retries + 1
            try:
                wait_for_result(self._ping_instance_private_ip_from_euca_internal,
                                result=True,
                                timeout=ping_timeout,
                                instance=instance,
                                proxy_machine=proxy_machine,
                                net_namespace=net_namespace)
            except WaitForResultException:
                self.errormsg('Failed to ping instance: {0}, private ip:{1} from internal host: {2}'
                              .format(instance.id,
                                      instance.private_ip_address,
                                      proxy_machine.hostname))
                self.errormsg('Ping failure. Fetching network debug info from internal host...')
                proxy_machine.dump_netfail_info(ip=instance.private_ip_address,
                                                net_namespace=net_namespace)
                self.errormsg('Done dumping network debug info from the "internal euca proxy host" @ '
                              '{0} '
                              'used in attempting to ping instance {1}, private ip: {2}'
                              .format(proxy_machine.hostname,
                                      instance.id,
                                      instance.private_ip_address))
                if self.vpc_backend:
                    self.dump_vpc_backend_info_for_instance(instance)
                    raise
                else:
                    raise
            vpc_backend_retries += 1
        self.log.debug('Successfully pinged instance: {0}, private ip:{1} from internal host: {2}'
                       .format(instance.id,
                               instance.private_ip_address,
                               proxy_machine.hostname))
def dump_vpc_backend_info_for_instance(self, instance):
if self.vpc_backend:
try:
self.vpc_backend.show_instance_network_summary(instance)
except Exception, ME:
self.log.debug('{0}\nCould not dump vpc backend debug, err:{1}'
.format(ME, get_traceback()))
def _ping_instance_private_ip_from_euca_internal(self,
instance,
proxy_machine,
net_namespace=None):
assert isinstance(instance, EuInstance)
try:
proxy_machine.ping_check(instance.private_ip_address,
net_namespace=net_namespace)
return True
except Exception, PE:
self.log.debug('Ping Exception:{0}'.format(PE))
self.log.debug('Failed to ping instance: {0}, private ip:{1} from internal host: {2}'
.format(instance.id,
instance.private_ip_address,
proxy_machine.hostname))
return False
def is_port_in_use_on_instance(self, instance, port, tcp=True, ipv4=True):
args = '-ln'
if tcp:
args += 't'
else:
args += 'u'
if ipv4:
args += 'A inet'
else:
args += 'A inet6'
use = instance.sys("netstat " + str(args) + " | awk '$6 ==" +
' "LISTEN" && $4 ~ ".' + str(port) +
'"' + "' | grep LISTEN")
if use:
self.log.debug('Port {0} IS in use on instance:'
.format(port, instance.id))
return True
else:
self.log.debug('Port {0} IS NOT in use on instance:'
.format(port, instance.id))
False
def is_port_range_in_use_on_instance(self, instance, start, end,
tcp=True, ipv4=True):
for x in xrange(start, end):
if self.is_port_in_use_on_instance(instance=instance,
port=x,
tcp=tcp,
ipv4=ipv4):
return True
return False
def show_instance_security_groups(self, instance):
assert isinstance(instance, Instance)
self.status('Showing security groups for instance: {0}'.format(instance.id))
for group in instance.groups:
self.user.ec2.show_security_group(group)
################################################################
# Test Methods
################################################################
    def test1_create_instance_in_zones_for_security_group1(self, ping_timeout=180, zones=None):
        '''
        Definition:
        Create test instances within each zone within security group1. This security group is authorized for
        ssh access from 0.0.0.0/0.
        This test attempts the following:
            -To run an instance in each zone and confirm it reaches 'running' state.
            -Confirm the instance is ping-able from the cc within a given timeout
            -Establish and verify an ssh session directly from the local machine running this test.
            -Place ssh key on instance for later use
            -Add instance to global 'group1_instances'

        :param ping_timeout: seconds to wait for a successful ping per instance
        :param zones: zone name or list of zone names; defaults to self.zones
        '''
        # Normalize the zones argument to a list, defaulting to all known zones.
        if zones and not isinstance(zones, list):
            zones = [zones]
        zones = zones or self.zones
        for zone in zones:
            #Create an instance, monitor it's state but disable the auto network/connect checks till afterward
            instance = self.user.ec2.run_image(image=self.image,
                                               keypair=self.keypair,
                                               group=self.group1,
                                               zone=zone,
                                               auto_connect=False,
                                               subnet_id=self.subnet_id,
                                               monitor_to_running=False)[0]
            self.group1_instances.append(instance)
        # Wait for all started instances to reach 'running' before network checks.
        self.user.ec2.monitor_euinstances_to_running(self.group1_instances)
        #Now run the network portion.
        for instance in self.group1_instances:
            self.status('Checking connectivity to:'
                        + str(instance.id) + ":" + str(instance.private_ip_address)
                        + ", zone:" + str(instance.placement) )
            assert isinstance(instance, EuInstance)
            self.log.debug('Attempting to ping instances private ip from cc...')
            self.ping_instance_private_ip_from_euca_internal(instance=instance, ping_timeout=ping_timeout)
            self.log.debug('Attempting to ssh to instance from local test machine...')
            self.log.debug('Check some debug information re this data connection in this security group first...')
            self.show_instance_security_groups(instance)
            self.user.ec2.does_instance_sec_group_allow(instance=instance,
                                                        src_addr=None,
                                                        protocol='tcp',
                                                        port=22)
            try:
                start = time.time()
                instance.connect_to_instance(timeout=120)
            except Exception, ConnectErr:
                # On connect failure in VPC mode, dump backend info before re-raising.
                if self.vpc_backend:
                    self.errormsg('{0}\n{1}\nFailed to connect to instance:"{2}", dumping info '
                                  .format(ConnectErr, get_traceback(), instance.id))
                    self.dump_vpc_backend_info_for_instance(instance)
                    raise ConnectErr
            if instance.ssh:
                self.status('SSH connection to instance:' + str(instance.id) +
                            ' successful to public ip:' + str(instance.ip_address) +
                            ', zone:' + str(instance.placement))
            else:
                raise RuntimeError('intance:{0} ssh is none, failed to connect after {1} seconds?'
                                   .format(instance.id, int(time.time()-start)))
            # Sanity-check the session and stage the keypair on the instance for
            # later instance-to-instance ssh tests.
            instance.sys('uname -a', code=0)
            instance.ssh.sftp_put(instance.keypath, os.path.basename(instance.keypath))
            instance.sys('chmod 0600 ' + os.path.basename(instance.keypath), code=0 )
def test2_create_instance_in_zones_for_security_group2(self, ping_timeout=180,
auto_connect=False, zones=None):
'''
Definition:
This test attempts to create an instance in each zone within security group2 which should not
be authorized for any remote access (outside of the CC).
The test attempts the following:
-To run an instance in each zone and confirm it reaches 'running' state.
-Confirm the instance is ping-able from the cc within a given timeout
-Establish and verify an ssh session using the cc as a proxy.
-Place ssh key on instance for later use
-Add instance to global 'group2_instances'
:params ping_timeout: Int Time to wait for ping for successful ping to instance(s)
:params auto_connect: Boolean. If True will auto ssh to instance(s), if False will
use cc/nc as ssh proxy
:params zones: List of names of Availability zone(s) to create instances in
'''
if zones and not isinstance(zones, list):
zones = [zones]
zones = zones or self.zones
for zone in self.zones:
instance = self.user.ec2.run_image(image=self.image,
keypair=self.keypair,
group=self.group2,
zone=zone,
subnet_id = self.subnet_id,
auto_connect=auto_connect,
monitor_to_running=False)[0]
self.group2_instances.append(instance)
self.user.ec2.monitor_euinstances_to_running(self.group2_instances)
for instance in self.group2_instances:
self.status('Checking connectivity to:' + str(instance.id) + ":" + str(instance.private_ip_address)+
", zone:" + str(instance.placement) )
assert isinstance(instance, EuInstance)
self.show_instance_security_groups(instance)
self.ping_instance_private_ip_from_euca_internal(instance=instance,
ping_timeout=ping_timeout)
if not auto_connect:
self.status('Make sure ssh is working through an internal euca path before '
'trying between instances...')
instance.proxy_ssh = self.create_proxy_ssh_connection_to_instance(instance)
self.status('SSH connection to instance:' + str(instance.id) +
' successful to private ip:' + str(instance.private_ip_address) +
', zone:' + str(instance.placement))
else:
instance.proxy_ssh = instance.ssh
instance.proxy_ssh.sys('uname -a', code=0)
self.status('Uploading keypair to instance in group2...')
instance.proxy_ssh.sftp_put(instance.keypath, os.path.basename(instance.keypath))
instance.proxy_ssh.sys('chmod 0600 ' + os.path.basename(instance.keypath), code=0 )
self.status('Done with create instance security group2:' + str(instance.id))
    def test3_test_ssh_between_instances_in_diff_sec_groups_same_zone(self):
        '''
        Definition:
        This test attempts to set up security group rules between group1 and group2 to authorize group2 access
        from group1. If no_cidr is True security groups will be setup using cidr notation ip/mask for each instance in
        group1, otherwise the entire source group 1 will be authorized.
        Test attempts to:
            -Authorize security groups for inter group private ip access.
            -Iterate through each zone and attempt to ssh from an instance in group1 to an instance in group2 over their
             private ips.
            - Run same 2 tests from above by authorizing a SecurityGroup
        '''
        def check_instance_connectivity():
            # Runs the ssh connectivity check once per zone. When a vpc_backend
            # is present the whole check is retried once more on failure paths
            # (see the retry counter logic below).
            max_retries = 1
            vpc_backend_retries = 0
            while vpc_backend_retries <= max_retries:
                if not self.vpc_backend:
                    # No VPC backend: force a single pass through the loop.
                    vpc_backend_retries = max_retries + 1
                try:
                    for zone in self.zones:
                        instance1 = None
                        instance2 = None
                        # Pick one instance per group in this zone for the test.
                        for instance in self.group1_instances:
                            if instance.placement == zone:
                                assert isinstance(instance, EuInstance)
                                instance1 = instance
                                break
                        if not instance1:
                            raise Exception('Could not find instance in group1 for zone:' +
                                            str(zone))
                        for instance in self.group2_instances:
                            if instance.placement == zone:
                                assert isinstance(instance, EuInstance)
                                instance2 = instance
                                break
                        if not instance2:
                            raise Exception('Could not find instance in group2 for zone:'
                                            + str(zone))
                        self.status(
                            'Attempting to run ssh command "uname -a" between instances across '
                            'security groups:\n'
                            + str(instance1.id) + '/sec grps(' + str(instance1.security_groups) +
                            ") --> "
                            + str(instance2.id) + '/sec grps(' + str(instance2.security_groups) + ")\n"
                            + "Current test run in zone: " + str(zone))
                        self.log.debug('Check some debug information re this data connection in this '
                                       'security group first...')
                        self.show_instance_security_groups(instance2)
                        self.user.ec2.does_instance_sec_group_allow(instance=instance2,
                                                                    src_addr=instance1.private_ip_address,
                                                                    protocol='tcp',
                                                                    port=22)
                        self.log.debug('Reset ssh connection to instance:{0} first...'
                                       .format(instance1.id))
                        instance1.connect_to_instance()
                        self.status('Now Running the ssh command which checks connectivity from '
                                    'instance1 to instance2...')
                        # Instance-to-instance ssh using the keypair staged on
                        # instance1 by the earlier create-instance tests.
                        instance1.sys("ssh -o StrictHostKeyChecking=no -i "
                                      + str(os.path.basename(instance1.keypath))
                                      + " root@" + str(instance2.private_ip_address)
                                      + " 'uname -a'", code=0)
                        self.status('"{0}" to "{1}" connectivity test succeeded'.format(instance1.id,
                                                                                       instance2.id))
                except Exception, ConnectivityErr:
                    if vpc_backend_retries:
                        # Second (retry) pass also failed.
                        if self.vpc_backend:
                            self.errormsg('Retry still failed connectivity test after restarting '
                                          'vpc backend')
                            raise ConnectivityErr
                    elif self.vpc_backend:
                        # First failure with a VPC backend: dump debug info, then raise.
                        # NOTE(review): 'instance' here is the leftover inner loop
                        # variable, not necessarily the failing pair — confirm whether
                        # instance1/instance2 was intended in this error message.
                        self.dump_vpc_backend_info_for_instance(instance1)
                        self.dump_vpc_backend_info_for_instance(instance2)
                        self.errormsg('Could not connect to instance:"{0}"'
                                      .format(instance.id))
                        raise ConnectivityErr
                    else:
                        raise ConnectivityErr
                else:
                    if self.vpc_backend and vpc_backend_retries:
                        # Success, but only on the retry pass — surface as an error.
                        self.log.debug('MidoRetries:{0}'.format(vpc_backend_retries))
                        raise MidoError('Connectivity test passed, but only after '
                                        'restarting Midolman.')
                    else:
                        self.status('Ssh between instances passed')
                        break
        # Phase 1: authorize group2 from each group1 instance's IP and verify.
        self.status('Authorizing access from group1 individual instance IPs to group2, '
                    'then checking connectivity...')
        self.authorize_group_for_instance_list(self.group2, self.group1_instances)
        self.status('group2 should now allow access from each individual instance IP from '
                    'group1...')
        self.user.ec2.show_security_group(self.group2)
        check_instance_connectivity()
        # Phase 2: revoke the per-IP rules and authorize by source group instead.
        self.status('Revoking auth for group1 instances from group2, then re-add using '
                    'the using the group id instead of invididual instance IPs...')
        self.revoke_group_for_instance_list(self.group2, self.group1_instances)
        self.status('group2 should no longer have authorization from the individual instance IPs'
                    'from group1...')
        self.user.ec2.show_security_group(self.group2)
        self.status('Auth group1 access to group2...')
        self.user.ec2.authorize_group(self.group2, cidr_ip=None, port=22,
                                      protocol='tcp', src_security_group=self.group1)
        self.user.ec2.authorize_group(self.group2, cidr_ip=None, port=None,
                                      protocol='icmp', src_security_group=self.group1)
        self.status('Group2 should now allow access from source group1 on tcp/22 and icmp...')
        self.user.ec2.show_security_group(self.group2)
        check_instance_connectivity()
def test4_attempt_unauthorized_ssh_from_test_machine_to_group2(self):
'''
Description:
Test attempts to verify that the local machine cannot ssh to the instances within group2 which is not authorized
for ssh access from this source.
'''
for instance in self.group2_instances:
assert isinstance(instance, EuInstance)
#Provide some debug information re this data connection in this security group
self.status('Attempting to ssh from local test machine to instance: {0}, '
'this should not be allowed...'.format(instance.id))
self.show_instance_security_groups(instance)
self.user.ec2.does_instance_sec_group_allow(instance=instance, src_addr=None, protocol='tcp',port=22)
try:
instance.reset_ssh_connection(timeout=5)
if self.vpc_backend:
try:
self.vpc_backend.show_instance_network_summary(instance)
except Exception, ME:
self.log.debug('{0}\nCould not dump Mido debug, err:{1}'
.format(ME, get_traceback()))
raise Exception('Was able to connect to instance: ' + str(instance.id) + ' in security group:'
+ str(self.group2.name))
except:
self.log.debug('Success: Was not able to ssh from the local machine to instance in unauthorized sec group')
def test5_test_ssh_between_instances_in_same_sec_groups_different_zone(self):
'''
Definition:
This test attempts to check connectivity for instances in the same security group, but in different zones.
Note: This test requires the CC have tunnelling enabled, or the CCs in each zone be on same
layer 2 network segment.
Test attempts to:
-Iterate through each zone and attempt to ssh from an instance in group1 to an instance in a separate zone
but same security group1 over their private ips.
'''
zones = []
if len(self.zones) < 2:
raise SkipTestException('Skipping test5, only a single zone found or provided')
for zone in self.zones:
zones.append(TestZone(zone))
#Grab a single instance from each zone within security group1
for zone in zones:
instance = None
for instance in self.group1_instances:
if instance.placement == zone.zone:
assert isinstance(instance, EuInstance)
zone.test_instance_group1 = instance
break
instance = None
if not zone.test_instance_group1:
raise Exception('Could not find an instance in group1 for zone:' + str(zone.zone))
self.log.debug('Iterating through zones, attempting ssh between zones within same security group...')
for zone in zones:
instance1 = zone.test_instance_group1
for zone2 in zones:
if zone.zone != zone2.zone:
instance2 = zone2.test_instance_group1
if not instance1 or not instance2:
raise Exception('Security group: ' + str(self.group1.name) + ", missing instances in a Zone:"
+ str(zone.zone) + " = instance:" + str(instance1) +
", Zone:" + str(zone2.zone) + " = instance:" + str(instance2))
self.log.debug('Attempting to run ssh command "uname -a" between instances across zones and security groups:\n'
+ str(instance1.id) + '/sec grps(' + str(instance1.security_groups)+") --> "
+ str(instance2.id) + '/sec grps(' + str(instance2.security_groups)+")\n"
+ "Current test run in zones: " + str(instance1.placement) + "-->" + str(instance2.placement),
linebyline=False )
self.log.debug('Check some debug information re this data connection in this security group first...')
self.user.ec2.does_instance_sec_group_allow(instance=instance2,
src_addr=instance1.private_ip_address,
protocol='tcp',
port=22)
self.log.debug('Now Running the ssh command...')
try:
instance1.sys("ssh -o StrictHostKeyChecking=no -i "
+ str(os.path.basename(instance1.keypath))
+ " root@" + str(instance2.private_ip_address)
+ " ' uname -a'", code=0)
self.log.debug('Ssh between instances passed')
except Exception, ME:
if self.vpc_backend:
try:
self.vpc_backend.show_instance_network_summary(instance)
except Exception, ME:
self.log.debug('{0}\nCould not dump Mido debug, err:{1}'
.format(ME, get_traceback()))
raise
    def test6_test_ssh_between_instances_in_diff_sec_groups_different_zone(self):
        '''
        Definition:
        This test attempts to set up security group rules between group1 and group2 to authorize group2 access
        from group1 across different zones.
        If no_cidr is True security groups will be setup using cidr notication ip/mask for each instance in
        group1, otherwise the entire source group 1 will authorized.
        the group will be
        Note: This test requires the CC have tunnelling enabled, or the CCs in each zone be on same
        layer 2 network segment.
        Test attempts to:
            -Authorize security groups for inter group private ip access.
            -Iterate through each zone and attempt to ssh from an instance in group1 to an instance in group2 over their
             private ips.
        '''
        zones = []
        if len(self.zones) < 2:
            raise SkipTestException('Skipping test5, only a single zone found or provided')
        self.status('Authorizing group2:' + str(self.group2.name) + ' for access from group1:' + str(self.group1.name))
        self.user.ec2.authorize_group(self.group2, cidr_ip=None, port=None,
                                      src_security_group=self.group1)
        for zone in self.zones:
            zones.append(TestZone(zone))
        self.log.debug('Grabbing a single instance from each zone and from each test security group to use in this test...')
        # Select one group1 and one group2 instance per zone as test endpoints.
        for zone in zones:
            instance = None
            for instance in self.group1_instances:
                if instance.placement == zone.zone:
                    assert isinstance(instance, EuInstance)
                    zone.test_instance_group1 = instance
                    break
            instance = None
            if not zone.test_instance_group1:
                raise Exception('Could not find an instance in group1 for zone:' + str(zone.zone))
            instance = None
            for instance in self.group2_instances:
                if instance.placement == zone.zone:
                    assert isinstance(instance, EuInstance)
                    zone.test_instance_group2 = instance
                    break
            if not zone.test_instance_group2:
                raise Exception('Could not find instance in group2 for zone:' + str(zone.zone))
            instance = None
        self.status('Checking connectivity for instances in each zone, in separate but authorized security groups...')
        # For every ordered pair of distinct zones: ssh from the group1 instance
        # in one zone to the group2 instance in the other, over private ips.
        for zone in zones:
            instance1 = zone.test_instance_group1
            if not instance1:
                raise Exception('Missing instance in Security group: ' + str(self.group1.name) + ', Zone:' +
                                str(zone) + " = instance:" + str(instance1) )
            for zone2 in zones:
                if zone.zone != zone2.zone:
                    instance2 = zone2.test_instance_group2
                    if not instance2:
                        raise Exception('Missing instance in Security group: ' + str(self.group2.name) + ', Zone:' +
                                        str(zone2.zone) + " = instance:" + str(instance2) )
                    self.log.debug('Attempting to run ssh command "uname -a" between instances across zones and security groups:\n'
                                   + str(instance1.id) + '/sec grps(' + str(instance1.security_groups)+") --> "
                                   + str(instance2.id) + '/sec grps(' + str(instance2.security_groups)+")\n"
                                   + "Current test run in zones: " + str(instance1.placement) + "-->" + str(instance2.placement),
                                   linebyline=False )
                    self.log.debug('Check some debug information re this data connection in this security group first...')
                    self.user.ec2.does_instance_sec_group_allow(instance=instance2,
                                                                src_addr=instance1.private_ip_address,
                                                                protocol='tcp',
                                                                port=22)
                    self.log.debug('Now Running the ssh command...')
                    # Instance-to-instance ssh using the keypair staged on
                    # instance1 by the earlier create-instance tests.
                    instance1.sys("ssh -o StrictHostKeyChecking=no -i "
                                  + str(os.path.basename(instance1.keypath))
                                  + " root@" + str(instance2.private_ip_address)
                                  + " ' uname -a'", code=0)
                    self.log.debug('Ssh between instances passed')
    def test7_add_and_revoke_tcp_port_range(self,
                                            start=None,
                                            src_cidr_ip='0.0.0.0/0',
                                            count=10,
                                            instances=None,
                                            retry_interval=15):
        '''
        Definition:
        Attempts to add a range of ports to a security group and test
        the ports from the local machine to make sure they are available.
        Next the test revokes the ports and verifies they are no longer
        available.
        :param start: starting port of range to scan
        :param src_cidr_ip: cidr ip for src authorization. If None the test
                            will attempt to discovery the cidr ip of the
                            machine running this test to use for src auth ip.
        :param count: number of consecutive ports from 'start' to test
        :param instances: instance or list of instances to test against;
                          defaults to self.group1_instances
        :param retry_interval: seconds to wait between port polling attempts
        '''
        # Normalize the instances argument, defaulting to the group1 set.
        if instances:
            if not isinstance(instances, list):
                instances = [instances]
            for instance in instances:
                assert isinstance(instance, EuInstance)
        else:
            instances = self.group1_instances
        if not instances:
            raise ValueError('Could not find instance in group1')

        # Iterate through all instances and test...
        for instance1 in instances:
            # Make sure we can ssh to this instance (note this may need to be
            # adjusted for windows access
            # 'does_instance_sec_group_allow' will set user.ec2.local_machine_source_ip to the
            # ip the local machine uses to communicate with the instance.
            instance1.netcat_name = 'netcat'
            if src_cidr_ip is None:
                # Discover the local machine's source ip and authorize ssh from it.
                if not self.user.ec2.does_instance_sec_group_allow(instance=instance1,
                                                                   protocol='tcp',
                                                                   port=22):
                    src_cidr_ip = str(self.user.ec2.local_machine_source_ip) + '/32'
                    self.user.ec2.authorize_group(self.group1,
                                                  protocol='tcp',
                                                  cidr_ip=src_cidr_ip,
                                                  port=22)
            else:
                self.user.ec2.authorize_group(self.group1,
                                              protocol='tcp',
                                              cidr_ip=src_cidr_ip,
                                              port=22)
            # Ensure a netcat binary is present on the instance; try to install
            # via apt or yum under the names 'nc' or 'netcat' if missing.
            try:
                instance1.sys('which {0}'.format(instance1.netcat_name), code=0)
            except CommandExitCodeException:
                got_it = False
                for pkg in ['nc', 'netcat']:
                    try:
                        instance1.sys('apt-get install {0} -y'.format(pkg), code=0)
                        got_it = True
                        break
                    except CommandExitCodeException:
                        try:
                            instance1.sys('yum install {0} -y'.format(pkg), code=0)
                            got_it = True
                            break
                        except CommandExitCodeException:
                            self.log.debug('could install "{0}" on this instance'.format(pkg))
                if not got_it:
                    raise RuntimeError('Could not install netcat on: {0}'.format(instance1))
                instance1.netcat_name = pkg
            #make sure we have an open port range to play with...
            # NOTE(review): 'x=x+count' below has no effect on an xrange-driven
            # loop (the next iteration overwrites x), so in-use ranges are
            # re-scanned port by port — confirm whether a skip was intended.
            if start is None:
                for x in xrange(2000,65000):
                    if self.is_port_range_in_use_on_instance(instance=instance1,
                                                             start=x,
                                                             end=x+count,
                                                             tcp=True):
                        x=x+count
                    else:
                        start=x
                        break
                if not start:
                    raise RuntimeError('Free consecutive port range of count:{0} '
                                       'not found on instance:{1}'
                                       .format(count, instance1.id))
            # authorize entire port range...
            self.user.ec2.authorize_group(self.group1,
                                          protocol='tcp',
                                          cidr_ip=src_cidr_ip,
                                          port=start,
                                          end_port=start+count)
            auth_starttime = time.time()
            # test entire port range is accessible from this machine
            test_file = 'eutester_port_test.txt'
            #Allow some delay for the rule to be applied in the network...
            time.sleep(10)
            for x in xrange(start, start+count):
                # Set up socket listener with netcat, to make sure we're not
                # connecting to the CC or other device write port to file and
                # verify file contents as well.
                test_string = '{0} last port tested[{1}]'.format(time.time(), x)
                self.log.debug("Gathering debug information as to whether the "
                               "tester's src ip is authorized for this port test...")
                if not self.user.ec2.does_instance_sec_group_allow(
                        instance=instance1,
                        src_addr=src_cidr_ip.split('/')[0],
                        protocol='tcp',
                        port=x):
                    raise ValueError('Group:{0} did not have {1}:{2} authorized'
                                     .format(self.group1.name,
                                             src_cidr_ip.split('/')[0],
                                             x))
                # start up netcat, sleep to allow nohup to work before quiting
                # the shell...
                instance1.sys('killall -9 {0} 2> /dev/null'.format(instance1.netcat_name),
                              timeout=5)
                instance1.sys('{' + ' ( nohup {0} -k -l {1} > {2} ) & sleep 1; '
                              .format(instance1.netcat_name, x, test_file) + '}',
                              code=0, timeout=5)
                # attempt to connect socket at instance/port and send the
                # test_string...
                time.sleep(2) #Allow listener to setup...
                done = False
                attempt =0
                # Poll the port (up to 2 attempts) to tolerate rule propagation delay.
                while not done:
                    try:
                        attempt += 1
                        test_port_status(ip=instance1.ip_address,
                                         port=x,
                                         tcp=True,
                                         send_buf=test_string,
                                         verbose=True)
                        done = True
                    except socket.error as SE:
                        self.log.debug('Failed to poll port status on attempt {0}, elapsed since auth '
                                       'request:"{1}"'
                                       .format(attempt, int(time.time()-auth_starttime)))
                        # Best-effort debug dump; errors here are logged and ignored.
                        try:
                            self.log.debug('Failed to connect to "{0}":IP:"{1}":'
                                           'PORT:"{2}"'.format(instance1.id,
                                                               instance1.ip_address,
                                                               x))
                            # NOTE(review): other call sites use self.user.ec2.show_security_group;
                            # confirm self.user.show_security_group is the intended API here.
                            self.user.show_security_group(self.group1)
                            try:
                                self.log.debug('Getting netcat info from instance...')
                                instance1.sys('ps aux | grep {0}'.format(instance1.netcat_name),
                                              timeout=10)
                            except CommandExitCodeException:
                                pass
                            self.log.debug('Iptables info from Euca network component '
                                           'responsible for this instance/security '
                                           'group...')
                            proxy_machine = self.get_proxy_machine(instance1)
                            proxy_machine.sys('iptables-save', timeout=10)
                        except:
                            self.log.debug('Error when fetching debug output for '
                                           'failure, ignoring:' +
                                           str(get_traceback()))
                        if attempt >= 2:
                            raise SE
                        self.log.debug('Sleeping {0} seconds before next attempt:({1}/{2})'
                                       .format(retry_interval, attempt, '2'))
                        time.sleep(retry_interval)
                # Since no socket errors were encountered assume we connected,
                # check file on instance to make sure we didn't connect somewhere
                # else like the CC...
                instance1.sys('grep "{0}" {1}; echo "" > {1}'
                              .format(test_string, test_file),
                              code=0)
                self.status('Port "{0}" successfully tested on instance:{1}/{2}'
                            .format(x, instance1.id, instance1.ip_address))
            self.status('Authorizing port range {0}-{1} passed'
                        .format(start, start+count))

            self.status('Now testing revoking by removing the same port'
                        'range...')
            time.sleep(3)
            self.user.ec2.revoke_security_group(group=self.group1, from_port=start,
                                                to_port=start + count, protocol='tcp',
                                                cidr_ip=src_cidr_ip)
            #Allow some delay for the rule to be applied in the network...
            time.sleep(10)
            for x in xrange(start, start+count):
                # Set up socket listener with netcat, to make sure we're not
                # connecting to the CC or other device write port to file and
                # verify file contents as well.
                # This portion of the test expects that the connection will fail.
                test_string = '{0} last port tested[{1}]'.format(time.time(), x)
                self.log.debug("Gathering debug information as to whether the "
                               "tester's src ip is authorized for this port test...")
                if self.user.ec2.does_instance_sec_group_allow(
                        instance=instance1,
                        src_addr=src_cidr_ip.split('/')[0],
                        protocol='tcp',
                        port=x):
                    raise ValueError('Group:{0} has {1}:{2} authorized after revoke'
                                     .format(self.group1.name,
                                             src_cidr_ip,
                                             x))
                try:
                    instance1.sys('killall -9 {0} 2> /dev/null'.format(instance1.netcat_name),
                                  timeout=5)
                    instance1.sys('{' + ' ( nohup {0} -k -l {1} > {2} ) & sleep 1; '
                                  .format(instance1.netcat_name, x, test_file) + '}',
                                  code=0, timeout=5)
                    test_port_status(ip=instance1.ip_address,
                                     port=x,
                                     tcp=True,
                                     send_buf=test_string,
                                     verbose=True)
                    #We may still need to test the file content for the UDP case...
                    # Since no socket errors were encountered assume we connected,
                    # check file on instance to make sure we didn't connect somewhere
                    # else like the CC. Dont' error here cuz it's already a bug...
                    instance1.sys('grep "{0}" {1}; echo "" > {1}'
                                  .format(test_string, test_file))
                except (socket.error, CommandExitCodeException) as OK:
                    # Expected: connection (or grep) fails after the revoke.
                    self.status('Port "{0}" successfully revoked on '
                                'instance:{1}/{2}'
                                .format(x, instance1.id, instance1.ip_address))
        self.status('Add and revoke ports test passed')
def test8_verify_deleting_of_auth_source_group2(self):
    """
    Definition:
    Attempts to delete a security group which has been authorized by another security group.
    -Authorizes group1 access from group2
    -Validates connectivity for instances in group1 can be accessed from group2
    -Deletes group2, validates group1 still allows traffic from other authorized sources
    """
    # Wrap each zone in a TestZone helper so per-zone test instances can be tracked.
    zones = []
    for zone in self.zones:
        zones.append(TestZone(zone))
    # Make sure the groups are created.
    self.status('Checking and/or create test security groups, and at least one instance'
                'running in them per zone...')
    self.setup_test_security_groups()
    self.user.ec2.authorize_group(self.group1, port=22, protocol='tcp', cidr_ip='0.0.0.0/0')
    self.user.ec2.authorize_group(self.group2, port=22, protocol='tcp', cidr_ip='0.0.0.0/0')
    # Ensure at least one instance exists per zone in each of group1 and group2.
    for zone in self.zones:
        instances_group1 = []
        instances_group2 = []
        for instance in self.group1_instances:
            if instance.placement == zone:
                assert isinstance(instance, EuInstance)
                instances_group1.append(instance)
        if len(instances_group1) < 1:
            self.test1_create_instance_in_zones_for_security_group1(zones=[zone])
        for instance in self.group2_instances:
            if instance.placement == zone:
                assert isinstance(instance, EuInstance)
                instances_group2.append(instance)
        if len(instances_group2) < 1:
            self.test2_create_instance_in_zones_for_security_group2(zones=[zone])
    self.status('Clean out any existing rules in group1 to start with a clean group...')
    self.user.ec2.revoke_all_rules(self.group1)
    self.user.ec2.show_security_group(self.group1)
    instance1 = self.group1_instances[0]
    #Add back ssh
    # After revoking all rules, the group must not allow tcp/22 anymore.
    assert not self.user.ec2.does_instance_sec_group_allow(instance=instance1,
                                                           protocol='tcp',
                                                           port=22), \
        'Instance: {0}, security group still allows access after ' \
        'revoking all rules'
    self.status('Authorize group1 access from group testing machine ssh (tcp/22)...')
    self.user.ec2.authorize_group(self.group1,
                                  # cidr_ip=str(user.ec2.local_machine_source_ip) + '/32',
                                  cidr_ip='0.0.0.0/0',  # open to 0/0 to avoid nat issues
                                  protocol='tcp',
                                  port=22)
    self.user.ec2.authorize_group(self.group1, port=-1, protocol='icmp', cidr_ip='0.0.0.0/0')
    self.user.ec2.show_security_group(self.group1)
    self.status('Test ssh access from this testing machine to each instance in group1...')
    for instance in self.group1_instances:
        # Best-effort debug dump; failures here must not abort the test.
        try:
            instance.printself()
            self.user.ec2.does_instance_sec_group_allow(instance=instance, protocol='tcp', port=22)
        except:
            pass
        instance.connect_to_instance()
        instance.sys('echo "reset ssh worked"', code=0)
    self.status('Authorizing group2 access to group1...')
    self.user.ec2.authorize_group(self.group1,
                                  cidr_ip=None,
                                  port=-1,
                                  protocol='icmp',
                                  src_security_group=self.group2)
    # For debug purposes allow ssh from anywhere here...
    self.user.ec2.authorize_group(self.group1,
                                  cidr_ip=None,
                                  port=22,
                                  protocol='tcp')
    self.status('Sleeping for 10 seconds to allow rule/network'
                ' to set...')
    time.sleep(10)
    self.user.ec2.show_security_group(self.group1)
    self.status('Checking auth from group2 to group1 instances...')
    self.log.debug('Check some debug information re this data connection in this security '
                   'group first...')
    # Pick one test instance per zone from each group, (re)establishing ssh as needed.
    for zone in zones:
        for instance in self.group1_instances:
            if instance.placement == zone.name:
                zone.test_instance_group1 = instance
                if not zone.test_instance_group1.ssh:
                    self.status('Instance in group1 did not have an ssh connection, '
                                'trying to setup ssh now...')
                    self.user.ec2.show_security_groups_for_instance(zone.test_instance_group1)
                    self.log.debug('ssh connect using instance:"{0}", keypath:"{1}"'
                                   .format(zone.test_instance_group1,
                                           zone.test_instance_group1.keypath))
                    zone.test_instance_group1.connect_to_instance()
                break
        for instance in self.group2_instances:
            if instance.placement == zone.name:
                zone.test_instance_group2 = instance
                if not zone.test_instance_group2.ssh:
                    self.status('Instance in group1 did not have an ssh connection, '
                                'trying to setup ssh now...')
                    self.user.ec2.show_security_groups_for_instance(zone.test_instance_group2)
                    self.log.debug('ssh connect using instance:"{0}", keypath:"{1}"'
                                   .format(zone.test_instance_group2,
                                           zone.test_instance_group2.keypath))
                    zone.test_instance_group2.connect_to_instance()
                break
        if not zone.test_instance_group1:
            raise ValueError('Could not find instances in sec group1'
                             'group for zone:' + str(zone.name))
        if not zone.test_instance_group2:
            raise ValueError('Could not find instances in sec group2'
                             'group for zone:' + str(zone.name))
        assert isinstance(zone.test_instance_group1, EuInstance)
        assert isinstance(zone.test_instance_group2, EuInstance)
    for zone in zones:
        #Make sure the instance in group1 has allowed icmp access from group2
        allowed = False
        if self.user.ec2.does_instance_sec_group_allow(
                instance=zone.test_instance_group1,
                src_group=self.group2,
                protocol='icmp',
                port='-1'):
            allowed = True
        if not allowed:
            raise ValueError('Group2 instance not allowed in group1'
                             ' after authorizing group2')
        self.status('Attempting to ping group1 instance from group2 '
                    'instance using their private IPs')
        try:
            zone.test_instance_group2.ssh.verbose = True
            zone.test_instance_group2.sys(
                'ping -c 1 {0}'
                .format(zone.test_instance_group1.private_ip_address),
                code=0,verbose=True)
        except:
            self.errormsg('Failed to ping from group2 to group1 instance '
                          'after authorizing the source group2')
            raise
    self.status('Terminating all instances in group2 in order to delete '
                'security group2')
    self.user.ec2.terminate_instances(self.group2_instances)
    self.group2_instances = []
    self.user.ec2.delete_group(self.group2)
    # The point of the test: deleting the authorized *source* group must not
    # break the remaining (cidr-based) rules on group1.
    self.status('Now confirm that ssh still works for all instances in group1')
    for instance in self.group1_instances:
        self.user.ec2.show_security_groups_for_instance(instance)
        self.log.debug('Attempting to connect to instance from source IP: "{0}"'
                       .format(self.user.ec2.local_machine_source_ip))
        instance.connect_to_instance(timeout=300)
        instance.sys('echo "Getting hostname from {0}"; hostname'
                     .format(instance.id), code=0)
    self.status('Passed. Group1 ssh working after deleting src group which '
                'was authorized to group1')
def test9_ssh_between_instances_same_group_same_zone_public(self):
    """
    Definition:
    For each zone this test will attempt to test ssh between two instances in the same
    security group using the public ips of the instances.
    -Authorize group for ssh access
    -Re-use or create 2 instances within the same security group, same zone
    -For each zone, attempt to ssh to a vm in the same security group same zone
    """
    self.user.ec2.authorize_group(self.group1, port=22, protocol='tcp', cidr_ip='0.0.0.0/0')
    self.user.ec2.authorize_group(self.group1, port=-1, protocol='icmp', cidr_ip='0.0.0.0/0')
    # Make sure each zone has at least 2 instances in group1 to test between.
    for zone in self.zones:
        instances =[]
        for instance in self.group1_instances:
            if instance.placement == zone:
                assert isinstance(instance, EuInstance)
                instances.append(instance)
        if len(instances) < 2:
            for x in xrange(len(instances), 2):
                self.test1_create_instance_in_zones_for_security_group1(zones=[zone])
    for zone in self.zones:
        zone_instances = []
        for instance in self.group1_instances:
            if instance.placement == zone:
                zone_instances.append(instance)
        instance1 = zone_instances[0]
        instance2 = zone_instances[1]
        # Copy instance1's private key onto instance1 so it can ssh to instance2.
        instance1.ssh.sftp_put(instance1.keypath, 'testkey.pem')
        instance1.sys('chmod 0600 testkey.pem')
        testphrase = "pubsamezone_test_from_instance1_{0}".format(instance1.id)
        testfile = 'testfile.txt'
        retry = 2
        # Retry once on timeout/exit-code failure; re-raise on the final attempt.
        for x in xrange(0, retry):
            try:
                instance1.sys("ssh -o StrictHostKeyChecking=no -i testkey.pem root@{0} "
                              "\'echo {1} > {2}; hostname; ifconfig; pwd; ls\'"
                              .format(instance2.ip_address, testphrase, testfile),
                              code=0, timeout=20)
                break
            except (CommandTimeoutException, CommandExitCodeException) as CE:
                self.status('Attempt #{0} to connect between instances failed:"{1}'
                            .format(x, str(CE)))
                if x:
                    raise
        # Verify the remote write actually happened on instance2.
        instance2.sys('hostname; ifconfig; pwd; ls; cat {0} | grep {1}'
                      .format(testfile, testphrase), code=0)
def test10_ssh_between_instances_same_group_public_different_zone(self):
    """
    Definition:
    If multiple zones are detected, this test will attempt to test ssh between
    two instances in the same security group and across each zone using the public ips
    of the instances
    -Authorize group for ssh access
    -Re-use or create 2 instances within the same security group, different zone(s)
    -For each zone, attempt to ssh to a vm in the same security group different zone(s)
    """
    if len(self.zones) < 2:
        raise SkipTestException('Skipping multi-zone test, '
                                'only a single zone found or provided')
    self.user.ec2.authorize_group(self.group1, port=22, protocol='tcp', cidr_ip='0.0.0.0/0')
    self.user.ec2.authorize_group(self.group1, port=-1, protocol='icmp', cidr_ip='0.0.0.0/0')
    # Build a zone -> [instances] map, creating an instance where a zone has none.
    zone_instances = {}
    for zone in self.zones:
        instances = []
        for instance in self.group1_instances:
            if instance.placement == zone:
                assert isinstance(instance, EuInstance)
                instances.append(instance)
        if len(instances) < 1:
            for x in xrange(len(instances), 1):
                self.test1_create_instance_in_zones_for_security_group1(zones=[zone])
            # BUG FIX: re-scan after creation; newly created instances are appended
            # to self.group1_instances, not to the local list, so without this the
            # map entry stayed empty and zone_instances[zone1][0] raised IndexError.
            instances = [inst for inst in self.group1_instances
                         if inst.placement == zone]
        zone_instances[zone] = instances
    for zone1 in self.zones:
        instance1 = zone_instances[zone1][0]
        # Copy instance1's private key onto instance1 so it can ssh to instance2.
        instance1.ssh.sftp_put(instance1.keypath, 'testkey.pem')
        instance1.sys('chmod 0600 testkey.pem')
        for zone2 in self.zones:
            # BUG FIX: previously compared "zone != zone2", where "zone" was the
            # stale loop variable left over from the setup loop above (always the
            # last zone), so the wrong zone pairs were exercised.
            if zone1 != zone2:
                instance2 = zone_instances[zone2][0]
                testphrase = "diffpubzone_test_from_instance1_{0}".format(instance1.id)
                testfile = 'testfile.txt'
                # ssh from instance1 (zone1) to instance2 (zone2) via public IP and
                # drop a marker file, then verify it from instance2's side.
                instance1.sys("ssh -o StrictHostKeyChecking=no -i testkey.pem root@{0} "
                              "\'echo {1} > {2}; hostname; ifconfig; pwd; ls\'"
                              .format(instance2.ip_address, testphrase, testfile),
                              code=0,
                              timeout=10)
                instance2.sys('cat {0} | grep {1}'.format(testfile, testphrase), code=0)
def test11_ssh_between_instances_same_group_same_zone_private(self):
    """
    Definition:
    For each zone this test will attempt to test ssh between two instances in the same
    security group using the private ips of the instances.
    -Authorize group for ssh access
    -Re-use or create 2 instances within the same security group, same zone
    -For each zone, attempt to ssh to a vm in the same security group same zone
    """
    # Remove all rules from the group and add back the minimum amount of rules to run
    # this test...
    self.user.ec2.revoke_all_rules(self.group1)
    time.sleep(1)
    self.user.ec2.authorize_group(self.group1, port=22, protocol='tcp', cidr_ip='0.0.0.0/0')
    self.user.ec2.authorize_group(self.group1, port=-1, protocol='icmp', cidr_ip='0.0.0.0/0')
    self.status('Using 2 instances from each zone within the following security group to'
                'test private ip connectivity:"{0}"'.format(self.group1))
    self.user.ec2.show_security_group(self.group1)
    # Make sure each zone has at least 2 instances in group1 to test between.
    for zone in self.zones:
        instances =[]
        for instance in self.group1_instances:
            if instance.placement == zone:
                assert isinstance(instance, EuInstance)
                instances.append(instance)
        if len(instances) < 2:
            for x in xrange(len(instances), 2):
                self.test1_create_instance_in_zones_for_security_group1(zones=[zone])
    for zone in self.zones:
        zone_instances = []
        # NOTE(review): only this test normalizes the zone to its 'name' attribute
        # before comparing to instance.placement; sibling tests compare the zone
        # object directly -- confirm which form self.zones actually holds.
        zone_name = getattr(zone, 'name', None) or zone
        for instance in self.group1_instances:
            if instance.placement == zone_name:
                zone_instances.append(instance)
        instance1 = zone_instances[0]
        instance2 = zone_instances[1]
        # Copy instance1's private key onto instance1 so it can ssh to instance2.
        instance1.ssh.sftp_put(instance1.keypath, 'testkey.pem')
        instance1.sys('chmod 0600 testkey.pem')
        testphrase = "hello_from_instance1_{0}".format(instance1.id)
        testfile = 'testfile.txt'
        self.status("Attempting to ssh from instance:{0} to instance:{1}'s private ip:{2}"
                    .format(instance1.id, instance2.id, instance2.private_ip_address))
    # One retry with a doubled timeout if the first ssh attempt fails.
        try:
            instance1.sys("ssh -o StrictHostKeyChecking=no -i testkey.pem root@{0} "
                          "\'echo {1} > {2}; hostname; ifconfig; pwd; ls\'"
                          .format(instance2.private_ip_address, testphrase, testfile),
                          code=0,
                          timeout=10)
        except Exception, se:
            self.status('First attempt to ssh between instances failed, err: ' + str(se) +
                        '\nIncreasing command timeout to 20 seconds, and trying again. ')
            instance1.sys("ssh -o StrictHostKeyChecking=no -i testkey.pem root@{0} "
                          "\'echo {1} > {2}; hostname; ifconfig; pwd; ls\'"
                          .format(instance2.private_ip_address, testphrase, testfile),
                          code=0,
                          timeout=20)
        self.status('Cat the test file create from the ssh cmd {0} ran on on {1}...'
                    .format(instance1, instance2))
        instance2.sys('cat {0} | grep {1}'.format(testfile, testphrase), code=0)
def test12_ssh_between_instances_same_group_private_different_zone(self):
    """
    Definition:
    If multiple zones are detected, this test will attempt to test ssh between
    two instances in the same security group and across each zone using the instances'
    private ip addresses.
    -Authorize group for ssh access
    -Re-use or create 2 instances within the same security group, different zone(s)
    -For each zone, attempt to ssh to a vm in the same security group different zone(s)
    """
    if len(self.zones) < 2:
        raise SkipTestException('Skipping multi-zone test, '
                                'only a single zone found or provided')
    self.user.ec2.authorize_group(self.group1, port=22, protocol='tcp', cidr_ip='0.0.0.0/0')
    self.user.ec2.authorize_group(self.group1, port=-1, protocol='icmp', cidr_ip='0.0.0.0/0')
    # Make sure each zone has at least one instance in group1.
    for zone in self.zones:
        instances = []
        for instance in self.group1_instances:
            if instance.placement == zone:
                assert isinstance(instance, EuInstance)
                instances.append(instance)
        if len(instances) < 1:
            for x in xrange(len(instances), 1):
                self.test1_create_instance_in_zones_for_security_group1(zones=[zone])
    for zone1 in self.zones:
        zone_instances = []
        for instance in self.group1_instances:
            if instance.placement == zone1:
                zone_instances.append(instance)
        instance1 = zone_instances[0]
        # Copy instance1's private key onto instance1 so it can ssh to instance2.
        instance1.ssh.sftp_put(instance1.keypath, 'testkey.pem')
        instance1.sys('chmod 0600 testkey.pem')
        for zone2 in self.zones:
            if zone1 != zone2:
                zone2_instances = []
                for instance in self.group1_instances:
                    if instance.placement == zone2:
                        zone2_instances.append(instance)
                # BUG FIX: previously selected "zone_instances[0]" (an instance in
                # zone1), so the test ssh'd within a single zone and never actually
                # exercised cross-zone private-ip connectivity.
                instance2 = zone2_instances[0]
                testphrase = "diffprivzone_test_from_instance1_{0}".format(instance1.id)
                testfile = 'testfile.txt'
                # NOTE(review): the original used instance2.ip_address (public) here
                # despite this being the private-ip test; private_ip_address matches
                # the test's stated intent and its sibling test13.
                instance1.sys("ssh -o StrictHostKeyChecking=no -i testkey.pem root@{0} "
                              "\'echo {1} > {2}; hostname; ifconfig; pwd; ls\'"
                              .format(instance2.private_ip_address, testphrase, testfile),
                              code=0,
                              timeout=10)
                instance2.sys('cat {0} | grep {1}'.format(testfile, testphrase), code=0)
def test13_ssh_between_instances_diff_group_private_different_zone(self):
    """
    Definition:
    If multiple zones are detected, this test will attempt to test ssh between
    two instances in the same security group and across each zone using the instances'
    private ip addresses.
    """
    # NOTE(review): docstring says "same security group" but this test runs between
    # group1 and group2 -- the method name reflects the actual behavior.
    if len(self.zones) < 2:
        raise SkipTestException('Skipping multi-zone test, '
                                'only a single zone found or provided')
    self.user.ec2.authorize_group(self.group1, port=22, protocol='tcp', cidr_ip='0.0.0.0/0')
    self.user.ec2.authorize_group(self.group1, port=-1, protocol='icmp', cidr_ip='0.0.0.0/0')
    # In case a previous test has deleted group2...
    self.group2 = self.user.ec2.add_group(self.group2.name)
    self.user.ec2.authorize_group(self.group2, port=22, protocol='tcp', cidr_ip='0.0.0.0/0')
    self.user.ec2.authorize_group(self.group2, port=-1, protocol='icmp', cidr_ip='0.0.0.0/0')
    for zone in self.zones:
        instance1 = None
        instances =[]
        # Find (or create) a group1 instance in this zone to ssh *from*.
        for instance in self.group1_instances:
            if instance.placement == zone:
                instance1 = instance
        if not instance1:
            self.test1_create_instance_in_zones_for_security_group1(zones=[zone])
            for instance in self.group1_instances:
                if instance.placement == zone:
                    instance1 = instance
        # Copy instance1's private key onto instance1 so it can ssh to instance2.
        instance1.ssh.sftp_put(instance1.keypath, 'testkey.pem')
        instance1.sys('chmod 0600 testkey.pem')
        for zone2 in self.zones:
            instance2 = None
            if zone2 != zone:
                # Find (or create) a group2 instance in the other zone to ssh *to*.
                for instance in self.group2_instances:
                    if instance.placement == zone2:
                        instance2 = instance
                if not instance2:
                    self.test2_create_instance_in_zones_for_security_group2(zones=[zone2],
                                                                            auto_connect=True)
                    for instance in self.group2_instances:
                        if instance.placement == zone2:
                            instance2 = instance
                testphrase = "diffprivzone_test_from_instance1_{0}".format(instance1.id)
                testfile = 'testfile.txt'
                self.status('Testing instance:{0} zone:{1} --ssh--> instance:{2} zone:{3} '
                            '-- private ip'.format(instance1.id, zone,instance2.id, zone2))
                instance1.sys("ssh -o StrictHostKeyChecking=no -i testkey.pem root@{0} "
                              "\'echo {1} > {2}; hostname; ifconfig; pwd; ls\'"
                              .format(instance2.private_ip_address, testphrase, testfile),
                              code=0,
                              timeout=10)
                instance2.sys('cat {0} | grep {1}'.format(testfile, testphrase), code=0)
def test14_ssh_between_instances_diff_group_public_different_zone(self):
    """
    Definition:
    If multiple zones are detected, this test will attempt to test ssh between
    two instances in the same security group and across each zone using the instances'
    private ip addresses.
    """
    # NOTE(review): docstring is copied from test13; this variant uses *public*
    # ip addresses and crosses group1 -> group2, per the method name.
    if len(self.zones) < 2:
        raise SkipTestException('Skipping multi-zone test, '
                                'only a single zone found or provided')
    self.user.ec2.authorize_group(self.group1, port=22, protocol='tcp', cidr_ip='0.0.0.0/0')
    self.user.ec2.authorize_group(self.group1, port=-1, protocol='icmp', cidr_ip='0.0.0.0/0')
    # In case a previous test has deleted group2...
    self.group2 = self.user.ec2.add_group(self.group2.name)
    self.user.ec2.authorize_group(self.group2, port=22, protocol='tcp', cidr_ip='0.0.0.0/0')
    self.user.ec2.authorize_group(self.group2, port=-1, protocol='icmp', cidr_ip='0.0.0.0/0')
    for zone in self.zones:
        instance1 = None
        instances =[]
        # Find (or create) a group1 instance in this zone to ssh *from*.
        for instance in self.group1_instances:
            if instance.placement == zone:
                instance1 = instance
        if not instance1:
            self.test1_create_instance_in_zones_for_security_group1(zones=[zone])
            for instance in self.group1_instances:
                if instance.placement == zone:
                    instance1 = instance
        # Copy instance1's private key onto instance1 so it can ssh to instance2.
        instance1.ssh.sftp_put(instance1.keypath, 'testkey.pem')
        instance1.sys('chmod 0600 testkey.pem')
        for zone2 in self.zones:
            instance2 = None
            if zone2 != zone:
                # Find (or create) a group2 instance in the other zone to ssh *to*.
                for instance in self.group2_instances:
                    if instance.placement == zone2:
                        instance2 = instance
                if not instance2:
                    self.test2_create_instance_in_zones_for_security_group2(zones=[zone2],
                                                                            auto_connect=True)
                    for instance in self.group2_instances:
                        if instance.placement == zone2:
                            instance2 = instance
                testphrase = "diffprivzone_test_from_instance1_{0}".format(instance1.id)
                testfile = 'testfile.txt'
                self.status('Testing instance:{0} zone:{1} --ssh--> instance:{2} zone:{3} '
                            '-- private ip'.format(instance1.id, zone,instance2.id, zone2))
                instance1.sys("ssh -o StrictHostKeyChecking=no -i testkey.pem root@{0} "
                              "\'echo {1} > {2}; hostname; ifconfig; pwd; ls\'"
                              .format(instance2.ip_address, testphrase, testfile),
                              code=0,
                              timeout=10)
                instance2.sys('cat {0} | grep {1}'.format(testfile, testphrase), code=0)
# add revoke may be covered above...?
def test_revoke_rules(self):
    """
    Creates a throwaway security group, verifies ssh works while tcp/22 is
    authorized, then revokes the rule and verifies ssh is blocked, then
    re-authorizes and verifies ssh works again. Cleans up the group/instance.
    """
    revoke_group = self.user.ec2.add_group("revoke-group-" + str(int(time.time())))
    self.user.ec2.authorize_group(revoke_group, protocol='tcp', port=22)
    for zone in self.zones:
        instance = self.user.ec2.run_image(image=self.image,
                                           keypair=self.keypair,
                                           subnet_id = self.subnet_id,
                                           group=revoke_group,
                                           zone=zone)[0]
        self.user.ec2.revoke_security_group(revoke_group, from_port=22, protocol='tcp')
        self.log.debug('Sleeping for 60 seconds before retrying group')
        time.sleep(60)
        try:
            instance.reset_ssh_connection(timeout=30)
            # If we got here, ssh succeeded without a rule -- clean up and fail.
            self.user.ec2.delete_group(revoke_group)
            raise Exception("Was able to SSH without authorized rule")
        except SSHException, e:
            self.log.debug("SSH was properly blocked to the instance")
        self.user.ec2.authorize_group(revoke_group, protocol='tcp', port=22)
        instance.reset_ssh_connection()
        self.user.ec2.terminate_instances(instance)
    self.user.ec2.delete_group(revoke_group)
def _run_suite(self, testlist=None, basic_only=False, exclude=None):
    """
    Build and run the test suite.

    :param testlist: optional explicit list (or comma/space separated string)
                     of test method names to run instead of the default suite.
    :param basic_only: if True, run only the three mandatory setup tests.
    :param exclude: optional list (or comma/space separated string) of test
                    names to remove from the default suite.
    :returns: the aggregate result returned by self.run().
    """
    # The first tests will have the End On Failure flag set to true. If these tests fail
    # the remaining tests will not be attempted.
    unit_list = []
    testlist = testlist or []
    exclude = exclude or []
    if exclude:
        # Accept a comma- or space-separated string as well as a list.
        exclude = re.sub('[",]', " ", str(exclude)).split()
    if testlist:
        if not isinstance(testlist, list):
            # BUG FIX: str.replace() returns a new string; the original call
            # discarded its result, so comma-separated test lists were never
            # split correctly.
            testlist = testlist.replace(',', ' ').split()
        for test in testlist:
            # BUG FIX: was "nettests.create_testunit_by_name(test)" which relied
            # on the module-level global created only under __main__, raising
            # NameError when this class is used as a library.
            unit_list.append(self.create_testunit_by_name(test))
    else:
        # Default suite: the three mandatory setup tests run with eof=True.
        unit_list = [
            self.create_testunit_by_name('test1_create_instance_in_zones_for_security_group1',
                                         eof=True),
            self.create_testunit_by_name('test2_create_instance_in_zones_for_security_group2',
                                         eof=True),
            self.create_testunit_by_name(
                'test3_test_ssh_between_instances_in_diff_sec_groups_same_zone', eof=True)]
        if basic_only:
            testlist = []
        else:
            # Then add the rest of the tests...
            testlist = ['test4_attempt_unauthorized_ssh_from_test_machine_to_group2',
                        'test5_test_ssh_between_instances_in_same_sec_groups_different_zone',
                        'test7_add_and_revoke_tcp_port_range',
                        'test8_verify_deleting_of_auth_source_group2',
                        'test9_ssh_between_instances_same_group_same_zone_public',
                        'test10_ssh_between_instances_same_group_public_different_zone',
                        'test11_ssh_between_instances_same_group_same_zone_private',
                        'test12_ssh_between_instances_same_group_private_different_zone',
                        'test13_ssh_between_instances_diff_group_private_different_zone',
                        'test14_ssh_between_instances_diff_group_public_different_zone']
        for test in exclude:
            if test in testlist:
                testlist.remove(test)
        for test in testlist:
            unit_list.append(self.create_testunit_by_name(test))
    self.status('Got running the following list of tests:' + str(testlist))
    ### Run the EutesterUnitTest objects
    result = self.run(unit_list,eof=False,clean_on_exit=True)
    self.status('Test finished with status:"{0}"'.format(result))
    return result
if __name__ == "__main__":
    # Script entry point: build the suite and exit with its result code.
    nettests = NetTestsClassic()
    exit(nettests._run_suite(testlist=nettests.args.test_list))
| bsd-2-clause |
ilastikdev/ilastik | ilastik/applets/dataSelection/dataLaneSummaryTableModel.py | 4 | 7961 | ###############################################################################
# ilastik: interactive learning and segmentation toolkit
#
# Copyright (C) 2011-2014, the ilastik developers
# <team@ilastik.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# In addition, as a special exception, the copyright holders of
# ilastik give you permission to combine ilastik with applets,
# workflows and plugins which are not covered under the GNU
# General Public License.
#
# See the LICENSE file for details. License information is also available
# on the ilastik web site at:
# http://ilastik.org/license.html
###############################################################################
from PyQt4.QtCore import Qt, QAbstractItemModel, QModelIndex
from volumina.utility import decode_to_qstring
from lazyflow.utility import PathComponents
from ilastik.utility import bind
from opDataSelection import DatasetInfo
class LaneColumn():
    """Column indices for the lane-level (non per-role) table columns."""
    LabelsAllowed = 0  # Column showing whether labeling is allowed for the lane.
    NumColumns = 1     # Total count of lane-level columns.
class DatasetInfoColumn():
    """Column indices for the per-role dataset-info columns."""
    Name = 0        # Column showing the dataset's nickname or filename.
    NumColumns = 1  # Count of per-role columns (repeated once per role).
def rowOfButtonsProxy(model_cls):
    """
    Given a TableModel class, return a new class that pretends to have an
    extra row at the end. This row is used to display "Add..." buttons in
    the GUI.
    """
    class ProxyModel(model_cls):
        def __init__(self, *args, **kwds):
            super(ProxyModel, self).__init__(*args, **kwds)

        def rowCount(self, parent=QModelIndex()):
            """
            Return number of rows in the model.
            This proxy model keeps an extra row at the end for buttons.
            """
            return super(ProxyModel, self).rowCount(parent) + 1

        def headerData(self, section, orientation, role=Qt.DisplayRole ):
            """
            Return header information for row/column.
            Skip vertical header for the last row, which is used for buttons.
            """
            if orientation == Qt.Vertical:
                if section >= super(ProxyModel, self).rowCount():
                    return ""
            return super(ProxyModel, self).headerData(section, orientation,
                                                      role)

        def _getDisplayRoleData(self, index):
            # Last row is just buttons
            if index.row() >= super(ProxyModel, self).rowCount():
                return ""
            # Delegate real rows to the wrapped class's implementation.
            return model_cls._getDisplayRoleData(self, index)

    return ProxyModel
@rowOfButtonsProxy
class DataLaneSummaryTableModel(QAbstractItemModel):
    """
    Qt item model summarizing all data lanes: one row per lane, one
    "Labelable" column plus one group of columns per dataset role.
    """

    def __init__(self, parent, topLevelOperator):
        """
        :param topLevelOperator: An instance of OpMultiLaneDataSelectionGroup
        """
        # super does not work here in Python 2.x, decorated class confuses it
        QAbstractItemModel.__init__(self, parent)
        self._op = topLevelOperator

        def handleNewLane( multislot, laneIndex):
            assert multislot is self._op.DatasetGroup
            self.beginInsertRows( QModelIndex(), laneIndex, laneIndex )
            self.endInsertRows()

        def handleDatasetInfoChanged(slot):
            # Get the row of this slot
            laneSlot = slot.operator
            laneIndex = laneSlot.operator.index( laneSlot )
            # FIXME: For now, we update the whole row.
            #        Later, update only the columns that correspond to this dataset.
            firstIndex = self.createIndex(laneIndex, 0)
            lastIndex = self.createIndex(laneIndex, self.columnCount()-1)
            self.dataChanged.emit(firstIndex, lastIndex)

        def handleNewDatasetInserted(mslot, index):
            mslot[index].notifyDirty( bind(handleDatasetInfoChanged) )

        # Subscribe to dirty notifications for datasets that already exist.
        for laneIndex, datasetMultiSlot in enumerate(self._op.DatasetGroup):
            datasetMultiSlot.notifyInserted( bind(handleNewDatasetInserted) )
            for roleIndex, datasetSlot in enumerate(datasetMultiSlot):
                handleNewDatasetInserted( datasetMultiSlot, roleIndex )

        self._op.DatasetGroup.notifyInserted( bind(handleNewLane) )

        def handleLaneRemoved( multislot, laneIndex ):
            assert multislot is self._op.DatasetGroup
            self.beginRemoveRows( QModelIndex(), laneIndex, laneIndex )
            self.endRemoveRows()
        self._op.DatasetGroup.notifyRemoved( bind(handleLaneRemoved) )

        # Any lanes that already exist must be added now.
        for laneIndex, slot in enumerate(self._op.DatasetGroup):
            handleNewLane( self._op.DatasetGroup, laneIndex )

    def columnCount(self, parent=QModelIndex()):
        if not self._op.DatasetRoles.ready():
            return 0
        roles = self._op.DatasetRoles.value
        return LaneColumn.NumColumns + DatasetInfoColumn.NumColumns * len(roles)

    def rowCount(self, parent=QModelIndex()):
        return len( self._op.ImageGroup )

    def data(self, index, role=Qt.DisplayRole):
        if role == Qt.DisplayRole:
            return self._getDisplayRoleData(index)

    def index(self, row, column, parent=QModelIndex()):
        return self.createIndex( row, column, object=None )

    def parent(self, index):
        # Flat table: no hierarchy.
        return QModelIndex()

    def headerData(self, section, orientation, role=Qt.DisplayRole ):
        if role != Qt.DisplayRole:
            return None
        if orientation == Qt.Vertical:
            return section+1
        if section == LaneColumn.LabelsAllowed:
            return "Labelable"
        infoColumn = section - LaneColumn.NumColumns
        roleIndex = infoColumn // DatasetInfoColumn.NumColumns
        # BUG FIX: previously modded by LaneColumn.NumColumns; the per-role
        # column index must wrap by DatasetInfoColumn.NumColumns. Benign only
        # while both constants happen to equal 1.
        infoColumn %= DatasetInfoColumn.NumColumns
        if infoColumn == DatasetInfoColumn.Name:
            if self._op.DatasetRoles.ready():
                return self._op.DatasetRoles.value[roleIndex]
            return ""
        assert False, "Unknown header column: {}".format( section )

    def _getDisplayRoleData(self, index):
        laneIndex = index.row()
        if index.column() < LaneColumn.NumColumns:
            if index.column() == LaneColumn.LabelsAllowed:
                firstInfoSlot = self._op.DatasetGroup[laneIndex][0]
                if not firstInfoSlot.ready():
                    return ""
                info = firstInfoSlot.value
                return { True: "True", False : "False" }[ info.allowLabels ]
            else:
                assert False

        ## Dataset info item
        roleIndex = (index.column() - LaneColumn.NumColumns) // DatasetInfoColumn.NumColumns
        datasetInfoIndex = (index.column() - LaneColumn.NumColumns) % DatasetInfoColumn.NumColumns

        datasetSlot = self._op.DatasetGroup[laneIndex][roleIndex]
        if not datasetSlot.ready():
            return ""

        UninitializedDisplayData = { DatasetInfoColumn.Name : "<please select>" }

        datasetSlot = self._op.DatasetGroup[laneIndex][roleIndex]
        if datasetSlot.ready():
            datasetInfo = self._op.DatasetGroup[laneIndex][roleIndex].value
        else:
            return UninitializedDisplayData[ datasetInfoIndex ]

        if datasetInfoIndex == DatasetInfoColumn.Name:
            if datasetInfo.nickname is not None and datasetInfo.nickname != "":
                return datasetInfo.nickname
            return decode_to_qstring( PathComponents( datasetInfo.filePath ).filename )

        # NOTE(review): DatasetInfoColumn.Location is not defined on
        # DatasetInfoColumn; this branch is unreachable while NumColumns == 1
        # but would raise AttributeError if more columns are added -- confirm.
        if datasetInfoIndex == DatasetInfoColumn.Location:
            LocationNames = { DatasetInfo.Location.FileSystem : "External File",
                              DatasetInfo.Location.ProjectInternal : "Project File" }
            return LocationNames[ datasetInfo.location ]

        assert False, "Unknown column"
| gpl-3.0 |
endlessm/chromium-browser | third_party/chromite/scripts/cros_oobe_autoconfig_unittest.py | 1 | 6578 | # -*- coding: utf-8 -*-
# Copyright 2018 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittests for cros_oobe_autoconfig.py"""
from __future__ import print_function
import json
import os
import pwd
import sys
from chromite.lib import constants
from chromite.lib import cros_build_lib
from chromite.lib import cros_test_lib
from chromite.lib import image_lib
from chromite.lib import osutils
from chromite.scripts import cros_oobe_autoconfig
pytestmark = [cros_test_lib.pytestmark_inside_only,
cros_test_lib.pytestmark_skip('https://crbug.com/1000761')]
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
# Enrollment domain used throughout the tests.
_TEST_DOMAIN = 'test.com'

# Command-line arguments passed to cros_oobe_autoconfig.main() in tests.
_TEST_CLI_PARAMETERS = (
    'image.bin', '--x-demo-mode', '--x-network-onc', '{}',
    '--x-network-auto-connect', '--x-eula-send-statistics',
    '--x-eula-auto-accept', '--x-update-skip', '--x-wizard-auto-enroll',
    '--enrollment-domain', _TEST_DOMAIN)

# Expected config.json content produced from _TEST_CLI_PARAMETERS.
_TEST_CONFIG_JSON = {
    'demo-mode': True,
    'network-onc': '{}',
    'network-auto-connect': True,
    'eula-send-statistics': True,
    'eula-auto-accept': True,
    'update-skip': True,
    'wizard-auto-enroll': True
}

# Geometry of the throwaway disk image built in PrepareImageTests.setUp().
_IMAGE_SIZE = 4 * 1024 * 1024       # Total image size in bytes.
_BLOCK_SIZE = 4096                  # Filesystem block size used for dd copies.
_SECTOR_SIZE = 512                  # GPT sector size.
_STATEFUL_SIZE = _IMAGE_SIZE // 2   # Size of the stateful partition.
_STATEFUL_OFFSET = 120 * _SECTOR_SIZE  # Byte offset of the stateful partition.
class SanitizeDomainTests(cros_test_lib.TestCase):
  """Tests for SanitizeDomain()"""

  def testASCII(self):
    """Tests that ASCII-only domains are not mangled."""
    # Only case-folding is expected for plain ASCII.
    self.assertEqual(cros_oobe_autoconfig.SanitizeDomain('FoO.cOm'), 'foo.com')

  def testUnicodeCase(self):
    """Tests that ASCII-only domains are not mangled."""
    # Unicode domains are case-folded and punycoded (IDNA).
    self.assertEqual(cros_oobe_autoconfig.SanitizeDomain(u'föo.com'),
                     'xn--fo-fka.com')
    self.assertEqual(cros_oobe_autoconfig.SanitizeDomain(u'fÖo.com'),
                     'xn--fo-fka.com')

  def testHomographs(self):
    """Tests that a Unicode domain is punycoded."""
    # "tеѕt.com" looks like "test.com" but isn't!
    self.assertEqual(cros_oobe_autoconfig.SanitizeDomain(
        u't\u0435\u0455t.com'), 'xn--tt-nlc2k.com')
class PrepareImageTests(cros_test_lib.MockTempDirTestCase):
  """Tests for PrepareImage()"""

  def setUp(self):
    """Create a small test disk image for testing."""
    self.image = os.path.join(self.tempdir, 'image.bin')
    state = os.path.join(self.tempdir, 'state.bin')

    # Allocate space for the disk image and stateful partition.
    osutils.AllocateFile(self.image, _IMAGE_SIZE)
    osutils.AllocateFile(state, _STATEFUL_SIZE)

    # Commands run in order; each step depends on the previous one.
    commands = (
        # Format the stateful image as ext4.
        ['/sbin/mkfs.ext4', state],
        # Create the GPT headers and entry for the stateful partition.
        ['cgpt', 'create', self.image],
        ['cgpt', 'boot', '-p', self.image],
        ['cgpt', 'add', self.image, '-t', 'data',
         '-l', str(constants.CROS_PART_STATEFUL),
         '-b', str(_STATEFUL_OFFSET // _SECTOR_SIZE),
         '-s', str(_STATEFUL_SIZE // _SECTOR_SIZE), '-i', '1'],
        # Copy the stateful partition into the GPT image.
        ['dd', 'if=%s' % state, 'of=%s' % self.image, 'conv=notrunc', 'bs=4K',
         'seek=%d' % (_STATEFUL_OFFSET // _BLOCK_SIZE),
         'count=%s' % (_STATEFUL_SIZE // _BLOCK_SIZE)],
        ['sync'])
    for cmd in commands:
      cros_build_lib.run(cmd, quiet=True)

    # Run the preparation script on the image.
    cros_oobe_autoconfig.main([self.image] + list(_TEST_CLI_PARAMETERS)[1:])

    # Mount the image's stateful partition for inspection.
    self.mount_tmp = os.path.join(self.tempdir, 'mount')
    osutils.SafeMakedirs(self.mount_tmp)
    self.mount_ctx = image_lib.LoopbackPartitions(self.image, self.mount_tmp)
    self.mount = os.path.join(self.mount_tmp,
                              'dir-%s' % constants.CROS_PART_STATEFUL)

    # Paths inside the mounted stateful partition that the tests inspect.
    self.oobe_autoconf_path = os.path.join(self.mount, 'unencrypted',
                                           'oobe_auto_config')
    self.config_path = os.path.join(self.oobe_autoconf_path, 'config.json')
    self.domain_path = os.path.join(self.oobe_autoconf_path,
                                    'enrollment_domain')

  def testChronosOwned(self):
    """Test that the OOBE autoconfig directory is owned by chronos."""
    with self.mount_ctx:
      # TODO(mikenichols): Remove unneeded mount call once context
      # handling is in place, http://crrev/c/1795578
      _ = self.mount_ctx.Mount((constants.CROS_PART_STATEFUL,))[0]
      chronos_uid = pwd.getpwnam('chronos').pw_uid
      self.assertExists(self.oobe_autoconf_path)
      self.assertEqual(os.stat(self.config_path).st_uid, chronos_uid)

  def testConfigContents(self):
    """Test that the config JSON matches the correct data."""
    with self.mount_ctx:
      # TODO(mikenichols): Remove unneeded mount call once context
      # handling is in place, http://crrev/c/1795578
      _ = self.mount_ctx.Mount((constants.CROS_PART_STATEFUL,))[0]
      with open(self.config_path) as fp:
        data = json.load(fp)
      self.assertEqual(data, _TEST_CONFIG_JSON)

  def testDomainContents(self):
    """Test that the domain file matches the correct data."""
    with self.mount_ctx:
      # TODO(mikenichols): Remove unneeded mount call once context
      # handling is in place, http://crrev/c/1795578
      _ = self.mount_ctx.Mount((constants.CROS_PART_STATEFUL,))[0]
      self.assertEqual(osutils.ReadFile(self.domain_path), _TEST_DOMAIN)
class GetConfigContentTests(cros_test_lib.MockTestCase):
  """Tests for GetConfigContent()"""

  def testBasic(self):
    """Test that config is generated correctly with all options."""
    opts = cros_oobe_autoconfig.ParseArguments(_TEST_CLI_PARAMETERS)
    conf = cros_oobe_autoconfig.GetConfigContent(opts)
    self.assertEqual(json.loads(conf), _TEST_CONFIG_JSON)

  def testUnspecified(self):
    """Test that config is generated correctly with some options missing."""
    # Dropping a CLI flag must produce False (not a missing key) in the JSON.
    cli = list(_TEST_CLI_PARAMETERS)
    cli.remove('--x-update-skip')
    expected = dict(_TEST_CONFIG_JSON)
    expected['update-skip'] = False

    opts = cros_oobe_autoconfig.ParseArguments(cli)
    conf = cros_oobe_autoconfig.GetConfigContent(opts)
    self.assertEqual(json.loads(conf), expected)
class MainTests(cros_test_lib.MockTestCase):
  """Tests for main()"""

  def setUp(self):
    # Stub out PrepareImage so main() never touches a real disk image.
    self.PatchObject(cros_oobe_autoconfig, 'PrepareImage')

  def testBasic(self):
    """Simple smoke test"""
    cros_oobe_autoconfig.main(_TEST_CLI_PARAMETERS)
| bsd-3-clause |
AndroidOpenDevelopment/android_external_chromium_org | tools/memory_inspector/memory_inspector/core/symbol.py | 107 | 1870 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Symbols(object):
  """Symbol table keyed by 'exec_path+0xoffset' strings."""

  def __init__(self):
    # Maps e.g. 'foo.so+0x1234' -> |Symbol|.
    self.symbols = {}

  def Add(self, exec_file_rel_path, offset, symbol):
    assert(isinstance(symbol, Symbol))
    key = Symbols._GetKey(exec_file_rel_path, offset)
    self.symbols[key] = symbol

  def Lookup(self, exec_file_rel_path, offset):
    key = Symbols._GetKey(exec_file_rel_path, offset)
    return self.symbols.get(key)

  def Merge(self, other):
    assert(isinstance(other, Symbols))
    self.symbols.update(other.symbols)  # pylint: disable=W0212

  def __len__(self):
    return len(self.symbols)

  @staticmethod
  def _GetKey(exec_file_rel_path, offset):
    return '%s+0x%x' % (exec_file_rel_path, offset)
class Symbol(object):
  """Debug information for one symbol.

  A symbol may accumulate several source locations (via
  AddSourceLineInfo); __str__ shows only the first one.
  """

  def __init__(self, name, source_file_path=None, line_number=None):
    self.name = name
    self.source_info = []
    if source_file_path:
      self.AddSourceLineInfo(source_file_path, line_number or 0)

  def AddSourceLineInfo(self, source_file_path, line_number):
    self.source_info.append(SourceInfo(source_file_path, line_number))

  def __str__(self):
    first = self.source_info[0] if self.source_info else ''
    return '%s %s' % (self.name, first)
class SourceInfo(object):
  """A (source file, line number) pair attached to a |Symbol|."""

  def __init__(self, source_file_path, line_number):
    # Reject non-integer line numbers early; str() formats with %d below.
    assert(isinstance(line_number, int))
    self.source_file_path = source_file_path
    self.line_number = line_number

  def __str__(self):
    return '{}:{:d}'.format(self.source_file_path, self.line_number)
| bsd-3-clause |
dhuang/incubator-airflow | dags/test_dag.py | 23 | 1333 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from airflow import utils
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from datetime import datetime, timedelta
# Anchor computations at the start of the hour, three hours back.
# NOTE: timedelta's positional signature is (days, seconds, microseconds,
# milliseconds, minutes, hours), so timedelta(0, 0, 0, 0, 0, 3) == 3 hours.
now = datetime.now()
now_to_the_hour = (now - timedelta(0, 0, 0, 0, 0, 3)).replace(minute=0, second=0, microsecond=0)
START_DATE = now_to_the_hour
DAG_NAME = 'test_dag_v1'

# Defaults inherited by every task; start_date two days back gives the
# scheduler prior runs, which depends_on_past then chains in order.
default_args = {
    'owner': 'airflow',
    'depends_on_past': True,
    'start_date': utils.dates.days_ago(2)
}
dag = DAG(DAG_NAME, schedule_interval='*/10 * * * *', default_args=default_args)

# Simple linear pipeline: run_this_1 -> run_this_2 -> run_this_3.
run_this_1 = DummyOperator(task_id='run_this_1', dag=dag)
run_this_2 = DummyOperator(task_id='run_this_2', dag=dag)
run_this_2.set_upstream(run_this_1)
run_this_3 = DummyOperator(task_id='run_this_3', dag=dag)
run_this_3.set_upstream(run_this_2)
| apache-2.0 |
Godmaster49/mtasa-blue | vendor/google-breakpad/src/testing/gtest/test/gtest_help_test.py | 2968 | 5856 | #!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests the --help flag of Google C++ Testing Framework.
SYNOPSIS
gtest_help_test.py --build_dir=BUILD/DIR
# where BUILD/DIR contains the built gtest_help_test_ file.
gtest_help_test.py
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import gtest_test_utils
# Platform probes: stream_result_to is Linux-only; death-test flags are
# hidden on Windows.
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
IS_WINDOWS = os.name == 'nt'

PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_help_test_')

FLAG_PREFIX = '--gtest_'
DEATH_TEST_STYLE_FLAG = FLAG_PREFIX + 'death_test_style'
STREAM_RESULT_TO_FLAG = FLAG_PREFIX + 'stream_result_to'
UNKNOWN_FLAG = FLAG_PREFIX + 'unknown_flag_for_testing'
LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'
# Malformed spellings of --gtest_list_tests that should still print help.
INCORRECT_FLAG_VARIANTS = [re.sub('^--', '-', LIST_TESTS_FLAG),
                           re.sub('^--', '/', LIST_TESTS_FLAG),
                           re.sub('_', '-', LIST_TESTS_FLAG)]
INTERNAL_FLAG_FOR_TESTING = FLAG_PREFIX + 'internal_flag_for_testing'

# NOTE: runs the test binary once at import time to probe whether this
# build supports death tests.
SUPPORTS_DEATH_TESTS = "DeathTest" in gtest_test_utils.Subprocess(
    [PROGRAM_PATH, LIST_TESTS_FLAG]).output

# The help message must match this regex.
HELP_REGEX = re.compile(
    FLAG_PREFIX + r'list_tests.*' +
    FLAG_PREFIX + r'filter=.*' +
    FLAG_PREFIX + r'also_run_disabled_tests.*' +
    FLAG_PREFIX + r'repeat=.*' +
    FLAG_PREFIX + r'shuffle.*' +
    FLAG_PREFIX + r'random_seed=.*' +
    FLAG_PREFIX + r'color=.*' +
    FLAG_PREFIX + r'print_time.*' +
    FLAG_PREFIX + r'output=.*' +
    FLAG_PREFIX + r'break_on_failure.*' +
    FLAG_PREFIX + r'throw_on_failure.*' +
    FLAG_PREFIX + r'catch_exceptions=0.*',
    re.DOTALL)
def RunWithFlag(flag):
  """Runs gtest_help_test_ with the given flag.

  Returns:
    the exit code and the text output as a tuple.
  Args:
    flag: the command-line flag to pass to gtest_help_test_, or None.
  """

  command = [PROGRAM_PATH] if flag is None else [PROGRAM_PATH, flag]
  child = gtest_test_utils.Subprocess(command)
  return child.exit_code, child.output
class GTestHelpTest(gtest_test_utils.TestCase):
  """Tests the --help flag and its equivalent forms."""

  def TestHelpFlag(self, flag):
    """Verifies correct behavior when help flag is specified.

    The right message must be printed and the tests must be
    skipped when the given flag is specified.

    Args:
      flag: A flag to pass to the binary or None.
    """

    exit_code, output = RunWithFlag(flag)
    # Asking for help is a success (exit 0) and prints the flag summary.
    self.assertEquals(0, exit_code)
    self.assert_(HELP_REGEX.search(output), output)

    # stream_result_to is only advertised on Linux builds.
    if IS_LINUX:
      self.assert_(STREAM_RESULT_TO_FLAG in output, output)
    else:
      self.assert_(STREAM_RESULT_TO_FLAG not in output, output)

    # death_test_style appears only when death tests are compiled in
    # (and never on Windows).
    if SUPPORTS_DEATH_TESTS and not IS_WINDOWS:
      self.assert_(DEATH_TEST_STYLE_FLAG in output, output)
    else:
      self.assert_(DEATH_TEST_STYLE_FLAG not in output, output)

  def TestNonHelpFlag(self, flag):
    """Verifies correct behavior when no help flag is specified.

    Verifies that when no help flag is specified, the tests are run
    and the help message is not printed.

    Args:
      flag: A flag to pass to the binary or None.
    """

    exit_code, output = RunWithFlag(flag)
    # The bundled tests intentionally fail, so a non-zero exit proves
    # they actually ran instead of printing help.
    self.assert_(exit_code != 0)
    self.assert_(not HELP_REGEX.search(output), output)

  def testPrintsHelpWithFullFlag(self):
    self.TestHelpFlag('--help')

  def testPrintsHelpWithShortFlag(self):
    self.TestHelpFlag('-h')

  def testPrintsHelpWithQuestionFlag(self):
    self.TestHelpFlag('-?')

  def testPrintsHelpWithWindowsStyleQuestionFlag(self):
    self.TestHelpFlag('/?')

  def testPrintsHelpWithUnrecognizedGoogleTestFlag(self):
    self.TestHelpFlag(UNKNOWN_FLAG)

  def testPrintsHelpWithIncorrectFlagStyle(self):
    for incorrect_flag in INCORRECT_FLAG_VARIANTS:
      self.TestHelpFlag(incorrect_flag)

  def testRunsTestsWithoutHelpFlag(self):
    """Verifies that when no help flag is specified, the tests are run
    and the help message is not printed."""

    self.TestNonHelpFlag(None)

  def testRunsTestsWithGtestInternalFlag(self):
    """Verifies that the tests are run and no help message is printed when
    a flag starting with Google Test prefix and 'internal_' is supplied."""

    self.TestNonHelpFlag(INTERNAL_FLAG_FOR_TESTING)
# Script entry point: gtest_test_utils.Main() runs the TestCase above.
if __name__ == '__main__':
  gtest_test_utils.Main()
| gpl-3.0 |
coderbone/SickRage-alt | lib/bs4/tests/test_docs.py | 607 | 1067 | "Test harness for doctests."
# pylint: disable-msg=E0611,W0142
# Make all classes in this module new-style under Python 2.
__metaclass__ = type
__all__ = [
    'additional_tests',
    ]

import atexit
import doctest
import os
#from pkg_resources import (
#    resource_filename, resource_exists, resource_listdir, cleanup_resources)
import unittest

# Doctest options used by the (currently commented-out) suite below.
DOCTEST_FLAGS = (
    doctest.ELLIPSIS |
    doctest.NORMALIZE_WHITESPACE |
    doctest.REPORT_NDIFF)
# def additional_tests():
# "Run the doc tests (README.txt and docs/*, if any exist)"
# doctest_files = [
# os.path.abspath(resource_filename('bs4', 'README.txt'))]
# if resource_exists('bs4', 'docs'):
# for name in resource_listdir('bs4', 'docs'):
# if name.endswith('.txt'):
# doctest_files.append(
# os.path.abspath(
# resource_filename('bs4', 'docs/%s' % name)))
# kwargs = dict(module_relative=False, optionflags=DOCTEST_FLAGS)
# atexit.register(cleanup_resources)
# return unittest.TestSuite((
# doctest.DocFileSuite(*doctest_files, **kwargs)))
| gpl-3.0 |
jonberliner/keras | keras/optimizers.py | 1 | 7022 | from __future__ import absolute_import
import theano
import theano.tensor as T
import numpy as np
from .utils.theano_utils import shared_zeros, shared_scalar
from six.moves import zip
def clip_norm(g, c, n):
    """Rescale gradient `g` to max-norm `c` given its norm `n`.

    A non-positive `c` disables clipping and returns `g` unchanged.
    """
    if c <= 0:
        return g
    return T.switch(T.ge(n, c), g * c / n, g)
def kl_divergence(p, p_hat):
    """Pointwise KL-divergence penalty between target `p` and estimate `p_hat`."""
    log_ratio = T.log(p / p_hat)
    return (p_hat - p) + p * log_ratio
class Optimizer(object):
    """Abstract optimizer base class.

    Subclasses implement get_updates(); get_gradients() provides shared
    symbolic differentiation with optional global-norm clipping.
    """

    def get_updates(self, params, constraints, loss):
        """Return the list of (shared_variable, new_value) update pairs."""
        raise NotImplementedError

    def get_gradients(self, loss, params):
        """Gradients of `loss` w.r.t. `params`, clipped when `clipnorm` > 0."""
        gradients = T.grad(loss, params)
        if hasattr(self, 'clipnorm') and self.clipnorm > 0:
            total_norm = T.sqrt(sum([T.sum(gr ** 2) for gr in gradients]))
            gradients = [clip_norm(gr, self.clipnorm, total_norm)
                         for gr in gradients]
        return gradients

    def get_config(self):
        """Serializable description of this optimizer."""
        return {"name": self.__class__.__name__}
class SGD(Optimizer):
    """Stochastic gradient descent with momentum, lr decay and optional
    Nesterov momentum."""

    def __init__(self, lr=0.01, momentum=0., decay=0., nesterov=False, *args, **kwargs):
        # NOTE: updating __dict__ from locals() also stores 'self', 'args'
        # and 'kwargs'; get_config() only reads the named hyperparameters.
        self.__dict__.update(kwargs)
        self.__dict__.update(locals())
        self.iterations = shared_scalar(0)

    def get_updates(self, params, constraints, loss):
        grads = self.get_gradients(loss, params)
        # Inverse-time decay of the learning rate with iteration count.
        lr = self.lr * (1.0 / (1.0 + self.decay * self.iterations))
        updates = [(self.iterations, self.iterations + 1.)]

        for p, g, c in zip(params, grads, constraints):
            m = shared_zeros(p.get_value().shape)  # momentum
            v = self.momentum * m - lr * g  # velocity
            updates.append((m, v))

            if self.nesterov:
                # Nesterov look-ahead: apply momentum to the new velocity.
                new_p = p + self.momentum * v - lr * g
            else:
                new_p = p + v

            updates.append((p, c(new_p)))  # apply constraints
        return updates

    def get_config(self):
        return {"name": self.__class__.__name__,
                "lr": self.lr,
                "momentum": self.momentum,
                "decay": self.decay,
                "nesterov": self.nesterov}
class RMSprop(Optimizer):
    """RMSprop: divide each gradient by a running RMS of its recent
    magnitudes (accumulator decayed by `rho`)."""

    def __init__(self, lr=0.001, rho=0.9, epsilon=1e-6, *args, **kwargs):
        # See SGD.__init__: locals() also stores 'self'/'args'/'kwargs'.
        self.__dict__.update(kwargs)
        self.__dict__.update(locals())

    def get_updates(self, params, constraints, loss):
        grads = self.get_gradients(loss, params)
        accumulators = [shared_zeros(p.get_value().shape) for p in params]
        updates = []

        for p, g, a, c in zip(params, grads, accumulators, constraints):
            new_a = self.rho * a + (1 - self.rho) * g ** 2  # update accumulator
            updates.append((a, new_a))
            # epsilon keeps the denominator away from zero.
            new_p = p - self.lr * g / T.sqrt(new_a + self.epsilon)
            updates.append((p, c(new_p)))  # apply constraints
        return updates

    def get_config(self):
        return {"name": self.__class__.__name__,
                "lr": self.lr,
                "rho": self.rho,
                "epsilon": self.epsilon}
class Adagrad(Optimizer):
    """Adagrad: per-parameter learning rates scaled by the accumulated
    sum of squared gradients."""

    def __init__(self, lr=0.01, epsilon=1e-6, *args, **kwargs):
        # See SGD.__init__: locals() also stores 'self'/'args'/'kwargs'.
        self.__dict__.update(kwargs)
        self.__dict__.update(locals())

    def get_updates(self, params, constraints, loss):
        grads = self.get_gradients(loss, params)
        accumulators = [shared_zeros(p.get_value().shape) for p in params]
        updates = []

        for p, g, a, c in zip(params, grads, accumulators, constraints):
            new_a = a + g ** 2  # update accumulator
            updates.append((a, new_a))
            new_p = p - self.lr * g / T.sqrt(new_a + self.epsilon)
            updates.append((p, c(new_p)))  # apply constraints
        return updates

    def get_config(self):
        return {"name": self.__class__.__name__,
                "lr": self.lr,
                "epsilon": self.epsilon}
class Adadelta(Optimizer):
    '''
        Reference: http://arxiv.org/abs/1212.5701
    '''
    def __init__(self, lr=1.0, rho=0.95, epsilon=1e-6, *args, **kwargs):
        # See SGD.__init__: locals() also stores 'self'/'args'/'kwargs'.
        self.__dict__.update(kwargs)
        self.__dict__.update(locals())

    def get_updates(self, params, constraints, loss):
        grads = self.get_gradients(loss, params)
        accumulators = [shared_zeros(p.get_value().shape) for p in params]
        delta_accumulators = [shared_zeros(p.get_value().shape) for p in params]
        updates = []

        for p, g, a, d_a, c in zip(params, grads, accumulators,
                                   delta_accumulators, constraints):
            new_a = self.rho * a + (1 - self.rho) * g ** 2  # update accumulator
            updates.append((a, new_a))

            # use the new accumulator and the *old* delta_accumulator
            # (ordering mandated by the paper's Algorithm 1).
            update = g * T.sqrt(d_a + self.epsilon) / T.sqrt(new_a +
                                                             self.epsilon)

            new_p = p - self.lr * update
            updates.append((p, c(new_p)))  # apply constraints

            # update delta_accumulator
            new_d_a = self.rho * d_a + (1 - self.rho) * update ** 2
            updates.append((d_a, new_d_a))
        return updates

    def get_config(self):
        return {"name": self.__class__.__name__,
                "lr": self.lr,
                "rho": self.rho,
                "epsilon": self.epsilon}
class Adam(Optimizer):
    '''
        Reference: http://arxiv.org/abs/1412.6980

        Default parameters follow those provided in the original paper
        lambda is renamed kappa.
    '''
    def __init__(self, lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-8, kappa=1-1e-8, *args, **kwargs):
        # See SGD.__init__: locals() also stores 'self'/'args'/'kwargs'.
        self.__dict__.update(kwargs)
        self.__dict__.update(locals())
        self.iterations = shared_scalar(0)

    def get_updates(self, params, constraints, loss):
        grads = self.get_gradients(loss, params)
        updates = [(self.iterations, self.iterations+1.)]

        # Decay both beta coefficients by kappa**t (t = iteration count).
        i = self.iterations
        beta_1_t = self.beta_1 * (self.kappa**i)
        # the update below seems missing from the paper, but is obviously required
        beta_2_t = self.beta_2 * (self.kappa**i)

        for p, g, c in zip(params, grads, constraints):
            m = theano.shared(p.get_value() * 0.)  # zero init of moment
            v = theano.shared(p.get_value() * 0.)  # zero init of velocity

            # Biased first/second moment estimates...
            m_t = (beta_1_t * m) + (1 - beta_1_t) * g
            v_t = (beta_2_t * v) + (1 - beta_2_t) * (g**2)

            # ...and their bias-corrected counterparts.
            m_b_t = m_t / (1 - beta_1_t)
            v_b_t = v_t / (1 - beta_2_t)

            p_t = p - self.lr * m_b_t / (T.sqrt(v_b_t) + self.epsilon)

            updates.append((m, m_t))
            updates.append((v, v_t))
            updates.append((p, c(p_t)))  # apply constraints
        return updates

    def get_config(self):
        return {"name": self.__class__.__name__,
                "lr": self.lr,
                "beta_1": self.beta_1,
                "beta_2": self.beta_2,
                "epsilon": self.epsilon,
                "kappa": self.kappa}
# aliases: lowercase names accepted by get() below.
sgd = SGD
rmsprop = RMSprop
adagrad = Adagrad
adadelta = Adadelta
adam = Adam

from .utils.generic_utils import get_from_module


def get(identifier, kwargs=None):
    """Resolve `identifier` (name or instance) to an optimizer instance."""
    return get_from_module(identifier, globals(), 'optimizer', instantiate=True, kwargs=kwargs)
| mit |
dims/heat | heat/api/middleware/ssl.py | 4 | 1575 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from debtcollector import removals
from oslo_config import cfg
from oslo_middleware import ssl

# Deployer-configurable name of the header carrying the original request
# scheme when TLS is terminated by an upstream proxy.
ssl_middleware_opts = [
    cfg.StrOpt('secure_proxy_ssl_header',
               default='X-Forwarded-Proto',
               deprecated_group='DEFAULT',
               help="The HTTP Header that will be used to determine which "
                    "the original request protocol scheme was, even if it was "
                    "removed by an SSL terminator proxy.")
]

# Mark this whole module deprecated in favor of the replacement middleware.
removals.removed_module(__name__,
                        "oslo_middleware.http_proxy_to_wsgi")
class SSLMiddleware(ssl.SSLMiddleware):
    """oslo SSL middleware registering Heat's own (deprecated) option."""

    def __init__(self, application, *args, **kwargs):
        # NOTE(cbrandily): calling super(ssl.SSLMiddleware, self).__init__
        # allows to define our opt (including a deprecation).
        super(ssl.SSLMiddleware, self).__init__(application, *args, **kwargs)
        self.oslo_conf.register_opts(
            ssl_middleware_opts, group='oslo_middleware')
def list_opts():
    """Yield (group, options) pairs for oslo-config-generator discovery."""
    yield None, ssl_middleware_opts
| apache-2.0 |
bgxavier/nova | nova/virt/vmwareapi/read_write_util.py | 73 | 2238 | # Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Classes to handle image files
Collection of classes to handle image upload/download to/from Image service
(like Glance image storage and retrieval service) from/to ESX/ESXi server.
"""
import urllib
from oslo_utils import netutils
from oslo_vmware import rw_handles
class VMwareHTTPReadFile(rw_handles.FileHandle):
    """VMware file read handler class."""

    def __init__(self, host, port, data_center_name, datastore_name, cookies,
                 file_path, scheme="https"):
        self._base_url = self._get_base_url(scheme, host, port, file_path)
        # Datastore location is passed as query parameters on the folder URL.
        param_list = {"dcPath": data_center_name, "dsName": datastore_name}
        self._base_url = self._base_url + "?" + urllib.urlencode(param_list)
        self._conn = self._create_read_connection(self._base_url,
                                                  cookies=cookies)
        rw_handles.FileHandle.__init__(self, self._conn)

    def read(self, chunk_size):
        # NOTE(review): chunk_size is ignored; every call reads the fixed
        # rw_handles.READ_CHUNKSIZE — confirm callers rely on this.
        return self._file_handle.read(rw_handles.READ_CHUNKSIZE)

    def _get_base_url(self, scheme, host, port, file_path):
        # IPv6 literals must be bracketed inside a URL authority.
        if netutils.is_valid_ipv6(host):
            base_url = "%s://[%s]:%s/folder/%s" % (scheme, host, port,
                                                   urllib.pathname2url(file_path))
        else:
            base_url = "%s://%s:%s/folder/%s" % (scheme, host, port,
                                                 urllib.pathname2url(file_path))
        return base_url

    def get_size(self):
        """Get size of the file to be read."""
        return self._file_handle.headers.get("Content-Length", -1)
| apache-2.0 |
SlimRoms/kernel_motorola_shamu | scripts/rt-tester/rt-tester.py | 11005 | 5307 | #!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <tglx@linutronix.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0

# sysfs interface exposed by the kernel rt-mutex tester module.
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"

# Command opcodes
cmd_opcodes = {
    "schedother" : "1",
    "schedfifo" : "2",
    "lock" : "3",
    "locknowait" : "4",
    "lockint" : "5",
    "lockintnowait" : "6",
    "lockcont" : "7",
    "unlock" : "8",
    "signal" : "11",
    "resetevent" : "98",
    "reset" : "99",
    }

# Test opcodes: name -> [status-field letter, relation, fixed argument].
# The fixed argument is only used by the "M" (mutex state) checks.
test_opcodes = {
    "prioeq" : ["P" , "eq" , None],
    "priolt" : ["P" , "lt" , None],
    "priogt" : ["P" , "gt" , None],
    "nprioeq" : ["N" , "eq" , None],
    "npriolt" : ["N" , "lt" , None],
    "npriogt" : ["N" , "gt" , None],
    "unlocked" : ["M" , "eq" , 0],
    "trylock" : ["M" , "eq" , 1],
    "blocked" : ["M" , "eq" , 2],
    "blockedwake" : ["M" , "eq" , 3],
    "locked" : ["M" , "eq" , 4],
    "opcodeeq" : ["O" , "eq" , None],
    "opcodelt" : ["O" , "lt" , None],
    "opcodegt" : ["O" , "gt" , None],
    "eventeq" : ["E" , "eq" , None],
    "eventlt" : ["E" , "lt" , None],
    "eventgt" : ["E" , "gt" , None],
    }
# Print usage information
def usage():
    """Print command-line help for this (Python 2) script."""
    print "rt-tester.py <-c -h -q -t> <testfile>"
    print " -c display comments after first command"
    print " -h help"
    print " -q quiet mode"
    print " -t test mode (syntax check)"
    print " testfile: read test specification from testfile"
    print " otherwise from stdin"
    return
# Print progress when not in quiet mode
def progress(str):
    """Echo str to stdout unless running in quiet mode."""
    if not quiet:
        print str
# Analyse a status value
def analyse(val, top, arg):
    """Compare one status value against a test opcode.

    val: status value string read from the sysfs status file.
    top: [field letter, relation, fixed argument] triple from test_opcodes.
    arg: argument column of the test line (digit index for "M" checks,
         opcode name or number for "O" checks, plain number otherwise).
    Returns 1 when the relation holds, 0 otherwise.
    """
    intval = int(val)
    if top[0] == "M":
        # Mutex state: extract the decimal digit selected by arg.
        # Floor division ("//") keeps the Python 2 integer-division
        # semantics this script was written for and makes the digit
        # extraction correct under Python 3 as well.
        intval = intval // (10 ** int(arg))
        intval = intval % 10
        argval = top[2]
    elif top[0] == "O":
        # Opcode: allow symbolic opcode names via the cmd_opcodes table.
        argval = int(cmd_opcodes.get(arg, arg))
    else:
        argval = int(arg)

    # progress("%d %s %d" %(intval, top[1], argval))

    if top[1] == "eq" and intval == argval:
        return 1
    if top[1] == "lt" and intval < argval:
        return 1
    if top[1] == "gt" and intval > argval:
        return 1
    return 0
# Parse the commandline
try:
    (options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
    usage()
    sys.exit(1)

# Parse commandline options
for option, value in options:
    if option == "-c":
        comments = 1
    elif option == "-q":
        quiet = 1
    elif option == "-t":
        test = 1
    elif option == '-h':
        usage()
        sys.exit(0)

# Select the input source: the named test file, otherwise stdin.
if arguments:
    try:
        fd = open(arguments[0])
    except Exception,ex:
        sys.stderr.write("File not found %s\n" %(arguments[0]))
        sys.exit(1)
else:
    fd = sys.stdin
linenr = 0

# Read the test patterns; each line is "cmd:opcode:threadid:data".
while 1:
    linenr = linenr + 1
    line = fd.readline()
    if not len(line):
        break
    line = line.strip()
    parts = line.split(":")
    if not parts or len(parts) < 1:
        continue
    if len(parts[0]) == 0:
        continue
    if parts[0].startswith("#"):
        # Comment line: only echoed after the first command with -c.
        if comments > 1:
            progress(line)
        continue
    if comments == 1:
        comments = 2
    progress(line)
    cmd = parts[0].strip().lower()
    opc = parts[1].strip().lower()
    tid = parts[2].strip()
    dat = parts[3].strip()
    try:
        # Test or wait for a status value
        if cmd == "t" or cmd == "w":
            testop = test_opcodes[opc]
            fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
            if test:
                # Syntax-check mode: print the sysfs path, do not poll.
                print fname
                continue
            # "t" checks once; "w" keeps polling until the check passes.
            while 1:
                query = 1
                fsta = open(fname, 'r')
                status = fsta.readline().strip()
                fsta.close()
                stat = status.split(",")
                for s in stat:
                    s = s.strip()
                    if s.startswith(testop[0]):
                        # Separate status value
                        val = s[2:].strip()
                        query = analyse(val, testop, dat)
                        break
                if query or cmd == "t":
                    break
                progress(" " + status)
            if not query:
                sys.stderr.write("Test failed in line %d\n" %(linenr))
                sys.exit(1)
        # Issue a command to the tester
        elif cmd == "c":
            cmdnr = cmd_opcodes[opc]
            # Build command string and sys filename
            cmdstr = "%s:%s" %(cmdnr, dat)
            fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
            if test:
                print fname
                continue
            fcmd = open(fname, 'w')
            fcmd.write(cmdstr)
            fcmd.close()
    except Exception,ex:
        sys.stderr.write(str(ex))
        sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
        if not test:
            fd.close()
            sys.exit(1)

# Normal exit pass
print "Pass"
sys.exit(0)
| gpl-2.0 |
errikos/amtt | amtt/exporter/isograph/__init__.py | 1 | 2915 | """Exporter module for Isograph Availability Workbench."""
import logging
import networkx as nx
from itertools import count
from amtt.translator.ir import component_basename
from amtt.exporter import Exporter
from amtt.exporter.isograph.emitter.xml import XmlEmitter
from amtt.exporter.isograph.rbd import Rbd
from amtt.exporter.isograph.failure_models import fm_export
_logger = logging.getLogger(__name__)
class IsographExporter(Exporter):
    """Exporter to export the model to Isograph."""

    def __init__(self, translator):
        """Initialize IsographExporter."""
        self._translator = translator
        self._emitter = XmlEmitter(translator.output_basedir)

    @staticmethod
    def normalize_block_names(ir_container):
        """Normalize the component (block) names.

        Isograph imposes a 40 character limit for the component names.
        In case the model uses template components, there is a big chance that
        the names will grow very big in length. Therefore, we store the
        base name in the description field and assign a unique integer (ID)
        as the components name.
        """
        g = ir_container.component_graph
        if ir_container.uses_templates:
            _logger.info('Template usage detected:')
            _logger.info(' * Normalizing component names for Isograph')
            # Create relabeling mapping.
            # Each component name will be replaced with a number (ID).
            relabel_mapping = {n: c for n, c in zip(g.nodes_iter(), count(1))}
            del relabel_mapping['ROOT']  # We don't want to relabel ROOT
            # Relabel and rename components graph
            # -- copy=False means "relabel in-place"
            nx.relabel_nodes(g, relabel_mapping, copy=False)
            for u, v in nx.bfs_edges(g, 'ROOT'):
                # -- get a hold of the associated object
                vo = g.node[v]['obj']
                # -- set base name as description
                vo.description = component_basename(vo.name)
                # -- set ID number as name
                vo.name = v
            # Note: No need to relabel or rename failures graph

    def export(self):
        """Export the model to Isograph importable format."""
        # Normalize block names, if necessary
        self.normalize_block_names(self._translator.ir_container)
        # Export RBD (blocks, nodes, connections)
        self._export_rbd()
        # Export failure model definitions
        self._export_failure_models()
        # Write output file
        self._emitter.commit()

    def _export_rbd(self):
        """Build the reliability block diagram and serialize it."""
        # Create block diagram from input
        rbd = Rbd()
        rbd.from_ir_container(self._translator.ir_container)
        # Dump reliability block diagram to output
        rbd.serialize(self._emitter)

    def _export_failure_models(self):
        """Emit the failure model definitions for the model."""
        fm_export(self._translator.ir_container, self._emitter)
catapult-project/catapult-csm | third_party/google-endpoints/cachetools/ttl.py | 8 | 6050 | import collections
import time
from .cache import Cache
class _Link(object):
__slots__ = ('key', 'expire', 'next', 'prev')
def __init__(self, key=None, expire=None):
self.key = key
self.expire = expire
def __reduce__(self):
return _Link, (self.key, self.expire)
def unlink(self):
next = self.next
prev = self.prev
prev.next = next
next.prev = prev
class _Timer(object):
def __init__(self, timer):
self.__timer = timer
self.__nesting = 0
def __call__(self):
if self.__nesting == 0:
return self.__timer()
else:
return self.__time
def __enter__(self):
if self.__nesting == 0:
self.__time = time = self.__timer()
else:
time = self.__time
self.__nesting += 1
return time
def __exit__(self, *exc):
self.__nesting -= 1
def __reduce__(self):
return _Timer, (self.__timer,)
def __getattr__(self, name):
return getattr(self.__timer, name)
class TTLCache(Cache):
    """LRU Cache implementation with per-item time-to-live (TTL) value."""

    def __init__(self, maxsize, ttl, timer=time.time, missing=None,
                 getsizeof=None):
        Cache.__init__(self, maxsize, missing, getsizeof)
        # Circular doubly-linked list of _Link nodes ordered by expiry
        # (oldest first); the root is a sentinel.
        self.__root = root = _Link()
        root.prev = root.next = root
        # key -> _Link; OrderedDict order tracks least-recent use.
        self.__links = collections.OrderedDict()
        self.__timer = _Timer(timer)
        self.__ttl = ttl

    def __contains__(self, key):
        try:
            link = self.__links[key]  # no reordering
        except KeyError:
            return False
        else:
            # Present but already expired counts as absent.
            return not (link.expire < self.__timer())

    def __getitem__(self, key, cache_getitem=Cache.__getitem__):
        try:
            link = self.__getlink(key)
        except KeyError:
            expired = False
        else:
            expired = link.expire < self.__timer()
        if expired:
            # Expired entries behave exactly like missing keys.
            return self.__missing__(key)
        else:
            return cache_getitem(self, key)

    def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
        # Freeze time so expiry and the new item's TTL share a timestamp.
        with self.__timer as time:
            self.expire(time)
            cache_setitem(self, key, value)
        try:
            link = self.__getlink(key)
        except KeyError:
            self.__links[key] = link = _Link(key)
        else:
            link.unlink()
        # (Re-)insert the link just before the root, i.e. newest expiry.
        link.expire = time + self.__ttl
        link.next = root = self.__root
        link.prev = prev = root.prev
        prev.next = root.prev = link

    def __delitem__(self, key, cache_delitem=Cache.__delitem__):
        cache_delitem(self, key)
        link = self.__links.pop(key)
        link.unlink()
        # Deleting an already-expired item still raises KeyError.
        if link.expire < self.__timer():
            raise KeyError(key)

    def __iter__(self):
        root = self.__root
        curr = root.next
        while curr is not root:
            # "freeze" time for iterator access
            with self.__timer as time:
                if not (curr.expire < time):
                    yield curr.key
            curr = curr.next

    def __len__(self):
        # Subtract expired links (at the head of the expiry list).
        root = self.__root
        curr = root.next
        time = self.__timer()
        count = len(self.__links)
        while curr is not root and curr.expire < time:
            count -= 1
            curr = curr.next
        return count

    def __setstate__(self, state):
        # Rebuild the expiry list (pickled _Link nodes lose their wiring).
        self.__dict__.update(state)
        root = self.__root
        root.prev = root.next = root
        for link in sorted(self.__links.values(), key=lambda obj: obj.expire):
            link.next = root
            link.prev = prev = root.prev
            prev.next = root.prev = link
        self.expire(self.__timer())

    def __repr__(self, cache_repr=Cache.__repr__):
        with self.__timer as time:
            self.expire(time)
            return cache_repr(self)

    @property
    def currsize(self):
        with self.__timer as time:
            self.expire(time)
            return super(TTLCache, self).currsize

    @property
    def timer(self):
        """The timer function used by the cache."""
        return self.__timer

    @property
    def ttl(self):
        """The time-to-live value of the cache's items."""
        return self.__ttl

    def expire(self, time=None):
        """Remove expired items from the cache."""
        if time is None:
            time = self.__timer()
        root = self.__root
        curr = root.next
        links = self.__links
        cache_delitem = Cache.__delitem__
        # Expiry list is ordered oldest-first, so stop at the first
        # non-expired link.
        while curr is not root and curr.expire < time:
            cache_delitem(self, curr.key)
            del links[curr.key]
            next = curr.next
            curr.unlink()
            curr = next

    def clear(self):
        with self.__timer as time:
            self.expire(time)
            Cache.clear(self)

    def get(self, *args, **kwargs):
        with self.__timer:
            return Cache.get(self, *args, **kwargs)

    def pop(self, *args, **kwargs):
        with self.__timer:
            return Cache.pop(self, *args, **kwargs)

    def setdefault(self, *args, **kwargs):
        with self.__timer:
            return Cache.setdefault(self, *args, **kwargs)

    def popitem(self):
        """Remove and return the `(key, value)` pair least recently used that
        has not already expired.

        """
        with self.__timer as time:
            self.expire(time)
            try:
                key = next(iter(self.__links))
            except StopIteration:
                raise KeyError('%s is empty' % self.__class__.__name__)
            else:
                return (key, self.pop(key))

    # Use OrderedDict.move_to_end when available (Python 3); otherwise
    # emulate it with pop + reinsert.
    if hasattr(collections.OrderedDict, 'move_to_end'):
        def __getlink(self, key):
            value = self.__links[key]
            self.__links.move_to_end(key)
            return value
    else:
        def __getlink(self, key):
            value = self.__links.pop(key)
            self.__links[key] = value
            return value
| bsd-3-clause |
corredD/upy | autodeskmaya/mayaHelper.py | 1 | 118218 |
"""
Copyright (C) <2010> Autin L. TSRI
This file git_upy/autodeskmaya/mayaHelper.py is part of upy.
upy is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
upy is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with upy. If not, see <http://www.gnu.org/licenses/gpl-3.0.html>.
"""
# -*- coding: utf-8 -*-
"""
Created on Sun Dec 5 23:30:44 2010
@author: Ludovic Autin - ludovic.autin@gmail.com
"""
import sys, os, os.path, struct, math, string
from math import *
#import numpy
from types import StringType, ListType
import maya
from maya import cmds,mel,utils
import maya.OpenMaya as om
import maya.OpenMayaAnim as oma
import maya.OpenMayaFX as omfx
import pymel.core as pm
#base helper class
from upy import hostHelper
if hostHelper.usenumpy:
import numpy
from numpy import matrix
from upy.hostHelper import Helper
lefthand =[[ 1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, -1, 0],
[0, 0, 0, 1]]
from upy.transformation import decompose_matrix
class MayaSynchro:
    """Wrap a Maya timer callback that fires ``doit`` every ``period`` seconds.

    Pass ``cb`` to override the default no-op ``doit`` handler.
    """
    #period problem
    def __init__(self,cb=None, period=0.1):
        # period: callback interval in seconds
        self.period = period
        # callback: the MCallbackId returned by addTimerCallback (None until set)
        self.callback = None
        self.timeControl = oma.MAnimControl()
        if cb is not None :
            self.doit = cb
    def change_period(self,newP):
        # Re-register the timer with a new period.
        # NOTE(review): assumes set_callback() was called before — otherwise
        # remove_callback() receives None; verify caller order.
        self.period = newP
        self.remove_callback()
        self.set_callback()
    def set_callback(self):
        # Register doit as a Maya timer callback and keep the id for removal.
        self.callback = om.MTimerMessage.addTimerCallback(self.period,self.doit)
    def remove_callback(self):
        # Unregister the previously stored callback id.
        om.MMessage.removeCallback(self.callback)
    def doit(self,*args,**kw):#period,time,userData=None):
        # Default handler: does nothing; replaced by cb in __init__.
        pass
class mayaHelper(Helper):
"""
The maya helper abstract class
============================
This is the maya helper Object. The helper
give access to the basic function need for create and edit a host 3d object and scene.
"""
SPLINE = "kNurbsCurve"
INSTANCE = "kTransform"
MESH = "kTransform"
POLYGON = "kMesh"#"kTransform"
# MESH = "kMesh"
EMPTY = "kTransform"
BONES="kJoint"
PARTICULE = "kParticle"
SPHERE = "sphere"
CYLINDER = "cylinder"
CUBE = "cube"
IK="kIkHandle"
msutil = om.MScriptUtil()
pb = False
pbinited = False
host = "maya"
    def __init__(self,master=None,**kw):
        """Set up the Maya helper: alias generic helper names to Maya
        implementations and map light types to their cmds constructors."""
        Helper.__init__(self)
        # aliases so generic upy code can call host-agnostic names
        self.updateAppli = self.update
        self.Cube = self.box
        self.Box = self.box
        self.Geom = self.newEmpty
        #self.getCurrentScene = c4d.documents.GetActiveDocument
        self.IndexedPolygons = self.polygons
        self.Points = self.PointCloudObject
        self.pb = True
        # host file extension (maya ascii)
        self.hext = "ma"
        self.timeline_cb={}
        self.LIGHT_OPTIONS = {"Area" : maya.cmds.ambientLight,
                            "Sun" : maya.cmds.directionalLight,
                            "Spot":maya.cmds.spotLight}
    def fit_view3D(self):
        """Fit the 3D view to the scene content. Not implemented for Maya."""
        pass#
    def resetProgressBar(self,max=None):
        """reset the Progress Bar, using value"""
        if self.pb :
            # gMainProgressBar is Maya's global MEL progress bar handle
            gMainProgressBar = maya.mel.eval('$tmp = $gMainProgressBar');
            maya.cmds.progressBar(gMainProgressBar, edit=True, endProgress=True)
            self.pbinited = False
#            self.pb = False
#        maya.cmds.progressBar(maya.pb, edit=True, maxValue=max,progress=0)
    def progressBar(self,progress=None,label=None):
        """ update the progress bar status by progress value and label string
        @type progress: Int/Float
        @param progress: the new progress, expected in the 0.0-1.0 range
                         (scaled by 100 internally); 1.0 resets the bar
        @type label: string
        @param label: the new message to put in the progress status
        """
        if self.pb :
            gMainProgressBar = maya.mel.eval('$tmp = $gMainProgressBar');
            if not self.pbinited :
                # lazily begin the progress session on first use
                cmds.progressBar( gMainProgressBar,
                                edit=True,
                                beginProgress=True,
                                isInterruptable=False,
                                status=label,
                                maxValue=100)
    #        if progress == 1 :
    #            prev = cmds.progressBar(gMainProgressBar,q=1,progress=1)
    #            progress = prev/100. + 0.1
    #        progress*=100.
            if label is not None and progress is None :
                cmds.progressBar(gMainProgressBar, edit=True, status = label)
            elif label is not None and progress is not None:
                cmds.progressBar(gMainProgressBar, edit=True, progress=progress*100.,status = label)
            elif label is None and progress is not None:
                cmds.progressBar(gMainProgressBar, edit=True, progress=progress*100.)
            if progress == 1 or progress == 100.:
                # end of task: close the progress session
                self.resetProgressBar()
            #maxValue = 100
            #did not work
            #maya.cmds.progressBar(maya.pb, edit=True, progress=progress*100)
#            cmds.progressBar(maya.pb, edit=True, step=1)
            #maya.cmds.progressBar(maya.pb, edit=True, step=1)
    def synchronize(self,cb):
        """Register cb as a periodic (0.05s) Maya timer callback."""
        self.timeline_cb[cb] = MayaSynchro(cb=cb,period=0.05)
        self.timeline_cb[cb].set_callback()
    def unsynchronize(self,cb):
        """Unregister a callback previously added with synchronize()."""
        self.timeline_cb[cb].remove_callback()
    def update(self,):
        """Force a viewport redraw."""
        #how do I update the redraw
        cmds.refresh()
    def updateAppli(self,):
        """Force a viewport redraw (alias of update, rebound in __init__)."""
        #how do I update the redraw
        cmds.refresh()
def checkName(self,name):
invalid=[]
if type(name) is None :
print ("None name or not a string",name)
return ""
#sometime the name is a list ie [u'name']
if type(name) is list or type(name) is tuple :
if len(name) == 1 :
name = name[0]
elif len(name) == 2 :
name = name[1]#transform node
else :
name = name[0] #?
if (type(name) is not str and type(name) is not unicode) :
print ("not a string",name,type(name))
return ""
if not len(name):
print ("empty name",name)
for i in range(9):
invalid.append(str(i))
if type(name) is list or type(name) is tuple:
name = name[0]
if type(name) is not str and type(name) is not unicode:
name = name.name()
if len(name) and name[0] in invalid:
name= name[1:]
#also remove some character and replace it by _
name=name.replace(":","_").replace(" ","_").replace("'","").replace("-","_")
return name
    def setCurrentSelection(self,obj):
        """Select the given object (or each object of a list) in the scene."""
        if obj is None :
            return
        if type (obj) is list or type (obj) is tuple :
            for o in obj :
                cmds.select(self.getObject(o))
        else :
            cmds.select(self.getObject(obj))
    def getCurrentSelection(self):
        """Return the active selection as a list of object name strings."""
        slist = om.MSelectionList()
        if not slist :
            return []
        om.MGlobal.getActiveSelectionList(slist)
        selection = []
        slist.getSelectionStrings(selection)
        return selection
    def checkPrimitive(self,object):
        """Probe whether object is a known primitive by trying each query
        command; return "sphere", "cube", "cylinder" or None."""
        try :
            cmds.polySphere(object,q=1,r=1)
            return "sphere"
        except :
            pass
        try :
            cmds.sphere(object,q=1,r=1)
            return "sphere"
        except :
            pass
        try :
            cmds.polyCube(object,q=1,w=1)
            return "cube"
        except :
            pass
        try :
            cmds.polyCylinder(object,q=1,r=1)
            return "cylinder"
        except :
            pass
        return None
    def getType(self,object):
        """Return the object type: a primitive name if detected, otherwise
        the Maya API type string (or the Python type as a fallback)."""
        #first tryto see if isa primitive
        prim = self.checkPrimitive(object)
        if prim is not None :
            return prim
        object = self.getNode(object)
        if hasattr(object,"apiTypeStr"):
#            print (object.apiTypeStr())
            return object.apiTypeStr()
        else :
#            print (type(object))
            return type(object)
        #return type(object)
    def getMName(self,o):
        """Return the name of an OpenMaya function-set/node object."""
        return o.name()
    def setName(self,o,name):
        """Rename object o to name (shape nodes are renamed too)."""
        if o is None :
            return
        cmds.rename( self.checkName(o), name, ignoreShape=False)
    def getName(self,o):
        """Return a sanitized name string for o, which may be a string,
        an MFnMesh, an object with a .name attribute/method, or a list."""
        if o is None: return ""
        if type(o) == str or type(o) == unicode :
            name = o.replace(":","_").replace(" ","_").replace("'","").replace("-","_")
        # NOTE(review): this branch is unreachable — unicode is already
        # handled by the condition above.
        elif type(o) == unicode : name = o
        elif type(o) is om.MFnMesh:
            return o
        elif hasattr(o,"name") :
            if type(o.name) == str :
                return o.name
            else : return o.name()
        elif type(o) is list or type(o) is tuple:
            name=o[0]
        else : name=o
        return name
    def getMObject(self,name):
        """Return (MObject, MFnDependencyNode) for the uniquely-named node."""
        # Create a selection list, get an MObject of the nodes which name is name
        selectionList = om.MSelectionList()
        selectionList.add( name ) #should be unic..
        node = om.MObject()
        selectionList.getDependNode( 0, node )
        #//Create a function set, connect to it,
        fnDep = om.MFnDependencyNode(node)
        #print fnDep.name() #object name
        #print fnDep.typeName() #type name ie mesh, transform etc..
        return node,fnDep
    def getObject(self,name,doit=True):
        """Resolve name (possibly a list) to an existing scene object name.

        With doit=True the name is looked up via cmds.ls; returns None when
        nothing matches, a single name when unique, else the match list.
        """
        if type(name) is list or type(name) is tuple :
            if len(name) == 1 :
                name = name[0]
            elif len(name) == 2 :
                name = name[1]#transform node
            else :
                name = name[0] #?
        name=self.checkName(name)
        if name.find(":") != -1 :
            name=name.replace(":","_").replace(" ","_").replace("'","").replace("-","_")
        if doit :
            name=cmds.ls(name)
            if len(name)==0:
                return None
            if len(name) == 1 :
                return name[0]
        return name
    def checkIsMesh(self,poly):
        """Return an MFnMesh for poly when possible; otherwise return the
        shape/dag path unchanged."""
        if type(poly) is str or type(poly) is unicode :
            mesh = self.getMShape(poly)#dagPath
        else :
            #have to a object shape node or dagpath
            mesh = poly
        try :
            meshnode = om.MFnMesh(mesh)
            return meshnode
        except :
            return mesh
    def getMesh(self,name):
        """Look up a mesh by name via cmds.ls; non-string input is
        returned unchanged, failures yield None."""
        mesh = None
        if type(name) != str:
            return name
        #       path = om.MDagPath()
        try :
            name = self.checkName(name)
            mesh = cmds.ls(name)#NMesh.GetRaw(name)
        except:
            mesh = None
        return mesh
    def getMeshFrom(self,obj):
        """Return the shape dag path of obj (name or OpenMaya object)."""
        if type(obj) is not str and type(obj) is not unicode:
            obj = self.getMName(obj)
        return self.getMShape(obj)
    def getTransformNode(self,name):
        """Return (transform MObject, MDagPath) for the named node."""
        if type(name) is list :
            name = name[0]
        if type(name) is str or type(name) is unicode :
            name = self.checkName(name)
            node = self.getNode(name)
        else :
            node = name
        dag = om.MFnDagNode(node)
        path = om.MDagPath()
        dag.getPath(path)
        return path.transform(),path
    def getMShape(self,name,):
        """Return the MDagPath of the shape below the named node, recursing
        into the first child when the node has no shape directly."""
#        print name,type(name)
        if type(name) is list :
            name = name[0]
        if type(name) is str or type(name) is unicode :
            name = self.checkName(name)
            node = self.getNode(name)
        else :
            node = name
        dag = om.MFnDagNode(node)
        path = om.MDagPath()
        dag.getPath(path)
#        self.msutil.createFromInt(0)
#        pInt = self.msutil.asUintPtr()
#        path.numberOfShapesDirectlyBelow(pInt)
        try :
            path.extendToShape()
            return path
        except :
            # no shape directly below: recurse on the first child
#            if self.msutil.getUint(pInt) == 0 :
            node = path.child(0)
            return self.getMShape(node)
        #problem with primitive
#        try :
#            path.extendToShape()
#        except :
#            path = None
#        return path
    def deleteObject(self,obj):
        """Delete obj (a name, an OpenMaya object, or a list of either)."""
        # NOTE(review): sc is unused here
        sc = self.getCurrentScene()
        if type(obj) is str or type(obj) is unicode:
            obj=self.checkName(obj)
        else :
            if type(obj) is list or type(obj) is tuple :
                for o in obj :
                    self.deleteObject(o)
            else :
                obj = obj.name()
        try :
            #print "del",obj
            cmds.delete(obj)
        except:
            print "problem deleting ", obj
#######Special for maya#######################
    def getNode( self,name ):
        """Return the MObject dependency node for the given (sanitized) name."""
#        print "getNode",type(name)
#        if type(name) != str :
#            return name
        name = self.checkName(name)
        selectionList = om.MSelectionList()
        selectionList.add( name )
        node = om.MObject()
        selectionList.getDependNode( 0, node )
        return node
    def getNodePlug(self, attrName, nodeObject ):
        """Return the MPlug for attrName on nodeObject.

        example:
            translatePlug = nameToNodePlug( "translateX", perspNode )
            print "Plug name: %s" % translatePlug.name()
            print "Plug value %g" % translatePlug.asDouble()
        """
        depNodeFn = om.MFnDependencyNode( nodeObject )
        attrObject = depNodeFn.attribute( attrName )
        plug = om.MPlug( nodeObject, attrObject )
        return plug
################################################
    def newLocator(self,name,location=None,**kw):
        """Create a space locator named name; optional kw 'parent' reparents it."""
        name = self.checkName(name)
        if name.find(":") != -1 : name=name.replace(":","_")
        empty=cmds.spaceLocator( n=name, a=True)
        parent = None
        if "parent" in kw :
            parent = kw["parent"]
        self.reParent(empty,parent)
        return str(empty)
    def newEmpty(self,name,location=None,**kw):
        """Create an empty group node named name; optional kw 'parent' reparents it."""
        #return self.newLocator(name,location=location, **kw)
        name = self.checkName(name)
        if name.find(":") != -1 : name=name.replace(":","_")
        empty=cmds.group( em=True, n=name)
        parent = None
        if "parent" in kw :
            parent = kw["parent"]
        self.reParent(empty,parent)
        return str(empty)
    def updateMasterInstance(self,master, newobjects,instance=True, **kw):
        """
        Update the reference of the passed instance by adding/removing-hiding objects
        * overwrited by children class for each host
        >>> sph = helper.Sphere("sph1")
        >>> instance_sph = helper.newInstance("isph1",sph,location = [10.0,0.0,0.0])
        @type instance: string/hostObj
        @param instance: name of the instance
        @type objects: list hostObject/string
        @param objects: the list of object to remove/add to the instance reference
        @type add: bool
        @param add: if True add the objec else remove
        @type hide: bool
        @param hide: hide instead of remove
        @type kw: dictionary
        @param kw: you can add your own keyword, but it should be interpreted by all host
        """
        #the instance shoud point to an empy that have shape as child
        #what we should do is eitherduplicae or reParent the the new object under this master parent
        #or usethe replace command ? use particule ?
        #replace the mesh node of the master by the given ones....
        #hide and remove every previous children....
        # duplicate then delete the current children of the master
        chs = self.getChilds(master)
        for o in chs :
            r=cmds.duplicate(o, renameChildren=True)
            print r
        cmds.delete(chs)#or move or uninstance ?
        if instance :
            n=[]
            for o in newobjects :
                # drop any previous "...Instance1" then re-instance under master
                name = self.getName(master)+"Instance"
                i1=self.getObject(name+"1")
                if i1 is not None :
                    cmds.delete(i1)
                i=self.newInstance(name,o,parent=master)
        else :
            self.reParent(newobjects,master)
    def newMInstance(self,name,object,location=None,
                     hostmatrice=None,matrice=None,parent=None,**kw):
        """Create an instance via the OpenMaya API: a new transform node that
        adopts object as an instanced child; returns the transform name."""
        #first create a MObject?
        #only work on Mes
        name = self.checkName(name)
        fnTrans = om.MFnTransform()
        minstance = fnTrans.create()
        fnTrans.setName(name)
        #now add the child as an instance.
        #print fnTrans.name()
        #is this will work withany object ?
        object=self.getNode(object)#or the shape ?
        fnTrans.addChild(object,fnTrans.kNextPos,True)
        #print name, object , fnTrans
        # normalize the two matrix keywords: accept either host or raw form
        if matrice is not None and isinstance(matrice,om.MTransformationMatrix):
            hostmatrice=matrice
            matrice = None
        if hostmatrice is not None and not isinstance(hostmatrice,om.MTransformationMatrix):
            matrice = hostmatrice
            hostmatrice = None
        if location is not None :
            fnTrans.setTranslation(self.vec2m(location),om.MSpace.kPostTransform)
        elif hostmatrice is not None :
            fnTrans.set(hostmatrice)
        elif matrice is not None :
            #first convert
            hmatrice = self.matrixp2m(matrice)
            fnTrans.set(hmatrice)
        if parent is not None:
            mparent = self.getNode(parent)
#            onode = om.MFnDagNode(mobj)
#            print "name",fnTrans.name()
            oparent = om.MFnDagNode(mparent)
            oparent.addChild(self.getNode(fnTrans.name()),oparent.kNextPos,False)
        return fnTrans.name()
    def newInstance(self,name,object,location=None,hostmatrice=None,matrice=None,
                    parent=None,material=None,**kw):
        """Create a cmds.instance of object, optionally positioning it
        (location or matrice), reparenting it and assigning a material."""
        #instance = None#
        #instance parent = object
        #instance name = name
#        return self.newMInstance(name,object,location=location,
#                           hostmatrice=hostmatrice,matrice=matrice,parent=parent,**kw)
#
        name = self.checkName(name)
        instance = cmds.instance(object,name=name)
        if location != None :
            #set the position of instance with location
            cmds.move(float(location[0]),float(location[1]),float(location[2]), name,
                                        absolute=True )
        if matrice is not None :
            if self._usenumpy :
                #matrice = numpy.array(matrice)#matrix(matrice)*matrix(lefthand)#numpy.array(matrice)
                #transpose only rotation
                matrice = numpy.array(matrice).transpose()#we do transpoe hee
                #m = matrice.copy()
#                m[0,:3]=matrice[0,:3]#thi work with numpy
#                m[1,:3]=matrice[1,:3]
#                m[2,:3]=matrice[2,:3]
                #matrice[:3,:3] = matrice[:3,:3].transpose()
                hm = matrice.reshape(16,).tolist()
                #shoudl I apply some transformatio first ?
                cmds.xform(name, a=True, m=hm,roo="xyz")#a for absolute
            else :
                self.setTransformation(instance[0],mat=matrice)
        #set the instance matrice
        #self.setObjectMatrix(self,object,matrice=matrice,hostmatrice=hostmatrice)
        if parent is not None:
            self.reParent(instance,parent)
        if material is not None:
            self.assignMaterial(instance,material)
        return instance
    #alias
    setInstance = newInstance
    def matrixToParticles(self,name,matrices,vector=[0.,1.,0.],transpose=True,**kw):#edge size ?
        """Build an nParticle system with one particle per matrix: position
        from the matrix translation and a per-particle rotationPP attribute
        from the matrix Euler angles (degrees). Returns (particle, shape)."""
        #blender user verex normal for rotated the instance
        #quad up vector should use the inpu vector
        axe=self.rerieveAxis(vector)
        #axe="+Y"
        quad=numpy.array(self.quad[axe])#*10.0
        print ("matrixToParticles",axe,vector,quad)
#        f=[0,1,2,3]
        v=[]
        f=[]
        e=[]
        n=[]
        vi=0
        #one mat is
        #rot[3][:3] tr
        # rot[:3,:3] rot
        #create particle system
#        obj = self.checkName(obj)
#        partO=self.getMShape(obj) #shape..
#        fnP = omfx.MFnParticleSystem(partO)
#        oriPsType = fnP.renderType()
        rot=om.MVectorArray()#fnP.count())
        pos=om.MVectorArray()#fnP.count())
        tr=[]
        #set position and rotation
        for i,m in enumerate(matrices):
            mat = numpy.array(m)
            if transpose :
                mat = numpy.array(m).transpose()
#            t = m[3][:3]
#            rot = m[:3,:3]
            scale, shear, euler, translate, perspective=decompose_matrix(mat)
            tr.append(translate.tolist())
            #need euler angle
#            e=self.FromMat(rot).rotation().asEulerRotation()
            p = om.MVector( float(translate[0]),float(translate[1]),float(translate[2]) )
            pos.append(p)
            # euler is in radians; convert to degrees for rotationPP
            r = om.MVector( float(euler[0]),float(euler[1]),float(euler[2]) )/(math.pi) *180
            rot.append(r)
#        fnP.setPerParticleAttribute("rotationPP",rot)
#        fnP.setPerParticleAttribute("position",pos)
        part,partShape= pm.nParticle(n=name+"_ps",position = tr)
#        part,partShape=cmds.particle(n=name+"_ps",p=list(tr))
        pm.setAttr('nucleus1.gravity', 0.0)#?
#        cmds.setAttr(partShape+'.computeRotation',1)
        partShape.computeRotation.set(True)
        pm.addAttr(partShape, ln = 'rotationPP', dt = 'vectorArray')
        pm.addAttr(partShape, ln = 'rotationPP0', dt = 'vectorArray')
        particle_fn = omfx.MFnParticleSystem(partShape.__apimobject__())
        particle_fn.setPerParticleAttribute('rotationPP', rot)
        particle_fn.setPerParticleAttribute('rotationPP0', rot)
        if 'parent' in kw and kw['parent'] is not None:
            parent = self.getObject(kw['parent'])
            self.reParent(name+"_ps",parent)
        return part,partShape
#particleInstancer -addObject
#-object locator1 -cycle None -cycleStep 1 -cycleStepUnits Frames
#-levelOfDetail Geometry -rotationUnits Degrees
#-rotationOrder XYZ -position worldPosition -age age crn_A_clouddsShape;
    def instancePolygon(self,name, matrices=None,hmatrices=None, mesh=None,parent=None,
                        transpose=False,globalT=True,**kw):
        """Instance mesh once per matrix.

        Two strategies: with self.instance_dupliFace a single particle
        instancer drives all copies (rotation from rotationPP); otherwise
        one newInstance per matrix is created. hmatrices (host format)
        takes precedence over matrices.
        """
        hm = False
        if hmatrices is not None :
            matrices = hmatrices
            hm = True
        if matrices == None : return None
        if mesh == None : return None
        instance = []
        #print len(matrices)#4,4 mats
        if self.instance_dupliFace:
            v=[0.,1.,0.]
            if "axis" in kw and kw["axis"] is not None:
                v=kw["axis"]
            print ("axis",v)
            o = self.getObject(name+"_pis")
            if o is None :
#                o,m=self.matrixToVNMesh(name,matrices,vector=v)
                particle,partShape=self.matrixToParticles(name,matrices,vector=v,
                                        transpose=transpose,parent=parent)
                p_instancer = pm.PyNode(pm.particleInstancer(
                            partShape, addObject=True, object=pm.ls(mesh),name=name+"_pis",
                            cycle='None', cycleStep=1, cycleStepUnits='Frames',
                            levelOfDetail='Geometry', rotationUnits='Degrees',
                            rotationOrder='XYZ', position='worldPosition', age='age'))
                pm.particleInstancer(partShape, name = p_instancer, edit = True, rotation = "rotationPP")
                if parent is not None :
                    self.reParent(name+"_pis",parent)
#                cmds.particleInstancer(
#                    partShape, addObject=True, object=self.getMShape(mesh),
#                    cycle='None', cycleStep=1, cycleStepUnits='Frames',
#                    levelOfDetail='Geometry', rotationUnits='Degrees',
#                    rotationOrder='XYZ', position='worldPosition', age='age')
#                cmds.particleInstancer(partShape, name = "p_instancer",
#                                       edit = True, rotation = "rotationPP")
            else :
                #update
                pass
            return name+"_pis"
            #rotation checkbox->use normal
        else :
            for i,mat in enumerate(matrices):
                inst = self.getObject(name+str(i))
                if inst is None :
                    #Minstance?
                    if hm :
                        inst=self.newInstance(name+str(i),mesh,hostmatrice=mat,
                                          parent=parent,globalT=globalT)
                    else :
                        inst=self.newInstance(name+str(i),mesh,matrice=mat,
                                          parent=parent,globalT=globalT)
                instance.append(inst)
            return instance
def resetTransformation(self,name):
m= [1.,0.,0.,0.,
0.,1.,0.,0.,
0.,0.,1.,0.,
0.,0.,0.,0.]
cmds.xform(name, a=True, m=m)
    def setObjectMatrix(self,object,matrice,hostmatrice=None,**kw):
        """
        set a matrix to an hostObject
        @type object: hostObject
        @param object: the object who receive the transformation
        @type hostmatrice: list/Matrix
        @param hostmatrice: transformation matrix in host format
        @type matrice: list/Matrix
        @param matrice: transformation matrix in epmv/numpy format
        @type kw: dictionary
        @param kw: 'transpose' (bool, default True) transposes the matrix
                   before applying it via cmds.xform
        """
        #have to manipulate the DAG/upper transform node...
        #let just take the owner Transofrm node of the shape
        #we should be able to setAttr either 'matrix' or 'worldMatrix'
        object = self.getObject(object)
        if hostmatrice !=None :
            #set the instance matrice
            matrice=hostmatrice
        if matrice != None:
            #convert the matrice in host format
            #set the instance matrice
            pass
        transpose = True
        if "transpose" in kw :
            transpose = kw["transpose"]
        if matrice is not None :
            if self._usenumpy :
                #matrice = numpy.array(matrice)#matrix(matrice)*matrix(lefthand)#numpy.array(matrice)
                #transpose only rotation
                matrice = numpy.array(matrice)
                if transpose :
                    matrice=matrice.transpose()#we do transpoe hee
                #m = matrice.copy()
#                m[0,:3]=matrice[0,:3]#thi work with numpy
#                m[1,:3]=matrice[1,:3]
#                m[2,:3]=matrice[2,:3]
                #matrice[:3,:3] = matrice[:3,:3].transpose()
                hm = matrice.reshape(16,).tolist()
                #shoudl I apply some transformatio first ?
                cmds.xform(object, a=True, m=hm,roo="xyz")#a for absolute
            else :
                self.setTransformation(object,mat=matrice)
    def concatObjectMatrix(self,object,matrice,hostmatrice=None):
        """
        apply a matrix to an hostObject
        NOTE: not implemented for Maya — this is a stub that does nothing.
        @type object: hostObject
        @param object: the object who receive the transformation
        @type hostmatrice: list/Matrix
        @param hostmatrice: transformation matrix in host format
        @type matrice: list/Matrix
        @param matrice: transformation matrix in epmv/numpy format
        """
        #get current transformation
        if hostmatrice !=None :
            #compute the new matrix: matrice*current
            #set the new matrice
            pass
        if matrice != None:
            #convert the matrice in host format
            #compute the new matrix: matrice*current
            #set the new matrice
            pass
    def addObjectToScene(self,doc,obj,parent=None,**kw):
        """Attach obj under parent (Maya objects already live in the scene,
        so only the parenting matters; doc is unused here)."""
        #its just namely put the object under a parent
        #return
        if obj == None : return
        if parent is not None :
            if type(obj) is list or type(obj) is tuple :
                if len(obj) == 1 :
                    obj = obj[0]
                elif len(obj) == 2 :
                    obj = obj[1]#transform node
                else :
                    obj = obj[0] #?
            obj=self.checkName(obj)
            parent=self.checkName(parent)
            #print obj,parent
#            cmds.parent( obj, parent)
            self.parent(obj, parent)
    def parent(self,obj,parent,instance=False):
        """Parent obj under parent via the OpenMaya DAG API; with
        instance=True the child is added as an instance."""
        if type(parent) == unicode :
            parent = str(parent)
        if type(parent) != str :
            print ("parent is not String ",type(parent))
            return
#        print ("parenting ", obj,parent, instance )
        mobj = self.getNode(obj)
        mparent = self.getNode(parent)
#        onode = om.MFnDagNode(mobj)
        oparent = om.MFnDagNode(mparent)
#        print ("parenting dag node", obj,parent, mobj,oparent.kNextPos,instance )
        oparent.addChild(mobj,oparent.kNextPos,instance)
    def reParent(self,obj,parent,instance=False):
        """Parent one object or a list of objects under parent; no-op when
        parent is None; failures are swallowed with a message."""
        if parent == None :
            print ("parent is None")
            return
        if type(obj) is not list and type(obj) is not tuple :
            obj = [obj,]
        try :
            [self.parent(o,parent,instance=instance) for o in obj]
        except :
            print ("failure")
    def getChilds(self,obj):
        """Return the list of direct children of obj ([] when none)."""
        if type(obj) is str or type(obj) is unicode:
            o = self.checkName(obj)
        else :
            o = self.getName(obj)
        childs= cmds.listRelatives(o, c=True)
        if childs is None :
            return []
        else :
            return childs
    def addCameraToScene(self,name,Type='persp',focal=30.0,center=[0.,0.,0.],sc=None):
        """Create a camera named name at center with the given focal length;
        returns the [transform, shape] pair from cmds.camera."""
        # Create a camera and get the shape name.
        cameraName = cmds.camera(n=name)
        cameraShape = cameraName[1]
        # Set the focal length of the camera.
        cmds.camera(cameraShape, e=True, fl=focal)
        #change the location
        cmds.move(float(center[0]),float(center[1]),float(center[2]), cameraName[0], absolute=True )
        #should I rotate it
        cmds.rotate( 0, '0', '360deg',cameraName[0] )
        # Change the film fit type.
        #cmds.camera( cameraShape, e=True, ff='overscan' )
        return cameraName
    def addLampToScene(self,name,Type='Area',rgb=[1.,1.,1.],dist=25.0,energy=1.0,
                       soft=1.0,shadow=False,center=[0.,0.,0.],sc=None,**kw):
        """Create a light of the given Type (keys of LIGHT_OPTIONS) with
        intensity, softness and color, positioned at center."""
        #print Type
        #each type have a different cmds
        lcmd = self.LIGHT_OPTIONS[Type]
        light = lcmd(n=name)
#        light = cmds.pointLight(n=name)
        #cmds.pointLight(light,e=1,i=energy,rgb=rgb,ss=soft,drs=dist)
        lcmd(light,e=1,i=energy)
        lcmd(light,e=1,ss=soft)
#        cmds.pointLight(light,e=1,drs=dist)
        lcmd(light,e=1,rgb=rgb)
        cmds.move(float(center[0]),float(center[1]),float(center[2]), light, absolute=True )
        return light
    def toggleDisplay(self,ob,display,**kw):
        """Show or hide ob by setting its 'visibility' plug."""
#        ob = self.getObject(ob)
#        if ob is None :
#            return
#        ob=self.checkName(ob)
#        if display :
#            cmds.showHidden(ob)
#        else :
#            cmds.hide(ob)
        if ob is None :
            return
        node = self.getNode(self.checkName(ob))
        if node is None :
            return
        attrDis = self.getNodePlug("visibility",node)
        attrDis.setBool(bool(display))
# def toggleXray(self,object,xray):
# o = self.getObject(object)
# cmds.select(o)
# cmds.displySurface(xRay = True)
    def getVisibility(self,obj,editor=True, render=False, active=False):
        """Return the object's visibility flag; Maya has a single
        'visibility' plug, so editor/render/active all read the same value."""
        #0 off, 1#on, 2 undef
        node = self.getNode(self.checkName(obj))
        attrDis = self.getNodePlug("visibility",node)
        if editor and not render and not active:
            return attrDis.asBool()
        elif not editor and render and not active:
            return attrDis.asBool()
        elif not editor and not render and active:
            return attrDis.asBool()
        else :
            # NOTE(review): MPlug has no .get() in OpenMaya 1.0 — this
            # branch likely raises; probably should be asBool(). Verify.
            return attrDis.get(),attrDis.get(),attrDis.get()
    def getTranslation(self,name,absolue=True):
        """Return the translation of name (world space when absolue=True)."""
        name = self.checkName(name)
        return self.FromVec(cmds.xform(name,q=1,ws=int(absolue),t=1))
    def getTranslationOM(self,name):
        """Return the world-space translation as an OpenMaya MVector."""
        node = self.getNode(name)
        fnTrans = om.MFnTransform(node,)
        return fnTrans.getTranslation(om.MSpace.kWorld)#kPostTransform)
    def setTranslation(self,name,pos):
        """Set the translation of name to pos (post-transform space)."""
        node = self.getNode(name)
        fnTrans = om.MFnTransform(node,)
        newT = self.vec2m(pos)
        fnTrans.setTranslation(newT,om.MSpace.kPostTransform)
    def translateObj(self,obj,position,use_parent=False):
        """Move obj to position; with use_parent=True the position is made
        relative to the accumulated parent translation."""
        #is om would be faster ?
        if len(position) == 1 : c = position[0]
        else : c = position
        #print "upadteObj"
        newPos=c#c=c4dv(c)
        o=self.getObject(obj)
        if use_parent :
            parentPos = self.getPosUntilRoot(obj)#parent.get_pos()
            c = newPos - parentPos
            cmds.move(float(c[0]),float(c[1]),float(c[2]), o, absolute=True )
        else :
            cmds.move(float(c[0]),float(c[1]),float(c[2]), o, absolute=True )
    def scaleObj(self,obj,sc):
        """Scale obj by sc (a float applies uniformly to x, y and z)."""
        obj = self.checkName(obj)
        if type(sc) is float :
            sc = [sc,sc,sc]
        cmds.scale(float(sc[0]),float(sc[1]),float(sc[2]), obj,absolute=True )
    def getScale(self,name,absolue=True,**kw):
        """Return the [x, y, z] scale of name via MFnTransform (MScriptUtil
        is needed to pass a double* to the 1.0 API)."""
        node = self.getNode(name)
        fnTrans = om.MFnTransform(node,)
        # First create an array and a pointer to it
        scaleDoubleArray = om.MScriptUtil()
        scaleDoubleArray.createFromList( [0.0, 0.0, 0.0], 3 )
        scaleDoubleArrayPtr = scaleDoubleArray.asDoublePtr()
        # Now get the scale
        fnTrans.getScale( scaleDoubleArrayPtr )
        # Each of these is a decimal number reading from the pointer's reference
        x_scale = om.MScriptUtil().getDoubleArrayItem( scaleDoubleArrayPtr, 0 )
        y_scale = om.MScriptUtil().getDoubleArrayItem( scaleDoubleArrayPtr, 1 )
        z_scale = om.MScriptUtil().getDoubleArrayItem( scaleDoubleArrayPtr, 2 )
        return [x_scale,y_scale,z_scale]#kPostTransform) or om.MVector(v[0], v[1], v[2])?
    def getSize(self,obj):
        """Return [width, height, depth] of a mesh object; [1,1,1] when
        obj has no mesh shape."""
        #take degree
        obj = self.checkName(obj)
        meshnode = self.getMShape(obj)
        try :
            mesh = om.MFnMesh(meshnode)
        except :
            return [1,1,1]
        obj = self.getMName(mesh)
        x=cmds.getAttr(obj+'.width')
        y=cmds.getAttr(obj+'.height')
        z=cmds.getAttr(obj+'.depth')
        return [x,y,z]
    def rotateObj(self,obj,rot):
        """Set the rotation of obj from rot given in radians (converted
        to degrees for the rx/ry/rz attributes)."""
        #take degree
        obj = self.checkName(obj)
        cmds.setAttr(obj+'.rx',degrees(float(rot[0])))
        cmds.setAttr(obj+'.ry',degrees(float(rot[1])))
        cmds.setAttr(obj+'.rz',degrees(float(rot[2])))
    def getTransformation(self,name):
        """Return the MTransformationMatrix of the named transform node."""
        node = self.getNode(name)
        fnTrans = om.MFnTransform(node)
        mmat = fnTrans.transformation()
        #maya matrix
        return mmat
    def setTransformation(self,name,mat=None,rot=None,scale=None,trans=None,order="str",**kw):
        """Apply transform components to name: a full matrix (host or raw
        format), and/or translation, Euler rotation (radians) and scale."""
        node = self.getNode(name)
        fnTrans = om.MFnTransform(node)
        if mat is not None :
            if isinstance(mat,om.MTransformationMatrix):
                fnTrans.set(mat)
            else :
                fnTrans.set(self.matrixp2m(mat))
        if trans is not None :
            fnTrans.setTranslation(self.vec2m(trans),om.MSpace.kPostTransform)
        if rot is not None :
            rotation = om.MEulerRotation (rot[0], rot[1], rot[2])
            fnTrans.setRotation(rotation)
        if scale is not None :
            fnTrans.setScale(self.arr2marr(scale))
    def ObjectsSelection(self,listeObjects,typeSel="new"):
        """
        Modify the current object selection.
        @type listeObjects: list
        @param listeObjects: list of object to joins
        @type typeSel: string
        @param listeObjects: type of modification: new,add,...
        """
        # "add" keeps the current selection, "new" replaces it
        dic={"add":True,"new":False}
        sc = self.getCurrentScene()
        for obj in listeObjects:
            cmds.select(self.getObject(obj),add=dic[typeSel])
        #Put here the code to add/set an object to the current slection
        #[sc.SetSelection(x,dic[typeSel]) for x in listeObjects]
    def JoinsObjects(self,listeObjects):
        """
        Merge the given liste of object in one unique geometry.
        @type listeObjects: list
        @param listeObjects: list of object to joins
        """
        sc = self.getCurrentScene()
        #put here the code to add the liste of object to the selection
        cmds.select(self.getObject(listeObjects[0]))
        for i in range(1,len(listeObjects)):
            cmds.select(listeObjects[i],add=True)
        cmds.polyUnite()
        #no need to joins? but maybe better
        #then call the command/function that joins the object selected
#        c4d.CallCommand(CONNECT)
    #need face indice
    def color_mesh_perVertex(self,mesh,colors,faces=None,perVertex=True,
                             facesSelection=None,faceMaterial=False):
        """Apply vertex or face colors to a mesh.

        colors may be one RGB triple (applied everywhere) or one per
        vertex/face; the per-vertex vs per-face mode is auto-detected from
        the color count when it matches. facesSelection restricts the
        operation to selected faces (True) or an explicit index list.
        Returns True on success, False when mesh is not a polygon mesh.
        """
        if colors[0] is not list and len(colors) == 3 :
           colors = [colors,]
        if not isinstance(mesh,maya.OpenMaya.MFnMesh):
            if self.getType(mesh) != self.POLYGON and self.getType(mesh) != self.MESH:
                return False
        mcolors=om.MColorArray()
        iv=om.MIntArray()
        meshnode = mesh
#        print mesh
        if type(mesh) is str or type(mesh) is unicode :
            meshnode = self.getMShape(mesh)
        try :
            mesh = om.MFnMesh(meshnode)
        except:
            return False
        # enable vertex-color display on the shape
        mesh.findPlug('displayColors').setBool(True)
        if not isinstance(mesh,maya.OpenMaya.MFnMesh):
            return
        nv=mesh.numVertices()
        nf=mesh.numPolygons()
        mfaces = self.getMeshFaces(meshnode)
        if facesSelection is not None :
            if type(facesSelection) is bool :
                fsel,face_sel_indice = self.getMeshFaces(mesh,selected=True)
            else :
                face_sel_indice = facesSelection
                fsel=[]
                for i in face_sel_indice:
                    fsel.append(mfaces[i])
            # collect the vertices touched by the selected faces
            vsel=[]
            for f in fsel:
                for v in f:
                    if v not in vsel:
                        vsel.append(v)
            mfaces = fsel
            nf = len(fsel)
            nv = len(vsel)
#            print "selected ",face_sel_indice
        #check if its ok
        if len(colors) == nv:
            perVertex = True
        elif len(colors) == nf:
            perVertex = False
        if perVertex:
            N=range(nv)
        else :
            N=range(nf)
        if facesSelection is not None :
            N = face_sel_indice
            perVertex = False
        for k,i in enumerate(N) :
            if len(colors) == 1 : ncolor = colors[0]
            else :
                if k >= len(colors) :
                    ncolor = [0.,0.,0.] #problem
                else :
                    ncolor = colors[i]
            #print ncolor
            #if max(ncolor) < 1 : ncolor = map( lambda x: x*255, ncolor)
            col=om.MColor(float(ncolor[0]),float(ncolor[1]),float(ncolor[2]))
            #print ncolor
            mcolors.append(col)
            iv.append(int(i))
#            print "i",i,ncolor
            #mesh.setVertexColor(col,int(i))
        if perVertex:
            mesh.setVertexColors(mcolors,iv)
        else :
#            print iv#should be the fdace index
            mesh.setFaceColors(mcolors,iv)
        return True
###################MATERIAL CODE FROM Rodrigo Araujo#####################################################################################
#see http://linil.wordpress.com/2008/01/31/python-maya-part-2/
    def createMaterial(self, name, color, type ):
        """Create a shader of the given node type with a shading group
        nameSG and the given color, unless a material name already exists."""
        name = self.checkName(name)
        mat=cmds.ls(name, mat=True)
        if len(mat)==0: #create only if mat didnt exist already
            #shading group
            shaderSG = cmds.sets(renderable=True, noSurfaceShader=True, empty=True,
                                 name=name+"SG" )
            #material
            cmds.shadingNode( type, asShader=True, name=name )
            #phong ?
            #cmds.setAttr((shader+ '.reflectivity'), 0)# no rayTrace
            #cmds.setAttr((shader+ '.cosinePower'), 3)
            cmds.setAttr( name+".color", color[0], color[1], color[2],
                         type="double3")
            cmds.connectAttr(name+".outColor", shaderSG+".surfaceShader")
    def createTexturedMaterial(self,name,filename):
        """Create a lambert material driven by a file texture (filename)
        and return the material name; existing materials are reused."""
        name = self.checkName(name)
        mat=cmds.ls(name, mat=True)
        if len(mat)==0: #create only if mat didnt exist already
            #shading group
            shaderSG = cmds.sets(renderable=True, noSurfaceShader=True, empty=True,
                                 name=name+"SG" )
            #material
            cmds.shadingNode("lambert", asShader=True, name=name )
            cmds.connectAttr(name+".outColor", shaderSG+".surfaceShader")
        #create the texture and connect it
        texture = cmds.shadingNode('file', asTexture=True,name=name+"Texture")
        cmds.connectAttr(name+"Texture"+'.outColor', name+".color")
        cmds.setAttr(name+"Texture"+'.fileTextureName', filename, type='string')
        return name
    def create_mMayaMaterials(self):
        """Find or build the mMaya atom shader network.

        Looks for the four tagged nodes (shaderSG, shaderSGAmbOcc, ambOcc,
        ramp); returns them if all four exist, builds the whole network if
        none exist, and errors out if only some were found (partial delete).

        NOTE(review): relies on module globals ``elems`` and ``vanRad_CPK``
        (element -> CPK radius/color table) defined elsewhere — verify they
        are in scope before calling.
        """
        existingSGs = cmds.ls(type = 'shadingEngine')
        shaderHits = 0;
        shaderSG, shaderSGAmbOcc, ambOcc, ramp = '', '', '', ''
        for existingSG in existingSGs:
            if mel.eval('attributeExists mMaya_atomShaderSG ' +existingSG):
                shaderSG = existingSG
                shaderHits += 1
            if mel.eval('attributeExists mMaya_atomShaderSGAmbOcc ' +existingSG):
                shaderSGAmbOcc = existingSG
                shaderHits += 1
        existingAmbOccs = cmds.ls(type = 'mib_amb_occlusion')
        for existingAmbOcc in existingAmbOccs:
            if mel.eval('attributeExists mMaya_atomShaderAmbOcc ' +existingAmbOcc):
                ambOcc = existingAmbOcc
                shaderHits += 1
        existingRamps = cmds.ls(type = 'ramp')
        for existingRamp in existingRamps:
            if mel.eval('attributeExists mMaya_atomShaderRGBRamp ' +existingRamp):
                ramp = existingRamp
                shaderHits += 1
        if shaderHits == 4:
            return shaderSG, shaderSGAmbOcc, ambOcc, ramp
        elif shaderHits == 0:
            # build the full network: phong shader + ambient-occlusion
            # variant fed by a particleSamplerInfo, plus a per-element ramp
            shader = cmds.shadingNode('phong', asShader = 1, name = ("atomShader"))
            cmds.setAttr((shader+ '.reflectivity'), 0)# no rayTrace
            cmds.setAttr((shader+ '.cosinePower'), 3)
            shaderSG = cmds.sets(renderable = 1, noSurfaceShader = 1, empty = 1)
            cmds.addAttr(shaderSG, ln = 'mMaya_atomShaderSG', at = 'bool', h = 1)
            cmds.connectAttr((shader+ '.outColor'), (shaderSG+ '.surfaceShader'))
            shaderAmbOcc = cmds.shadingNode('phong', asShader = 1, name = ("atomShaderAmbOcc"))
            cmds.setAttr((shaderAmbOcc+ '.reflectivity'), 0)
            cmds.setAttr((shaderAmbOcc+ '.cosinePower'), 3)
            cmds.setAttr((shaderAmbOcc+ '.ambientColor'), 0.7, 0.7, 0.7)
            cmds.setAttr((shaderAmbOcc+ '.diffuse'), 0.2)
            ambOcc = cmds.createNode('mib_amb_occlusion')
            cmds.addAttr(ambOcc, ln = 'mMaya_atomShaderAmbOcc', at = 'bool', h = 1)
            cmds.connectAttr((ambOcc+ '.outValue'), (shaderAmbOcc+ '.color'))
            cmds.connectAttr((shaderAmbOcc+ '.color'), (shaderAmbOcc+ '.specularColor'))
            partySampler = cmds.createNode('particleSamplerInfo')
            cmds.connectAttr((partySampler+ '.outTransparency'), (shader+ '.transparency'))
            cmds.connectAttr((partySampler+ '.outIncandescence'), (shader+ '.incandescence'))
            cmds.connectAttr((partySampler+ '.outColor'), (shader+ '.color'))
            cmds.connectAttr((partySampler+ '.outTransparency'), (shaderAmbOcc+ '.transparency'))
            cmds.connectAttr((partySampler+ '.outIncandescence'), (shaderAmbOcc+ '.incandescence'))
            cmds.connectAttr((partySampler+ '.outColor'), (ambOcc+ '.bright'))
            shaderSGAmbOcc = cmds.sets(renderable = 1, noSurfaceShader = 1, empty = 1)
            cmds.addAttr(shaderSGAmbOcc, ln = 'mMaya_atomShaderSGAmbOcc', at = 'bool', h = 1)
            cmds.connectAttr((shaderAmbOcc+ '.outColor'), (shaderSGAmbOcc+ '.surfaceShader'))
            ramp = cmds.createNode('ramp')
            cmds.setAttr((ramp + '.interpolation'), 0)
            cmds.addAttr(ramp, ln = 'mMaya_atomShaderRGBRamp', at = 'bool', h = 1)
            valChangePMA = cmds.createNode('plusMinusAverage')
            cmds.addAttr(valChangePMA, ln = 'mMaya_atomShaderRGBRampPMA', at = 'bool', h = 1)
            cmds.connectAttr((ramp+ '.mMaya_atomShaderRGBRamp'), (valChangePMA+ '.mMaya_atomShaderRGBRampPMA'))
            indexDivFactor = 1000.0;
            for elem in elems:
                indexElem = vanRad_CPK[elem][4]
                col = vanRad_CPK[elem][1:-1]
                cmds.setAttr((ramp + '.colorEntryList[' +str(indexElem)+ '].position'), (indexElem/indexDivFactor))
                #cmds.setAttr((ramp + '.colorEntryList[' +str(indexElem)+ '].color'), col[0], col[1], col[2], type = 'double3')
                shade = cmds.shadingNode('surfaceShader', asTexture = 1)
                cmds.setAttr((shade + '.outColor'), col[0], col[1], col[2], type = 'double3')
                cmds.connectAttr((shade+ '.outColor'), (ramp+ '.colorEntryList[' +str(indexElem)+ '].color'))
                cmds.connectAttr((shade+ '.outColor'), (valChangePMA+ '.input3D[' +str(indexElem)+ ']'))
                cmds.rename(shade, elems[elem])
            return shaderSG, shaderSGAmbOcc, ambOcc, ramp
        else:
            mel.eval('error "a mMaya default shader has been deleted"')
def addMaterial(self, name, color ):
if color is None :
color = (1.,0.,0.)
name = self.checkName(name)
mat=cmds.ls(name, mat=True)
if len(mat)==0: #create only if mat didnt exist already
#shading group
cmds.sets( renderable=True, noSurfaceShader=True, empty=True, name=name+"SG" )
#material
# = name[1:]
cmds.shadingNode( 'lambert', asShader=True, name=name )
cmds.setAttr( name+".color", color[0], color[1], color[2], type="double3")
cmds.connectAttr(name+".outColor", name+"SG.surfaceShader")
mat = cmds.ls(name, mat=True)
return mat
def assignMaterial(self,object,matname,texture = True,**kw):
object = self.getObject(object,doit=True)
#print "assign " , matname
#print matname
if type(matname) != list :
# name = name.replace(":","_")
matname = self.checkName(matname)
mat=cmds.ls(matname, mat=True)
else :
if type(matname[0]) is list :
mat = matname[0]
matname = str(matname[0][0])
else :
mat = matname
matname = str(matname[0])
#print "find " ,mat
matname = self.checkName(matname)
# if not mat:
# self.createMaterial (matname, (1.,1.,1.), 'lambert')
# conn = cmds.listConnections(cmds.listHistory(object))
## if len(conn) >= 2:
# shade = cmds.listHistory(object)[0].split('|')[1]
# cmds.hyperShade( matname,o=shade,assign=True )
#print 'assign ',object,matname
# print mat,matname
try :
cmds.sets(object, edit=True, forceElement=matname+"SG")
except :
print "problem assigning mat" + matname + " to object "+object
def assignNewMaterial(self, matname, color, type, object):
print matname, color, type, object
self.createMaterial (matname, color, type)
self.assignMaterial (object,matname)
def colorMaterial(self,matname, color):
matname=self.getMaterial(matname)
if len(matname)==1:
matname=matname[0]
cmds.setAttr( str(matname)+".color", color[0], color[1], color[2], type="double3")
def getMaterial(self,matname):
if type(matname) != str :
return matname
matname = self.checkName(matname)
mat=cmds.ls(matname, mat=True)
if len(mat)==0:
return None
else :
return mat
def getMaterialName(self,mat):
return str(mat)
def getAllMaterials(self):
#return unicode list of material
#mat=getMaterials()
matlist=cmds.ls(mat=True)#[]
return matlist
def getMaterialObject(self,obj):
obj = self.getObject(obj)
matnames = cmds.listConnections(cmds.listHistory(obj,f=1),type='lambert')
return matnames
def changeObjColorMat(self,obj,color):
#obj should be the object name, in case of mesh
#in case of spher/cylinder etc...atom name give the mat name
#thus matname should be 'mat_'+obj
obj = self.checkName(obj)
matname = "mat_"+str(obj)
self.colorMaterial(matname,color)
def changeColor(self,mesh,colors,perVertex=True,perObjectmat=None,pb=False,
facesSelection=None,faceMaterial=False):
#if hasattr(geom,'obj'):obj=geom.obj
#else : obj=geom
#mesh = self.getMesh(mesh)
if colors[0] is not list and len(colors) == 3 :
colors = [colors,]
print "change color",type(mesh),mesh
res = self.color_mesh_perVertex(mesh,colors,perVertex=perVertex,
facesSelection=facesSelection,
faceMaterial=faceMaterial)
if not res or len(colors) == 1:
#simply apply the color/material to mesh
#get object material, if none create one
# print "material assign"
mats = self.getMaterialObject(mesh)
# print mats
if not mats :
self.assignNewMaterial("mat"+self.getName(mesh), colors[0],
'lambert', mesh)
else :
self.colorMaterial(mats[0],colors[0])
def getMaterialProperty(self,material, **kw):
"""
Change a material properties.
* overwrited by children class for each host
@type material: string/Material
@param material: the material to modify
- color
- specular
- ...
"""
mat =self.getMaterial(material)
if len(mat)==1:
mat=mat[0]
res = {}
if mat is None :
return
if "specular" in kw :
res["specular"] = True#mat[c4d.MATERIAL_USE_SPECULAR]
if "specular_color" in kw :
res["specular_color"] = [0,0,0]#self.ToVec(mat[c4d.MATERIAL_SPECULAR_COLOR],pos=False)
if "specular_width" in kw :
res["specular_width"] = 0#mat[c4d.MATERIAL_SPECULAR_WIDTH]
if "color" in kw :
res["color"] = cmds.getAttr( str(mat)+".color")[0]
if "diffuse" in kw :
res["diffuse"] = cmds.getAttr( str(mat)+".diffuse")[0]
return res
###################Meshs and Objects#####################################################################################
    def Sphere(self,name,res=16.,radius=1.0,pos=None,color=None,
                        mat=None,parent=None,type="nurb"):
        """Create a sphere: NURBS when type=="nurb", poly when type=="poly".

        A material is looked up (*mat*) or created from *color* (defaults to
        yellow [1,1,0]); returns (transform_node, shape).
        Note: the *type* parameter shadows the builtin of the same name.
        """
#        iMe[atn],node=cmds.sphere(name=name+"Atom_"+atn,r=rad)
        name = self.checkName(name)
        t=res/100.  # NOTE(review): computed but unused -- confirm before removing
        if type == "nurb" :
            transform_node,shape = cmds.sphere(name=name,r=radius,sections=int(res),
                                       spans=int(res)) #NurbSphere
        elif type == "poly":
            transform_node,shape = cmds.polySphere( n=name, r=radius,sx=int(res), sy=int(res))
        #shape is name+"Shape"
        if pos is not None :
            cmds.move(float(pos[0]),float(pos[1]),float(pos[2]),
                                transform_node,absolute=True )
        if mat is not None :
            # caller supplied a material: resolve and assign it
            mat = self.getMaterial(mat)
            if mat is not None :
                self.assignMaterial(transform_node,mat)
        else :
            # no material: build one from the color (yellow by default)
            if color is not None :
                mat = self.addMaterial("mat"+name,color)
            else :
                mat = self.addMaterial("mat"+name,[1.,1.,0.])
#            mat = self.getMaterial(name)
            self.assignMaterial(transform_node,mat)
        if parent is not None :
            self.reParent(transform_node,parent)
        return transform_node,shape
def updateSphereMesh(self,mesh,verts=None,faces=None,basemesh=None,
scale=None,typ=True,**kw):
#scale or directly the radius..Try the radius
#scale is actualy the radius
# name = self.getObject(mesh)
#would it be faster with openMaya
mesh = self.checkName(mesh)
if typ:
cmds.sphere(mesh,e=1,r=scale)
else :
cmds.polySphere(mesh,e=1,r=scale)
def updateSphereObj(self,obj,coords=None):
if obj is None or coords is None: return
obj = self.getObject(obj)
#would it be faster we transform action
self.setTranslation(obj,coords)
# cmds.move(float(coords[0]),float(coords[1]),float(coords[2]), obj, absolute=True )
# def updateSphereObjs(self,g,coords=None):
# if not hasattr(g,'obj') : return
# if coords == None :
# newcoords=g.getVertices()
# else :
# newcoords=coords
# #print "upadteObjSpheres"
# #again map function ?
# for i,nameo in enumerate(g.obj):
# c=newcoords[i]
# o=getObject(nameo)
# cmds.move(float(c[0]),float(c[1]),float(c[2]), o, absolute=True )
def instancesCylinder(self,name,points,faces,radii,
mesh,colors,scene,parent=None):
cyls=[]
mat = None
if len(colors) == 1:
mat = self.retrieveColorMat(colors[0])
if mat == None and colors[0] is not None:
mat = self.addMaterial('mat_'+name,colors[0])
for i in range(len(faces)):
cyl = self.oneCylinder(name+str(i),points[faces[i][0]],
points[faces[i][1]],radius=radii[i],
instance=mesh,material=mat,parent = parent)
cyls.append(cyl)
return cyls
def updateInstancesCylinder(self,name,cyls,points,faces,radii,
mesh,colors,scene,parent=None,delete = True):
mat = None
if len(colors) == 1:
mat = self.retrieveColorMat(colors[0])
if mat == None and colors[0] is not None:
mat = self.addMaterial('mat_'+name,colors[0])
for i in range(len(faces)):
col=None
if i < len(colors):
col = colors[i]
if i < len(cyls):
self.updateOneCylinder(cyls[i],points[faces[i][0]],
points[faces[i][1]],radius=radii[i],
material=mat,color=col)
self.toggleDisplay(cyls[i],True)
else :
cyl = self.oneCylinder(name+str(i),points[faces[i][0]],
points[faces[i][1]],radius=radii[i],
instance=mesh,material=mat,parent = parent)
cyls.append(cyl)
if len(faces) < len(cyls) :
#delete the other ones ?
for i in range(len(faces),len(cyls)):
if delete :
obj = cyls.pop(i)
self.deleteObject(obj)
else :
self.toggleDisplay(cyls[i],False)
return cyls
def instancesSphere(self,name,centers,radii,meshsphere,colors,scene,parent=None):
name = self.checkName(name)
sphs=[]
mat = None
if len(colors) == 1:
print (colors)
mat = self.retrieveColorMat(colors[0])
if mat == None:
mat = self.addMaterial('mat_'+name,colors[0])
for i in range(len(centers)):
sphs.append(cmds.instance(meshsphere,name=name+str(i)))
#local transformation ?
cmds.move(float(centers[i][0]),float(centers[i][1]),float(centers[i][2]),name+str(i))
cmds.scale(float(radii[i]),float(radii[i]),float(radii[i]), name+str(i),absolute=True )
if mat == None : mat = self.addMaterial("matsp"+str(i),colors[i])
self.assignMaterial(name+str(i),mat)#mat[bl.retrieveColorName(sphColors[i])]
self.addObjectToScene(scene,sphs[i],parent=parent)
return sphs
def updateInstancesSphere(self,name,sphs,centers,radii,meshsphere,
colors,scene,parent=None,delete=True):
mat = None
if len(colors) == 1:
mat = self.retrieveColorMat(colors[0])
if mat == None and colors[0] is not None:
mat = self.addMaterial('mat_'+name,colors[0])
for i in range(len(centers)):
if len(radii) == 1 :
rad = radii[0]
elif i >= len(radii) :
rad = radii[0]
else :
rad = radii[i]
if i < len(sphs):
cmds.move(float(centers[i][0]),float(centers[i][1]),float(centers[i][2]),sphs[i])#name+str(i))
cmds.scale(float(rad),float(rad),float(rad), sphs[i],absolute=True )
# sphs[i].SetAbsPos(self.FromVec(centers[i]))
# sphs[i][905]=c4d.Vector(float(rad),float(rad),float(rad))
if mat == None :
if colors is not None and i < len(colors) and colors[i] is not None :
mat = self.addMaterial("matsp"+str(i),colors[i])
if colors is not None and i < len(colors) and colors[i] is not None :
self.colorMaterial(mat,colors[i])
self.toggleDisplay(sphs[i],True)
else :
sphs.append(cmds.instance(meshsphere,name=name+str(i)))
#local transformation ?
cmds.move(float(centers[i][0]),float(centers[i][1]),float(centers[i][2]),name+str(i))
cmds.scale(float(rad),float(rad),float(rad), name+str(i),absolute=True )
if mat == None : mat = self.addMaterial("matsp"+str(i),colors[i])
self.assignMaterial(name+str(i),mat)#mat[bl.retrieveColorName(sphColors[i])]
self.addObjectToScene(scene,sphs[i],parent=parent)
if mat == None :
if colors is not None and i < len(colors) and colors[i] is not None :
mat = self.addMaterial("matsp"+str(i),colors[i])
self.addObjectToScene(scene,sphs[i],parent=parent)
if len(centers) < len(sphs) :
#delete the other ones ?
for i in range(len(centers),len(sphs)):
if delete :
obj = sphs.pop(i)
print "delete",obj
self.deleteObject(obj)
else :
self.toggleDisplay(sphs[i],False)
return sphs
def constraintLookAt(self,object):
"""
Cosntraint an hostobject to llok at the camera
@type object: Hostobject
@param object: object to constraint
"""
self.getObject(object)
cmds.orientConstraint( 'persp', object )
def updateText(self,text,string="",parent=None,size=None,pos=None,font=None):
text = self.checkName(text)
if string : cmds.textCurves(text, e=1, t=string )
# if size is not None : text[c4d.PRIM_TEXT_HEIGHT]= size
# if pos is not None : self.setTranslation(text,pos)
# if parent is not None : self.reParent(text,parent)
    def extrudeText(self,text,**kw):
        """Bevel every curve in a textCurves hierarchy (cmds.bevel via eval).

        Walks the transform's children (one per letter), then each letter's
        curves, and runs a bevel on each by building and eval'ing a command
        string.
        """
        tr,parent = self.getTransformNode(text)
        nChild = parent.childCount()
        print nChild
        #dag = om.MFnDagNode(node)
        dnode = om.MFnDependencyNode(parent.transform())
        child_path = om.MDagPath()
        # option-string template for a bevelPlus call; the eval'ed command
        # below actually uses cmds.bevel, so this is kept only as reference
        cmd ="constructionHistory=True,normalsOutwards=True,range=False,polygon=1,\
    tolerance=0.01,numberOfSides=4 ,js=True,width=0 ,depth=0 ,extrudeDepth=0.5,\
    capSides=4 ,bevelInside=0 ,outerStyle=0 ,innerStyle=0 ,\
    polyOutMethod=0,polyOutCount=200,polyOutExtrusionType=2 ,\
    polyOutExtrusionSamples=3,polyOutCurveType=2 ,\
    polyOutCurveSamples=3,polyOutUseChordHeightRatio=0)"
        for i in range(nChild):
            #get all curve
            node_child = parent.child(i)
            child_tr,child_path = self.getTransformNode(node_child)
            dnode = om.MFnDependencyNode(node_child)
            nChildChild = child_path.childCount()
            for j in range(nChildChild):
                # build the per-curve command; cmdchilds (bevelPlus variant)
                # is assembled but never executed -- only cmdbis runs
                cmdchilds="cmds.bevelPlus("
                node_child_child = child_path.child(j)
                dnode = om.MFnDependencyNode(node_child_child)
                cmdchilds+='"'+dnode.name()+'",'
                cmdchilds+="n='bevel_"+dnode.name()+str(j)+"',"+cmd
                cmdbis = 'cmds.bevel("'+dnode.name()+'",n="bevel_'+dnode.name()+str(j)+'", ed=0.5)'
                eval(cmdbis)
                cmds.bevel(e=1,w=0,d=0)
    def Text(self,name="",string="",parent=None,size=5.,pos=None,font='Courier',
             lookAt=False,**kw):
        """Create a textCurves object; optionally extrude it (kw["extrude"]).

        Returns text, or (text, None) when an extruder was requested.
        NOTE(review): *pos* is mutated in place (pos[0] -= 14) when given.
        """
        return_extruder = False
        name = self.checkName(name)
        if "extrude" in kw :
            extruder = None
            if type(kw["extrude"]) is bool and kw["extrude"]:
                pass
        text = cmds.textCurves( n= name, f=font, t=string )
        ## Result: [u'testShape', u'makeTextCurves2'] #
        if pos is not None :
            #should add -14
            pos[0] = pos[0]-14.0#not center
            self.setTranslation(name+'Shape',pos)
#        if parent is not None:
        self.addObjectToScene(self.getCurrentScene(),name+'Shape',parent=parent)
        if lookAt:
            self.constraintLookAt(name)
        self.scaleObj(text[0],[size,size,size])
        if "extrude" in kw :
            extruder = None
            #create an extruder
            if type(kw["extrude"]) is bool and kw["extrude"]:
                self.extrudeText(text)
#                extruder = cmds.bevelPlus( text[1], ed=0.5)
#                extruder = cmds.bevel( text, ed=0.5,w=0.0,d=0.0)
                #reparent the extruder ?
#                self.reParent(extruder,parent)
                #po=1, cap=4,
#            extruded=cmds.extrude( extrude_obj,self.checkName(name)+"_spline",
#                                        et = 2, ucp = 1,n=name, fpt=1,upn=1)
                return_extruder = True
            else :
                self.extrudeText(text)
#                extruder = cmds.bevel( text, ed=0.5,w=0.0,d=0.0)
                # NOTE(review): extruder is still None at this point, so this
                # reparents None -- confirm reParent tolerates it
                self.reParent(extruder,parent)
#        if extruder is not None :
#            pass
        self.addObjectToScene(self.getCurrentScene(),name+'Shape',parent=parent)
        if return_extruder :
            return text,None
        return text
def getBoxSize(self,name):
#kPolyCube
# cmds.select(name)
# print(name)
sx = cmds.polyCube(name, q=True,w=True)
sy = cmds.polyCube(name, q=True,h=True)
sz = cmds.polyCube(name, q=True,d=True)
return [sx,sy,sz]
def box(self,name,center=[0.,0.,0.],size=[1.,1.,1.],cornerPoints=None,visible=1,
mat=None,**kw):
if cornerPoints != None :
for i in range(3):
size[i] = cornerPoints[1][i]-cornerPoints[0][i]
for i in range(3):
center[i]=(cornerPoints[0][i]+cornerPoints[1][i])/2.
res = 15.
name = self.checkName(name)
box,shape = cmds.polyCube(name=name,w=float(size[0]),h=float(size[1]),
d=float(size[2]), sx=res, sy=res, sz=res )
mat = self.addMaterial("mat"+name,[1.,1.,0.])
self.assignMaterial(box,mat)
cmds.move(float(center[0]),float(center[1]),float(center[2]),box)
parent = None
if "parent" in kw :
parent = kw["parent"]
self.addObjectToScene(self.getCurrentScene(),box,parent=parent)
return box,shape
def updateBox(self,box,center=[0.,0.,0.],size=[1.,1.,1.],cornerPoints=None,
visible=1, mat = None):
box=self.getObject(box)
if cornerPoints != None :
for i in range(3):
size[i] = cornerPoints[1][i]-cornerPoints[0][i]
for i in range(3):
center[i]=(cornerPoints[0][i]+cornerPoints[1][i])/2.
cmds.move(float(center[0]),float(center[1]),float(center[2]),box)
cmds.polyCube(box,e=1,w=float(size[0]),h=float(size[1]),
d=float(size[2]))
def Cone(self,name,radius=1.0,length=1.,res=16,pos = None,parent=None):
name = self.checkName(name)
diameter = 2*radius
cone,mesh=cmds.cone(name=name,axis=[0.0,1.0,0.0],hr=length,
r=radius,s=res,nsp=res)
if pos != None : cmds.move(float(pos[0]),float(pos[1]),float(pos[2]),cone)
if parent is not None:
self.reParent(cone,parent)
# self.addObjectToScene(self.getCurrentScene(),instance)
return str(cone),mesh
def Cylinder(self,name,radius=1.,length=1.,res=16,pos = None,parent=None,**kw):
#import numpy
name = self.checkName(name)
diameter = 2*radius
axis = [0.0,0.0,1.0]
if "axis" in kw : #orientation
dic = {"+X":[1.,0.,0.],"-X":[-1.,0.,0.],"+Y":[0.,1.,0.],"-Y":[0.,-1.,0.],
"+Z":[0.,0.,1.],"-Z":[0.,0.,-1.]}
if type(kw["axis"]) is str :
axis = dic[kw["axis"]]
else :
axis = kw["axis"]
cyl,mesh=cmds.polyCylinder(name=name,axis=axis,
r=radius, sx=res, sy=res, sz=5, h=length)
if pos != None : cmds.move(float(pos[0]),float(pos[1]),float(pos[2]),cyl)
if parent is not None:
self.reParent(cyl,parent)
# self.addObjectToScene(self.getCurrentScene(),instance)
return str(cyl),mesh#,mesh
def oneCylinder(self,name,head,tail,radius=None,instance=None,material=None,
parent = None,color=None):
name = self.checkName(name)
laenge,wsz,wz,coord=self.getTubeProperties(head,tail)
# print "oneCylinder instance",instance
if instance == None :
obj = self.Cylinder(name)
else :
obj = self.newMInstance(name,instance,parent=parent)
# obj = name
# self.translateObj(name,coord)
# self.setTranslation(name,coord)
# #obj.setLocation(float(coord[0]),float(coord[1]),float(coord[2]))
# cmds.setAttr(name+'.ry',float(degrees(wz)))
# cmds.setAttr(name+'.rz',float(degrees(wsz)))
# cmds.scale( 1, 1, laenge, name,absolute=True )
if radius is None :
radius= 1.0
self.setTransformation(obj,trans=coord,scale=[radius, radius, laenge],
rot=[0.,wz,wsz])
if material is not None :
self.assignMaterial(obj,material)
elif color is not None :
mats = self.getMaterialObject(obj)
if not mats :
mat = self.addMaterial("mat_"+name,color)
self.assignMaterial(obj,mat)
else :
self.colorMaterial(mats[0],color)
return obj
def updateOneCylinder(self,name,head,tail,radius=None,material=None,color=None):
name = self.checkName(name)
laenge,wsz,wz,coord=self.getTubeProperties(head,tail)
obj = self.getObject(name)
if radius is None :
radius= 1.0
self.setTransformation(obj,trans=coord,scale=[radius, radius, laenge],
rot=[0.,wz,wsz])
if material is not None :
self.assignMaterial(obj,material)
elif color is not None :
mats = self.getMaterialObject(obj)
if not mats :
mat = self.addMaterial("mat_"+name,color)
self.assignMaterial(obj,mat)
else :
self.colorMaterial(mats[0],color)
return obj
def updateTubeObj(self,o,coord1,coord2):
laenge,wsz,wz,pos=self.getTubeProperties(coord1,coord2)
self.setTransformation(o,trans=pos,scale=[1., 1., laenge],
rot=[0.,wz,wsz])
# cmds.scale( 1., 1., laenge, o,absolute=True )
# self.setTranslation(o,pos)
## cmds.move(float(pos[0]),float(pos[1]),float(pos[2]), o, absolute=True )
# cmds.setAttr(o+'.ry',float(degrees(wz)))
# cmds.setAttr(o+'.rz',float(degrees(wsz)))
    def updateTubeMeshold(self,atm1,atm2,bicyl=False,cradius=1.0,quality=0):
        # Legacy ("old") entry point.
        # NOTE(review): this forwards bicyl/cradius keywords that updateTubeObj
        # does not accept (its signature is (o, coord1, coord2)), so any call
        # raises TypeError -- looks like dead legacy code; confirm before use.
        self.updateTubeObj(atm1,atm2,bicyl=bicyl,cradius=cradius)
def updateTubeMesh(self,mesh,basemesh=None,cradius=1.0,quality=0):
# print mesh
# print cradius, mesh
mesh = self.getObject(str(mesh))
# print mesh
maya.cmds.polyCylinder(mesh,e=True,r=cradius)
# def updateTubeObjs(self,g):
# if not hasattr(g,'obj') : return
# newpoints=g.getVertices()
# newfaces=g.getFaces()
# #print "upadteObjTubes"
# for i,o in enumerate(g.obj):
# laenge,wsz,wz,pos=self.getTubeProperties(points[f[0]],points[f[1]])
# cmds.scale( 1, 1, laenge, o,absolute=True )
# cmds.move(float(pos[0]),float(pos[1]),float(pos[2]), o, absolute=True )
# cmds.setAttr(o+'.ry',float(degrees(wz)))
# cmds.setAttr(o+'.rz',float(degrees(wsz)))
def plane(self,name,center=[0.,0.,0.],size=[1.,1.],cornerPoints=None,visible=1,**kw):
#polyPlane([axis=[linear, linear, linear]], [
# constructionHistory=boolean], [createUVs=int], [height=linear],
# [name=string], [object=boolean], [subdivisionsX=int],
# [subdivisionsY=int], [texture=int], [width=linear])
plane,shape = cmds.polyPlane(name=name,w=float(size[0]),h=float(size[1]),
ax=[0.,0.,1.])
if cornerPoints != None :
for i in range(3):
size[i] = cornerPoints[1][i]-cornerPoints[0][i]
for i in range(3):
center[i]=(cornerPoints[0][i]+cornerPoints[1][i])/2.
cmds.move(float(center[0]),float(center[1]),float(center[2]),plane)
if "subdivision" in kw :
cmds.polyPlane(plane,e=1,
sx=kw["subdivision"][0],sy=kw["subdivision"][1])
if "axis" in kw : #orientation
dic = { "+X":[1.,0.,0.],"-X":[-1.,0.,0.],
"+Y":[0.,1.,0.],"-Y":[0.,-1.,0.],
"+Z":[0.,0.,1.],"-Z":[0.,0.,-1.]}
idic = { 0:[1.,0.,0.],1:[-1.,0.,0.],
2:[0.,1.,0.],3:[0.,-1.,0.],
4:[0.,0.,1.],5:[0.,0.,-1.]}
if type(kw["axis"]) is str :
axis = dic[kw["axis"]]
else : #int
axis = idic[kw["axis"]]
cmds.polyPlane(plane,e=1,ax=axis)
# if "material" in kw :
# texture = plane.MakeTag(c4d.Ttexture)
# if type(kw["material"]) is c4d.BaseMaterial :
# texture[1010] = kw["material"]
# else :
# texture[1010] = self.addMaterial("plane",[1.,1.,0.])
parent = None
if "parent" in kw :
parent = kw["parent"]
self.addObjectToScene(self.getCurrentScene(),plane,parent=parent)
return plane,shape
def PointCloudObject(self,name,**kw):
#print "cloud", len(coords)
name = self.checkName(name)
coords=kw['vertices']
# nface = 0
# if kw.has_key("faces"):
# nface = len(kw['faces'])
# obj = self.createsNmesh(name+'ds',coords,None,[])
# return obj[0]
partShape,part = self.particule(name+"ds", coords)
return part,partShape
def getJointPosition(self,jointname):
return self.getTranslation(jointname)
#return self.getTranslationOM(jointname)
# fnJt=oma.MFnIkJoint()
# mobj = self.getNode(jointname)
# if not fnJt.hasObj(mobj ) :
# print "no joint provided!"
# return None
# fnJt.setObject(mobj)
# cvs = om.MPointArray()
# ncurve.getCVs(cvs,om.MSpace.kPostTransform)
# return cvs
def updateArmature(self,basename,coords,listeName=None,scn=None,root=None,**kw):
for j in range(len(coords)):
atC=coords[j]
name = basename+'bone'+str(j)
if listeName is not None:
name = listeName[j]
relativePos=[atC[0],atC[1],atC[2]]
cmds.joint(self.checkName(name),e=1, p=relativePos)
def armature(self,basename,coords,listeName=None,scn=None,root=None,**kw):
#bones are called joint in maya
#they can be position relatively or globally
basename = self.checkName(basename)
bones=[]
# center = self.getCenter(coords)
parent = self.newEmpty(basename)
self.addObjectToScene(scn,parent,parent=root)
for j in range(len(coords)):
atC=coords[j]
#bones.append(c4d.BaseObject(BONE))
relativePos=[atC[0],atC[1],atC[2]]
name = basename+'bone'+str(j)
if listeName is not None:
name = listeName[j]
joint=cmds.joint(n=self.checkName(name), p=relativePos) #named "joint1"
bones.append(joint)
if scn != None :
if j==0 : self.addObjectToScene(scn,bones[j],parent=parent)
else : self.addObjectToScene(scn,bones[j],parent=bones[j-1])
return parent,bones
def bindGeom2Bones(self,listeObject,bones):
"""
Make a skinning. Namely bind the given bones to the given list of geometry.
This function will joins the list of geomtry in one geometry
@type listeObjects: list
@param listeObjects: list of object to joins
@type bones: list
@param bones: list of joins
"""
if len(listeObject) >1:
self.JoinsObjects(listeObject)
else :
self.ObjectsSelection(listeObject,"new")
#2- add the joins to the selection
self.ObjectsSelection(bones,"add")
#3- bind the bones / geoms
cmds.bindSkin()
#IK:cmds.ikHandle( sj='joint1', ee='joint5', p=2, w=.5 )
def getParticulesPosition(self,name):
name = self.checkName(name)
partO=self.getMShape(name) #shape..
fnP = omfx.MFnParticleSystem(partO)
pos=om.MVectorArray(fnP.count())
oriPsType = fnP.renderType()
if(oriPsType == omfx.MFnParticleSystem.kTube):
fnP.position0(pos);
else:
fnP.position(pos);
return pos
def setParticulesPosition(self,newPos,PS=None):
if PS == None :
return
obj = self.checkName(PS)
partO=self.getMShape(obj) #shape..
fnP = omfx.MFnParticleSystem(partO)
oriPsType = fnP.renderType()
pos=om.MVectorArray(fnP.count())
#pts = om.MPointArray(fnP.count())
for v in newPos:
p = om.MVector( float(v[0]),float(v[1]),float(v[2]) )
pos.append(p)
# pts.append(p)
#fnP.emit(pts)
fnP.setPerParticleAttribute("position",pos)
def getParticles(self,name,**kw):
PS = self.getObject(name)
return PS
def updateParticles(self,newPos,PS=None,**kw):
if PS == None :
return
obj = self.checkName(PS)
partO=self.getMShape(obj) #shape..
fnP = omfx.MFnParticleSystem(partO)
oriPsType = fnP.renderType()
currentN = fnP.count()
N = len(newPos)
fnP.setCount(N)
pos=om.MVectorArray(fnP.count())
#pts = om.MPointArray(fnP.count())
for v in newPos:
p = om.MVector( float(v[0]),float(v[1]),float(v[2]) )
pos.append(p)
fnP.setPerParticleAttribute("position",pos)
#this update the particle position not the particle number
def updateParticleRotation(self,obj,rotation):
obj = self.checkName(obj)
partO=self.getMShape(obj) #shape..
fnP = omfx.MFnParticleSystem(partO)
oriPsType = fnP.renderType()
rot=om.MVectorArray(fnP.count())
#euler angle?
for v in rotation:
p = om.MVector( float(v[0]),float(v[1]),float(v[2]) )
pos.append(p)
fnP.setPerParticleAttribute("rotationPP",rot)
#this update the particle position not the particle number
def updateParticle(self,obj,vertices,faces):
obj = self.checkName(obj)
partO=self.getMShape(obj) #shape..
fnP = omfx.MFnParticleSystem(partO)
oriPsType = fnP.renderType()
if(oriPsType == omfx.MFnParticleSystem.kTube):
if faces is None :
return
position0 = om.MVectorArray()
position1 = om.MVectorArray()
for i,f in enumerate(face):
coord1 = c = vertices[f[0]]
coord2 = vertices[f[1]]
p = om.MVector( float(c[0]),float(c[1]),float(c[2]) )
#print 'point:: %f, %f, %f' % (p.x, p.y, p.z)
position0.append(p)
c= coord2
p = om.MVector( float(c[0]),float(c[1]),float(c[2]) )
#print 'point:: %f, %f, %f' % (p.x, p.y, p.z)
position1.append(p)
fnP.setPerParticleAttribute("position0",position0)
fnP.setPerParticleAttribute("position1",position1)
else :
pos=om.MVectorArray(fnP.count())
#pts = om.MPointArray(fnP.count())
for v in vertices:
p = om.MVector( float(v[0]),float(v[1]),float(v[2]) )
pos.append(p)
# pts.append(p)
#fnP.emit(pts)
fnP.setPerParticleAttribute("position",pos)
#fnP.setPerParticleAttribute? position
#stat = resultPs.emit(finalPos);
def particule(self,name, coord,**kw):
name = self.checkName(name)
if coord is not None :
try :
coord = numpy.array(coord).tolist()
except :
pass
part,partShape=cmds.particle(n=name,p=list(coord))
else :
part,partShape=cmds.particle(n=name)
# instant = cmds.particleInstancer(part, a = 1, object = cyl[0],
# position = 'bondPos', aimDirection = 'velocity',
# scale = 'bondScaler',
# name = (chainName+ '_geoBondsInstances'))
return partShape,part
def updateMetaball(self,name,vertices=None):
if vertices is None :
return
self.updateParticle(name,vertices=vertices,faces=None)
def metaballs(self,name,coords,radius,scn=None,root=None,**kw):
# atoms=selection.findType(Atom)
#no metaball native in mauya, need to use particle set to blobby surface
#use of the point cloud polygon object as the emmiter
# name is on the form 'metaballs'+mol.name
# if scn == None:
# scn = self.getCurrentScene()
#molname = name.split("balls")[1]
#emiter = molname+"_cloud"
name = self.checkName(name)
partShape,part = self.particule(name, coords)
#need to change the rep
node = self.getNode(partShape)
plug = self.getNodePlug("particleRenderType",node)
plug.setInt(7); #Bloby surface s/w
return part,partShape
def splinecmds(self,name,coords,type="",extrude_obj=None,scene=None,parent=None):
#Type : "sBezier", "tBezier" or ""
name = self.checkName(name)
if scene is None :
scene = self.getCurrentScene()
#parent=newEmpty(name)
curve = cmds.curve(n=name,p=coords)
#return the name only, but create a transform node with name : name
#and create a curveShape named curveShape1
objName=cmds.ls("curveShape1")
cmds.rename(objName,name+"Shape")
cmds.setAttr(name+"Shape"+".dispEP",1)
if parent is not None :
cmds.parent( name, parent)
return name,None
def extrudeSpline(self,spline,**kw):
extruder = None
shape = None
spline_clone = None
if "shape" in kw:
if type(kw["shape"]) == str :
shape = self.build_2dshape("sh_"+kw["shape"]+"_"+str(spline),
kw["shape"])[0]
else :
shape = kw["shape"]
if shape is None :
shapes = self.build_2dshape("sh_circle"+str(spline))[0]
if "extruder" in kw:
extruder = kw["extruder"]
# if extruder is None :
# extruder=self.sweepnurbs("ex_"+spline.GetName())
if "clone" in kw and kw["clone"] :
spline_clone = cmds.duplicate(spline,n="exd"+str(spline))
self.resetTransformation(spline_clone)
extruder=cmds.extrude( shape[0],spline_clone,
et = 2, ucp = 1,n="ex_"+str(spline), fpt=1,upn=1)
self.toggleDisplay(spline_clone,False)
return extruder,shape,spline_clone
else :
extruder=cmds.extrude( shape[0],spline,
et = 2, ucp = 1,n="ex_"+str(spline), fpt=1,upn=1)
return extruder,shape
#setAttr "extrudedSurfaceShape1.simplifyMode" 1;
def build_2dshape(self,name,type="circle",**kw):
shapedic = {"circle":{"obj":cmds.circle,"size":["r",]},
# "rectangle":{"obj":None,"size":[0,0]}
}
shape = shapedic[type]["obj"](n=name, nr=(1, 0, 0), c=(0, 0, 0),r=0.3)
dopts = [1.,1.]
if "opts" in kw :
dopts = kw["opts"]
if len(shapedic[type]["size"]) == 1 :
pass
# shape[shapedic[type]["size"][0]] = dopts[0]
else :
for i in range(len(shapedic[type]["size"])) :
pass
# shape[shapedic[type]["size"][i]] = dopts[i]
self.addObjectToScene(None,shape)
return shape,name+"Shape"
def spline(self,name,coords,type="",extrude_obj=None,scene=None,
parent=None,**kw):
#Type :
name = self.checkName(name)
if scene is None :
scene = self.getCurrentScene()
#parent=newEmpty(name)
if extrude_obj is not None:
shape,curve = self.omCurve(name+"_spline",coords)
#return the name only, but create a transform node with name : name
#and create a curveShape named curveShape1
if parent is not None :
cmds.parent( self.checkName(name)+"_spline", parent)
# extrude profile curve along path curve using "flat" method
# The extrude type can be distance-0, flat-1, or tube-2
extruded=cmds.extrude( extrude_obj,self.checkName(name)+"_spline",
et = 2, ucp = 1,n=name, fpt=1,upn=1)
#setAttr "extrudedSurfaceShape1.simplifyMode" 1;
return name,shape,extruded
shape,curve = self.omCurve(name,coords)
#return the name only, but create a transform node with name : name
#and create a curveShape named curveShape1
if parent is not None :
cmds.parent( self.checkName(name), parent)
return name,shape
def getSplinePoints(self,name,convert=False):
name = self.checkName(name)
ncurve = om.MFnNurbsCurve()
mobj = self.getNode(self.checkName(name))
if not ncurve.hasObj(mobj ) :
mobj = self.getNode(self.checkName(name)+"Shape")
if not ncurve.hasObj(mobj) :
print "no curve shape provided!"
return None
ncurve.setObject(mobj)
cvs = om.MPointArray()
ncurve.getCVs(cvs,om.MSpace.kPostTransform)
return cvs
    def update_spline(self,name,coords):
        """Rewrite the CVs of an existing nurbs curve with *coords*.

        Accepts the transform or shape name; returns None when no curve shape
        is found. Knots are left untouched (setKnots is commented out), so the
        CV count is expected to match the existing curve.
        """
        #need to provide the object shape name
        name = self.checkName(name)
        ncurve = om.MFnNurbsCurve()
        mobj = self.getNode(self.checkName(name))
        if not ncurve.hasObj(mobj ) :
            mobj = self.getNode(self.checkName(name)+"Shape")
            if not ncurve.hasObj(mobj) :
                print "no curve shape provided!"
                return None
        ncurve.setObject(mobj)
        deg = 3; #Curve Degree
        ncvs = len(coords); #Number of CVs
        spans = ncvs - deg # Number of spans
        nknots = spans+2*deg-1 # Number of knots
        controlVertices = om.MPointArray()
        knotSequences = om.MDoubleArray()
        # point array of plane vertex local positions
        for c in coords:
            p = om.MPoint(om.MFloatPoint( float(c[0]),float(c[1]),float(c[2]) ))
            #print 'point:: %f, %f, %f' % (p.x, p.y, p.z)
            controlVertices.append(p)
#        for i in range(nknots):
#                knotSequences.append(i)
#        create(controlVertices,knotSequences, deg,
#                            om.MFnNurbsCurve.kOpen, False, False
        ncurve.setCVs(controlVertices,om.MSpace.kPostTransform)
#        ncurve.setKnots(knotSequences)
        ncurve.updateCurve()
    def omCurve(self,name,coords,**kw):
        """Create an open nurbs curve through *coords* with the OpenMaya API.

        kw["deg"] overrides the curve degree (default 3). The auto-generated
        transform and shape are renamed to *name* / "<name>Shape".
        Returns (MFnNurbsCurve function set, curve MObject).
        """
        #default value
        name = self.checkName(name)
        deg = 3; #Curve Degree
        ncvs = len(coords); #Number of CVs
        if kw.has_key("deg"):
            deg = kw['deg']
        spans = ncvs - deg # Number of spans
        nknots = spans+2*deg-1 # Number of knots
        controlVertices = om.MPointArray()
        knotSequences = om.MDoubleArray()
        # point array of plane vertex local positions
        for c in coords:
            p = om.MPoint(om.MFloatPoint( float(c[0]),float(c[1]),float(c[2]) ))
            #print 'point:: %f, %f, %f' % (p.x, p.y, p.z)
            controlVertices.append(p)
        # uniform integer knot vector of the required length
        for i in range(nknots):
                knotSequences.append(i)
        curveFn=om.MFnNurbsCurve()
        curve = curveFn.create(controlVertices,knotSequences, deg,
                               om.MFnNurbsCurve.kOpen, False, False)
#        curveFn.setName(name)
        print (curveFn.partialPathName())
        print (curveFn.name())
        # Maya auto-names the nodes (e.g. "curveShape1"); split the numeric
        # suffix off the shape name to recover the transform name, then rename
        # transform and shape to the requested name
        shapename = curveFn.name()
        objName = shapename.split("Shape")[0]
        n = shapename.split("Shape")[1]
#        objName=cmds.ls("curve1")[0]
        cmds.rename(objName+n,name)
        nodeName = curveFn.name() #curveShape
        cmds.rename(nodeName, name+"Shape")
        return curveFn, curve
def createLines(self,name,coords,normal,faces):
partShape,part = self.linesAsParticles(name,coords,faces)
return part
    def linesAsParticles(self,name,coords,face):
        """Render line segments as a tube-type particle system.

        For each edge in *face*, position0/position1 hold the segment ends;
        the emitted particle sits at the tube-properties position for that
        edge. Returns (particle shape, particle transform).
        """
        #what about omfx to create the system...
        name = self.checkName(name)
        partShape,part = self.particule(name, None)
        path = self.getMShape(part)
        node = path.node()
        depNodeFn = om.MFnDependencyNode( node )
        plug = self.getNodePlug("particleRenderType", node )
        plug.setInt(9); #Tube s/w
        fnP = omfx.MFnParticleSystem(path)
        pts = om.MPointArray()
        position0 = om.MVectorArray()
        position1 = om.MVectorArray()
        for i,f in enumerate(face):
            coord1 = c = coords[f[0]]
            coord2 = coords[f[1]]
            p = om.MVector( float(c[0]),float(c[1]),float(c[2]) )
            #print 'point:: %f, %f, %f' % (p.x, p.y, p.z)
            position0.append(p)
            c= coord2
            p = om.MVector( float(c[0]),float(c[1]),float(c[2]) )
            #print 'point:: %f, %f, %f' % (p.x, p.y, p.z)
            position1.append(p)
            # emit point: tube placement position for this segment
            laenge,wsz,wz,c=self.getTubeProperties(coord1,coord2)
            p = om.MPoint(om.MFloatPoint( float(c[0]),float(c[1]),float(c[2]) ))
            pts.append(p)
#        fnP.emit(pts)
        fnP.setPerParticleAttribute("position0",position0)
        fnP.setPerParticleAttribute("position1",position1)
        fnP.emit(pts)
        return partShape,part
def mayaVec(self,v):
return om.MFloatPoint( float(v[0]),float(v[1]),float(v[2]) )
    def getFaces(self,obj,**kw):
        """Return the triangle indices of mesh 'obj'.

        With numpy enabled the result is an (ntri, 3) integer array,
        otherwise the flat MIntArray of vertex indices is returned.
        """
#        import numpy
        node = self.getNode('mesh_'+obj)
        meshnode = om.MFnMesh(node)
        triangleCounts =om.MIntArray()
        triangleVertices= om.MIntArray()
        meshnode.getTriangles(triangleCounts,triangleVertices)
        if self._usenumpy :
            # three indices per triangle -> reshape the flat list to (ntri, 3)
            return numpy.array(triangleVertices).reshape((len(triangleVertices)/3,3))
        else :
            return triangleVertices
def polygons(self,name,proxyCol=False,smooth=False,color=[[1,0,0],], material=None, **kw):
normals = kw["normals"]
name,meshFS = self.createsNmesh(name,kw['vertices'],normals,kw['faces'],color=color,
smooth=smooth,material=material)
return name
    def createsNmesh(self,name,vertices,normal,faces,color=[[1,0,0],],smooth=False,
                     material=None,proxyCol=False,**kw):
        """
        This is the main function that create a polygonal mesh.
        @type name: string
        @param name: name of the pointCloud
        @type vertices: array
        @param vertices: list of x,y,z vertices points
        @type vnormals: array
        @param vnormals: list of x,y,z vertex normals vector
        @type faces: array
        @param faces: list of i,j,k indice of vertex by face
        @type smooth: boolean
        @param smooth: smooth the mesh
        @type material: hostApp obj
        @param material: material to apply to the mesh
        @type proxyCol: boolean
        @param proxyCol: do we need a special object for color by vertex (ie C4D)
        @type color: array
        @param color: r,g,b value to color the mesh
        @rtype: hostApp obj
        @return: the polygon object
        """
        # a single flat [r, g, b] is promoted to a one-entry color list
        if len(color) == 3 :
            if type(color[0]) is not list :
                color = [color,]
        outputMesh = om.MObject()
        #print outputMesh.name()
        #cmds.rename(outputMesh.name(), name)
        #test=cmds.createNode( 'transform', n='transform1' )
        # sanitize characters Maya does not accept in node names
        name=name.replace(":","_")
        name=name.replace("-","_")
        name=name.replace("'","")
        name=name.replace('"',"")
        name=self.checkName(name)
        #print "NMesh ",name
        numFaces = 0
        if faces is not None :
            numFaces = len(faces)
        numVertices = len(vertices)
        # point array of plane vertex local positions
        points = om.MFloatPointArray()
        for v in vertices:
            points.append(self.mayaVec(v))
        #mayaVertices=map(mayaVec,vertices)
        #map(points.append,mayaVertices)
        # vertex connections per poly face in one array of indexs into point array given above
        faceConnects = om.MIntArray()
        for f in faces:
            for i in f :
                faceConnects.append(int(i))
        # an array to hold the total number of vertices that each face has
        # NOTE(review): len(f) is the length of the *last* face from the loop
        # above, so this assumes all faces have the same vertex count —
        # confirm callers only pass uniform (e.g. all-triangle) faces.
        faceCounts = om.MIntArray()
        for c in range(0,numFaces,1):
            faceCounts.append(int(len(f)))
        #create mesh object using arrays above and get name of new mesh
        meshFS = om.MFnMesh()
        newMesh = meshFS.create(numVertices, numFaces, points, faceCounts,
                                faceConnects, outputMesh)
#        meshFS.updateSurface()
        nodeName = meshFS.name()
        cmds.rename(nodeName, "mesh_"+name)
        #print 'Mesh node name is: %s' % nodeName
        # rename the auto-created transform ("polySurface1") to the final name
        objName=cmds.ls("polySurface1")[0]
        cmds.rename(objName,name)
        #newName should bydefault polySurface something
        #  assign new mesh to default shading group
        if color is not None and len(color) > 1:
            self.color_mesh_perVertex(meshFS,color)
        doMaterial = True
        if type(material) is bool :
            doMaterial = material
        if doMaterial:
            if material == None :
                if len(name.split("_")) == 1 : splitname = name
                else :
                    splitname = name.split("_")[1]
                #print name,name[:4],splitname,splitname[:4]
                self.assignNewMaterial( "mat_"+name, color[0],'lambert' ,"mesh_"+name)
            else :
                self.assignMaterial("mesh_"+name,material)
        if "parent" in kw :
            parent = kw["parent"]
#            print "reparent ", name,parent
            self.reParent(name,parent)
        return name,meshFS#,outputMesh
def updatePoly(self,obj,vertices=None,faces=None):
if type(obj) is str:
obj = self.getObject(obj)
if obj is None : return
node = self.getMShape(self.checkName(obj))
if node.hasFn(om.MFn.kMesh):
self.updateMesh(obj,vertices=vertices,faces=faces)
elif node.hasFn(om.MFn.kParticle):
self.updateParticle(obj,vertices=vertices,faces=faces)
    def updateMesh(self,meshnode,vertices=None,faces=None, smooth=False,**kw):#chains.residues.atoms.coords,indices
        """Rebuild an existing mesh in place with new vertices (and faces).

        Does nothing when 'vertices' is None; when 'faces' is None the
        current polygon count is kept.
        """
        # print meshnode,type(meshnode)
        if type(meshnode) is str or type(meshnode) is unicode:
            node = self.getMShape(self.checkName(meshnode))#self.getNode(self.checkName(meshnode))
            meshnode = om.MFnMesh(node)
        # meshnode = self.getObject(meshnode,doit=True)
        if meshnode is None:
            return
        nv = meshnode.numVertices()
        nf = meshnode.numPolygons()
        if vertices is not None :
            numVertices = len(vertices)
            # point array of plane vertex local positions
            points = om.MFloatPointArray()
            for v in vertices:
                points.append(self.mayaVec(v))
        else :
            # nothing to update without new vertex positions
            return
        #numVertices = nv
        if faces is not None :
            numFaces = len(faces)
        else :
            numFaces = nf
            faces = []
        faceConnects = om.MIntArray()
        for f in faces:
            for i in f :
                faceConnects.append(int(i))
        # an array to hold the total number of vertices that each face has
        # NOTE(review): reuses len(f) of the last iterated face for every
        # face, and raises NameError when 'faces' is empty while numFaces > 0
        # — confirm callers always pass uniform, non-empty face lists.
        faceCounts = om.MIntArray()
        for c in range(0,numFaces,1):
            faceCounts.append(int(len(f)))
        #newMesh = meshFS.create(numVertices, numFaces, points, faceCounts, faceConnects, outputMesh)
        result = meshnode.createInPlace(numVertices, numFaces, points, faceCounts, faceConnects)
        meshnode.updateSurface()
def ToVec(self,v,**kw):
if hasattr(v,"x") :
return [v.x,v.y,v.z]
else :
return v
    def arr2marr(self,v):
        """Pack a python number sequence into a C double* via MScriptUtil.

        NOTE(review): the returned pointer stays valid only as long as
        self.msutil holds the underlying data — confirm callers consume it
        before the next createFromList call.
        """
        #from http://www.rtrowbridge.com/blog/2009/02/maya-api-docs-demystified-for-python-users/
        self.msutil.createFromList( v, len(v) )
        doubleArrayPtr = self.msutil.asDoublePtr()
        return doubleArrayPtr
# def vecp2m(self,v):
# #from http://www.rtrowbridge.com/blog/2009/02/maya-api-docs-demystified-for-python-users/
# doubleArrayPtr = self.arr2marr(v)
# vec = om.MVector( doubleArrayPtr )
# return vec
def FromVec(self,v,pos=True):
if isinstance(v,om.MVector):
return v
else :
return om.MVector(v[0], v[1], v[2])
def vec2m(self,v):
if isinstance(v,om.MVector):
return v
else :
return om.MVector(float(v[0]), float(v[1]), float(v[2]))
def ToMat(self,mat,**kw):
#maya - > python
return self.m2matrix(mat)
def FromMat(self,mat,**kw):
#pythn->maya
return self.matrixp2m(mat)
def matrixp2m(self,mat):
#from http://www.rtrowbridge.com/blog/2009/02/python-api-mtransformationmatrixgetrotation-bug/
if isinstance(mat,om.MTransformationMatrix) :
return mat
getMatrix = om.MMatrix()
matrixList = mat#mat.transpose().reshape(16,)
om.MScriptUtil().createMatrixFromList(matrixList, getMatrix)
mTM = om.MTransformationMatrix( getMatrix )
rotOrder = om.MTransformationMatrix().kXYZ
return mTM
    def m2matrix(self,mMat):
        """Convert a Maya MMatrix / MTransformationMatrix to a python 4x4
        nested list of floats; any other value is returned unchanged."""
        #return mMat
        #do we use numpy
        if isinstance(mMat,om.MTransformationMatrix) :
            matrix = mMat.asMatrix()
        elif isinstance(mMat,om.MMatrix):
            matrix = mMat
        else :
            return mMat
        us=om.MScriptUtil()
        out_mat = [0.0, 0.0, 0.0,0.0,
                   0.0, 0.0, 0.0,0.0,
                   0.0, 0.0, 0.0,0.0,
                   0.0, 0.0, 0.0,0.0]
        us.createFromList( out_mat, len(out_mat) )
        # float[4][4] out-buffer required by MMatrix.get
        ptr1 = us.asFloat4Ptr()
        matrix.get(ptr1)
        res_mat = [[0.0, 0.0, 0.0,0.0],
                   [0.0, 0.0, 0.0,0.0],
                   [0.0, 0.0, 0.0,0.0],
                   [0.0, 0.0, 0.0,0.0]]
        # copy the C buffer back into a plain python nested list
        for i in range(4):
            for j in range(4):
                val = us.getFloat4ArrayItem(ptr1, i,j)
                res_mat[i][j]=val
        return res_mat
    def alignNormal(self,poly):
        """Placeholder: normal alignment is not implemented in this backend."""
        pass
def triangulate(self,poly):
#select poly
doc = self.getCurrentScene()
mesh = self.getMShape(poly)
meshname= mesh.partialPathName()
#checkType
if self.getType(meshname) != self.MESH :
return
cmds.polyTriangulate(meshname)
def getMeshVertices(self,poly,transform=False,selected = False):
meshnode = self.checkIsMesh(poly)
if selected :
mverts_indice = []
verts =[]
v = om.MIntArray()
vertsComponent = om.MObject()
meshDagPath = om.MDagPath()
activeList = om.MSelectionList()
om.MGlobal.getActiveSelectionList(activeList)
selIter = om.MItSelectionList(activeList,om.MFn.kMeshVertComponent)
while selIter.isDone():
selIter.getDagPath(meshDagPath, vertsComponent)
if not vertsComponent.isNull():
# ITERATE THROUGH EACH "FACE" IN THE CURRENT FACE COMPONENT:
vertIter = om.MItMeshVertex(meshDagPath,vertsComponent)
while vertIter.isDone():
mverts_indice.append(vertIter.index()) #indice of the faces
pts = faceIter.position(om.MSpace.kWorld)
verts.append(self.ToVec(pts))
faces.append(v[0],v[1],v[2])
vertIter.next()
selIter.next()
return verts,mverts_indice
else :
nv = meshnode.numVertices()
points = om.MFloatPointArray()
meshnode.getPoints(points)
vertices = [self.ToVec(points[i]) for i in range(nv)]
return vertices
def getMeshNormales(self,poly,selected = False):
meshnode = self.checkIsMesh(poly)
nv = meshnode.numNormals()
normals = om.MFloatVectorArray()
meshnode.getVertexNormals(False,normals)
vnormals = [self.ToVec(normals[i]) for i in range(nv)]
if selected :
v,indice = self.getMeshVertices(poly,selected = selected)
vn=[]
for i in indice:
vn.append(vnormals[i])
return vn,indice
return vnormals
def getMeshEdges(self,poly,selected = False):
#to be tested
meshnode = self.checkIsMesh(poly)
ne= meshnode.numEdges()
edges = []
edgeConnects = om.MIntArray()
for i in range(ne):
meshnode.getEdgeVertices(i,edgeConnects)
edges.append(edgeConnects)
return edges
    def getMeshFaces(self,poly,selected = False):
        """Return triangle faces of the mesh.

        selected=False -> flat triangle index list (or an (ntri, 3) numpy
        array when numpy is enabled).
        selected=True  -> (faces, face_indices) for the currently selected
        polygon components.
        """
        meshnode = self.checkIsMesh(poly)
        faceConnects = om.MIntArray()
        faceCounts = om.MIntArray()
        meshnode.getTriangles(faceCounts,faceConnects)
        if selected :
            mfaces_indice = []
            faces =[]
            v = om.MIntArray()
            faceComponent = om.MObject()
            meshDagPath = om.MDagPath()
            activeList = om.MSelectionList()
            om.MGlobal.getActiveSelectionList(activeList)
            selIter = om.MItSelectionList(activeList,om.MFn.kMeshPolygonComponent)
#            print "itersel",selIter.isDone()
            while 1:
                selIter.getDagPath(meshDagPath, faceComponent);
#                print "faces ?",faceComponent.isNull()
                if not faceComponent.isNull():
#                    print ' ITERATE THROUGH EACH "FACE" IN THE CURRENT FACE COMPONENT:'
                    faceIter = om.MItMeshPolygon(meshDagPath,faceComponent)
                    while 1:
                        mfaces_indice.append(faceIter.index()) #indice of the faces
                        faceIter.getVertices(v)
                        # NOTE(review): takes only the first three vertices —
                        # assumes the mesh was triangulated beforehand.
                        faces.append([v[0],v[1],v[2]])
                        faceIter.next()
                        if faceIter.isDone() : break
                selIter.next()
                if selIter.isDone() : break
            return faces,mfaces_indice
        if self._usenumpy :
            return numpy.array(faceConnects).reshape((len(faceConnects)/3,3))
        else :
            return faceConnects
    def DecomposeMesh(self,poly,edit=True,copy=True,tri=True,transform=True,**kw):
        """Extract (faces, vertices, vnormals) from a host mesh object.

        tri=True triangulates the mesh first; transform=True bakes the
        object's transformation into the returned vertices (and, with
        numpy, normals).  With kw['fn'] set, per-face world-space normals
        are returned as a fourth element.  Particle systems return
        (None, positions, None); other non-polygon types (None, None, None).
        """
        # import numpy
        if tri:
            self.triangulate(poly)
        if type(poly) is str or type(poly) is unicode or type(poly) is list:
            mesh = self.getMShape(poly)#dagPath
        else :
            #have to a object shape node or dagpath
            mesh = poly
        print ("mesh ", mesh)
        if self.getType(mesh.partialPathName()) != self.POLYGON :
            if self.getType(mesh.partialPathName()) == self.PARTICULE:
                v = self.getParticulesPosition(mesh.partialPathName())
                return None,v,None
            return None,None,None
        #again problem with instance.....
        meshnode = om.MFnMesh(mesh)
        print ("meshnode",meshnode)
        fnTrans = om.MFnTransform(self.getTransformNode(poly)[0])
        print ("fnTrans",fnTrans)
#        fnTrans = om.MFnTransform(mesh.transform())
        #get infos
        nv = meshnode.numVertices()
        nf = meshnode.numPolygons()
#        m = om.MFloatMatrix()
        points = om.MFloatPointArray()
        normals = om.MFloatVectorArray()
        faceConnects = om.MIntArray()
        faceCounts = om.MIntArray()
        meshnode.getPoints(points)
        #meshnode.getNormals(normals)
        meshnode.getVertexNormals(False,normals)
        meshnode.getTriangles(faceCounts,faceConnects)
        fnormals=[]
        if self._usenumpy :
            # three indices per triangle
            faces = numpy.array(faceConnects).reshape((len(faceConnects)/3,3))
        else :
            faces = faceConnects
        vertices = [self.ToVec(points[i]) for i in range(nv)]
        vnormals = [self.ToVec(normals[i]) for i in range(nv)]
        #remove the copy if its exist? or keep it ?
        #need to apply the transformation
        if transform :
            #node = self.getNode(mesh)
            #fnTrans = om.MFnTransform(mesh)
            mmat = fnTrans.transformation()
            if self._usenumpy :
                mat = self.m2matrix(mmat)
                vertices = self.ApplyMatrix(vertices,numpy.array(mat).transpose())
                vnormals = self.ApplyMatrix(vnormals,numpy.array(mat).transpose())#??
            else :
                # without numpy: multiply each point by the MFloatMatrix
                # (normals are left untransformed on this path)
                out_mat = [0.0, 0.0, 0.0,0.0,
                           0.0, 0.0, 0.0,0.0,
                           0.0, 0.0, 0.0,0.0,
                           0.0, 0.0, 0.0,0.0]
                self.msutil.createFromList( out_mat, len(out_mat) )
                ptr1 = self.msutil.asFloat4Ptr()
                mmat.asMatrix().get(ptr1)
                m = om.MFloatMatrix(ptr1)
                vertices = []
                for i in range(nv) :
                    v = points[i]*m
                    vertices.append(self.ToVec(v))
                #vertices = [self.ToVec(p*m) for p in points]
        #if edit and copy :
        #    self.getCurrentScene().SetActiveObject(poly)
        #    c4d.CallCommand(100004787) #delete the obj
        print ("ok",len(faces),len(vertices),len(vnormals))
        if "fn" in kw and kw["fn"] :
            fnormals = []
            p = om.MVector( 0.,0.,0. )
            for i in range(len(faces)) :
                meshnode.getPolygonNormal(i,p,om.MSpace.kWorld)#kPostTransform
                fnormals.append(self.ToVec(p))
            return faces,vertices,vnormals,fnormals
        else :
            return faces,vertices,vnormals
def connectAttr(self,shape,i=0,mat=None):
if mat is not None :
#print shape
#print mat+"SG"
cmds.isConnected( shape+'.instObjGroups['+i+']', mat+'SG.dagSetMembers')
#need to get the shape : name+"Shape"
    def rotation_matrix(self,angle, direction, point=None,trans=None):
        """
        Return matrix to rotate about axis defined by point and direction.

        With numpy the generic Helper implementation is used; otherwise a
        Maya MTransformationMatrix is built.  'point' or 'trans' set the
        translation part.
        NOTE(review): if both 'point' and 'trans' are given, 'trans'
        overwrites the translation set from 'point' — confirm intended.
        """
        if self._usenumpy:
            return Helper.rotation_matrix(angle, direction, point=point,trans=trans)
        else :
            direction = self.FromVec(direction)
            direction.normalize()
            # start from identity, then apply the axis/angle rotation
            out_mat = [1.0, 0.0, 0.0,0.0,
                       0.0, 1.0, 0.0,0.0,
                       0.0, 0.0, 1.0,0.0,
                       0.0, 0.0, 0.0,1.0]
            m = self.matrixp2m(out_mat)
            #m = om.MTransformationMatrix()
            m.setToRotationAxis (direction,angle)
            if point is not None:
                point = self.FromVec(point)
                m.setTranslation(point,om.MSpace.kPostTransform)
            if trans is not None :
                trans = self.FromVec(trans)
                m.setTranslation(trans,om.MSpace.kPostTransform)
            #M = m2matrix(m)
            return m
#==============================================================================
# properties objec
#==============================================================================
    def getPropertyObject(self, obj, key=["radius"]):
        """
        Return the property "key" of the object obj

        * overwrited by children class for each host

        @type obj: host Obj
        @param obj: the object that contains the property
        @type key: string
        @param key: name of the property
        @rtype : int, float, str, dict, list
        @return : the property value
        """
        res = []
        if "pos" in key :
            res.append(self.ToVec(self.getTranslation(obj)))
        if "scale" in key :
            res.append(self.ToVec(self.getScale(obj)))
        if "rotation" in key :
            mo = self.getTransformation(obj)
            m = self.ToMat(mo)#.transpose()
            mws = m.transpose()
            rotMatj = mws[:]
            # zero the translation row to keep a pure rotation
            # NOTE(review): the in-place "*=" on a row slice requires m to be
            # a numpy array, not a nested list — confirm ToMat's return type
            # on this code path.
            rotMatj[3][:3]*=0.0
            res.append(rotMatj)
        if self.getType(obj) == self.SPHERE :
            for k in key :
                if k == "radius" :
                    # poly spheres and NURBS spheres expose radius differently
                    try :
                        r=cmds.polySphere(obj,q=1,r=1)
                    except :
                        r=cmds.sphere(obj,q=1,r=1)
                    res.append(r)
        if self.getType(obj) == self.CYLINDER :
            for k in key :
                if k == "radius" :
                    r=cmds.polyCylinder(obj,q=1,r=1)
                    res.append(r)
                elif k == "length" :
                    h=cmds.polyCylinder(obj,q=1,h=1)
                    res.append(h)
                elif k == "axis" :
                    ax = cmds.polyCylinder(obj,q=1,axis=1)
                    res.append(ax)
        if self.getType(obj) == self.CUBE :
            for k in key :
                if k == "length" :
                    l = self.getBoxSize(obj)#cmds.polyCube(obj, q=True,h=True)
                    res.append(l)
        return res
#===============================================================================
# Texture Mapping / UV
#===============================================================================
    def getUV(self,object,faceIndex,vertexIndex,perVertice=True):
        """Read UV coordinates from a mesh.

        perVertice=True  -> [u, v] of the given vertex index.
        perVertice=False -> list of [u, v] for the 3 corners of faceIndex.
        """
        mesh = self.getMShape(object)
        meshnode = om.MFnMesh(mesh)
        #uv=[]
        # MScriptUtil provides the float* out-parameters the API requires
        u_util = maya.OpenMaya.MScriptUtil()
        u_util.createFromDouble(0.0)
        u_ptr = u_util.asFloatPtr()
        v_util = maya.OpenMaya.MScriptUtil()
        v_util.createFromDouble(0.0)
        v_ptr = v_util.asFloatPtr()
        if perVertice :
            meshnode.getUV(vertexIndex, u_ptr, v_ptr)
            u = u_util.getFloat(u_ptr)
            v = v_util.getFloat(v_ptr)
            return [u,v]
        else :
            def getuv(faceIndex,iv,u_ptr,v_ptr):
                meshnode.getPolygonUV(faceIndex,iv,u_ptr,v_ptr)
                u = u_util.getFloat(u_ptr)
                v = v_util.getFloat(v_ptr)
                return [u,v]
            #uv of the face
            return [getuv(faceIndex,iv,u_ptr,v_ptr) for iv in range(3)]
#
#
##meshFn = maya.OpenMaya.MFnMesh(node)
##
#u_util = maya.OpenMaya.MScriptUtil()
#u_util.createFromDouble(0.0)
#u_ptr = u_util.asFloatPtr()
#v_util = maya.OpenMaya.MScriptUtil()
#v_util.createFromDouble(0.0)
#v_ptr = v_util.asFloatPtr()
#
#meshFn.getUV(0, u_ptr, v_ptr)
#
#u = u_util.getFloat(u_ptr)
#v = v_util.getFloat(v_ptr))
##getPolygonUVid
##getPolygonUV
#
#should be faster ?
def setUVs(self,object,uvs):
#uvs is a dictionary key are faceindex, values it the actual uv for the 3-4 vertex
ob = self.getObject(object)
node = self.getNode('mesh_'+ob)
meshnode = om.MFnMesh(node)
meshnode.clearUVs()
u = om.MFloatArray()
v = om.MFloatArray()
uvCounts = om.MIntArray()
uvIds = om.MIntArray()
i = 0
for f in uvs:
for k,uv in enumerate(uvs[f]):
uvIds.append(i)
uvCounts.append(len(uvs[f]))
u.append(uv[0])
v.append(uv[1])
#meshnode.setUV(i,uv[0],uv[1])
#meshnode.assignUV(f,k,i)
i = i +1
meshnode.setUVs(u,v)
meshnode.assignUVs(uvCounts,uvIds)
def setUV(self,object,faceIndex,vertexIndex,uv,perVertice=True,uvid=0):
ob = self.getObject(object)
node = self.getNode('mesh_'+ob)
meshnode = om.MFnMesh(node)
for k in range(3):
luv = uv[k]
meshnode.setUV(uvid,luv[0],luv[1])
meshnode.assignUV(faceIndex,k,uvid)
uvid = uvid +1
return uvid
    def hyperShade_meVertCol(self):
        """Placeholder for wiring mentalray vertex colors in the hypershade.

        The MEL commands below document the manual equivalent; nothing is
        executed yet.
        """
        #mel command : nodeReleaseCallback graph1HyperShadeEd mentalrayVertexColors1 none;
        #  nodeOutlinerInputsCmd connectWindow|tl|cwForm|connectWindowPane|leftSideCW connectWindow|tl|cwForm|connectWindowPane|rightSideCW; nodeOutliner -e -r connectWindow|tl|cwForm|connectWindowPane|rightSideCW;
        #  connectAttr -f mesh_MSMS_MOL1crn.colorSet[0].colorName mentalrayVertexColors1.cpvSets[0];
        #  // Result: Connected mesh_MSMS_MOL1crn.colorSet.colorName to mentalrayVertexColors1.cpvSets. //
        #  // Result: connectWindow|tl|cwForm|connectWindowPane|rightSideCW //
        pass
#==============================================================================
# import / expor / read load / save
#==============================================================================
def readFile(self,filename,**kw):
fileName, fileExtension = os.path.splitext(filename)
fileExtension=fileExtension.replace(".","")
fileExtension=fileExtension.upper()
if fileExtension == "MA":
fileExtension = "mayaAscii"
elif fileExtension == "DAE":
fileExtension = "DAE_FBX"
elif fileExtension == "FBX":
pass
else :
print ("not supported by uPy, contact us!")
return
# doc = self.getCurrentScene()
cmds.file(filename ,type=fileExtension,loadReferenceDepth="all", i=True ) #merge the documets
# c4d.documents.MergeDocument(doc,filename,c4d.SCENEFILTER_OBJECTS|c4d.SCENEFILTER_MATERIALS)
def read(self,filename,**kw):
fileName, fileExtension = os.path.splitext(filename)
fileExtension=fileExtension.replace(".","")
fileExtension=fileExtension.upper()
if fileExtension == "MA":
fileExtension = "mayaAscii"
cmds.file(filename ,type=fileExtension,loadReferenceDepth="all", i=True )
elif fileExtension == "DAE" or fileExtension == "FBX":
import maya.mel as mel
#mel.eval('FBXImportMode -v exmerge;')
filename = filename.replace("\\","\\\\")
mel.eval('FBXImport -f "%s" -t 0;' % filename)#FBXGetTakeName ?
else :
print ("not supported by uPy, contact us!")
return
    def write(self,listObj,**kw):
        """Placeholder: scene export is not implemented in this backend."""
        pass
#==============================================================================
# raycasting
#==============================================================================
def raycast(self,obj,start, end, length, **kw ):
#posted on cgtalk.com
#part of http://code.google.com/p/dynamica/
mo = self.getTransformation(obj)
mi = mo.asMatrixInverse()
mat = self.ToMat(mi)#.transpose()
point = self.ApplyMatrix([start],numpy.array(mat).transpose())[0]
direction = self.ApplyMatrix([end],numpy.array(mat).transpose())[0]
#om.MGlobal.clearSelectionList()
om.MGlobal.selectByName(obj)
sList = om.MSelectionList()
#Assign current selection to the selection list object
om.MGlobal.getActiveSelectionList(sList)
item = om.MDagPath()
sList.getDagPath(0, item)
item.extendToShape()
fnMesh = om.MFnMesh(item)
raySource = om.MFloatPoint(float(point[0]), float(point[1]), float(point[2]), 1.0)
rayDir = om.MFloatVector(float(direction[0]-point[0]), float(direction[1]-point[1]), float(direction[2]-point[2]))
faceIds = None
triIds = None
idsSorted = False
testBothDirections = False
worldSpace = om.MSpace.kWorld
maxParam = length#999999
accelParams = None
sortHits = True
hitPoints = om.MFloatPointArray()
#hitRayParams = om.MScriptUtil().asFloatPtr()
hitRayParams = om.MFloatArray()
hitFaces = om.MIntArray()
hitTris = None
hitBarys1 = None
hitBarys2 = None
tolerance = 0.0001
#http://download.autodesk.com/us/maya/2010help/API/class_m_fn_mesh.html#114943af4e75410b0172c58b2818398f
hit = fnMesh.allIntersections(raySource, rayDir, faceIds, triIds, idsSorted, worldSpace,
maxParam, testBothDirections, accelParams, sortHits,
hitPoints, hitRayParams, hitFaces, hitTris, hitBarys1,
hitBarys2, tolerance)
om.MGlobal.clearSelectionList()
#print hit, len(hitFaces)
if "count" in kw :
#result = int(fmod(len(hitFaces), 2))
return hit, len(hitFaces)
#clear selection as may cause problem if the function is called multiple times in succession
return result | gpl-3.0 |
phenoxim/nova | nova/tests/json_ref.py | 1 | 2271 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_serialization import jsonutils
def _resolve_ref(ref, base_path):
    """Load the document referenced by a JSON '$ref' and resolve any refs
    it contains in turn.

    Only whole-file references are supported; a JSON pointer after '#'
    raises NotImplementedError.
    """
    file_path, _, json_path = ref.partition('#')
    if json_path:
        raise NotImplementedError('JSON refs with JSON path after the "#" is '
                                  'not yet supported')
    path = os.path.join(base_path, file_path)
    # binary mode is needed due to bug/1515231
    with open(path, 'r+b') as f:
        loaded = jsonutils.load(f)
    # refs inside the loaded document are relative to its own directory
    return resolve_refs(loaded, os.path.dirname(path))
def resolve_refs(obj_with_refs, base_path):
    """Recursively expand '$ref' entries in a JSON-like structure.

    A dict containing '$ref' is replaced by the referenced document; the
    dict's remaining keys are then applied on top of it as recursive
    overrides.  Lists and plain dicts are walked in place; scalars are
    returned unchanged.
    """
    if isinstance(obj_with_refs, list):
        for idx in range(len(obj_with_refs)):
            obj_with_refs[idx] = resolve_refs(obj_with_refs[idx], base_path)
        return obj_with_refs
    if isinstance(obj_with_refs, dict):
        if '$ref' not in obj_with_refs:
            for key in obj_with_refs:
                obj_with_refs[key] = resolve_refs(obj_with_refs[key],
                                                  base_path)
            return obj_with_refs
        ref = obj_with_refs.pop('$ref')
        resolved_ref = _resolve_ref(ref, base_path)
        # the rest of the ref dict contains overrides for the ref; apply
        # those overrides recursively here
        _update_dict_recursively(resolved_ref, obj_with_refs)
        return resolved_ref
    # scalar, nothing to do
    return obj_with_refs
def _update_dict_recursively(d, update):
"""Update dict d recursively with data from dict update"""
for k, v in update.items():
if k in d and isinstance(d[k], dict) and isinstance(v, dict):
_update_dict_recursively(d[k], v)
else:
d[k] = v
| apache-2.0 |
florentchandelier/keras | examples/mnist_irnn.py | 70 | 3041 | from __future__ import absolute_import
from __future__ import print_function
import numpy as np
np.random.seed(1337) # for reproducibility
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Activation
from keras.initializations import normal, identity
from keras.layers.recurrent import SimpleRNN, LSTM
from keras.optimizers import RMSprop
from keras.utils import np_utils
'''
This is a reproduction of the IRNN experiment
with pixel-by-pixel sequential MNIST in
"A Simple Way to Initialize Recurrent Networks of Rectified Linear Units "
by Quoc V. Le, Navdeep Jaitly, Geoffrey E. Hinton
arXiv:1504.00941v2 [cs.NE] 7 Apr 201
http://arxiv.org/pdf/1504.00941v2.pdf
Optimizer is replaced with RMSprop which yields more stable and steady
improvement.
Reaches 0.93 train/test accuracy after 900 epochs (which roughly corresponds
to 1687500 steps in the original paper.)
'''
batch_size = 32
nb_classes = 10
nb_epochs = 200
hidden_units = 100

learning_rate = 1e-6
# NOTE(review): clip_norm is defined but never passed to the optimizer —
# the paper clips gradients; confirm whether it should be wired in.
clip_norm = 1.0
BPTT_truncate = 28*28

# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()

# flatten each 28x28 image into a sequence of 784 single-pixel timesteps
X_train = X_train.reshape(X_train.shape[0], -1, 1)
X_test = X_test.reshape(X_test.shape[0], -1, 1)
X_train = X_train.astype("float32")
X_test = X_test.astype("float32")
# scale pixel values from [0, 255] to [0, 1]
X_train /= 255
X_test /= 255
print('X_train shape:', X_train.shape)
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')

# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)

print('Evaluate IRNN...')
# IRNN: simple RNN with ReLU activation, identity recurrent init and a
# small-scale gaussian input init, as in the paper cited above
model = Sequential()
model.add(SimpleRNN(input_dim=1, output_dim=hidden_units,
                    init=lambda shape: normal(shape, scale=0.001),
                    inner_init=lambda shape: identity(shape, scale=1.0),
                    activation='relu', truncate_gradient=BPTT_truncate))
model.add(Dense(hidden_units, nb_classes))
model.add(Activation('softmax'))
rmsprop = RMSprop(lr=learning_rate)
model.compile(loss='categorical_crossentropy', optimizer=rmsprop)

model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epochs,
          show_accuracy=True, verbose=1, validation_data=(X_test, Y_test))

scores = model.evaluate(X_test, Y_test, show_accuracy=True, verbose=0)
print('IRNN test score:', scores[0])
print('IRNN test accuracy:', scores[1])

# baseline: a standard LSTM trained with the same settings
print('Compare to LSTM...')
model = Sequential()
model.add(LSTM(1, hidden_units))
model.add(Dense(hidden_units, nb_classes))
model.add(Activation('softmax'))
rmsprop = RMSprop(lr=learning_rate)
model.compile(loss='categorical_crossentropy', optimizer=rmsprop)

model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epochs,
          show_accuracy=True, verbose=1, validation_data=(X_test, Y_test))

scores = model.evaluate(X_test, Y_test, show_accuracy=True, verbose=0)
print('LSTM test score:', scores[0])
print('LSTM test accuracy:', scores[1])
noironetworks/nova | nova/scheduler/weights/__init__.py | 95 | 1386 | # Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Scheduler host weights
"""
from nova import weights
class WeighedHost(weights.WeighedObject):
    """A host paired with the weight the weighers computed for it."""

    def to_dict(self):
        """Serialize as {'weight': ..., 'host': ...}."""
        return {'weight': self.weight, 'host': self.obj.host}

    def __repr__(self):
        return "WeighedHost [host: %r, weight: %s]" % (
            self.obj, self.weight)
class BaseHostWeigher(weights.BaseWeigher):
    """Base class for host weights.

    Marker subclass of weights.BaseWeigher used to discover host weighers.
    """
    pass
class HostWeightHandler(weights.BaseWeightHandler):
    """Weight handler that wraps weighed hosts as WeighedHost objects."""
    object_class = WeighedHost

    def __init__(self):
        # restrict plugin discovery to BaseHostWeigher subclasses
        super(HostWeightHandler, self).__init__(BaseHostWeigher)
def all_weighers():
    """Return a list of weight plugin classes found in this directory."""
    handler = HostWeightHandler()
    return handler.get_all_classes()
| apache-2.0 |
jathak/ok-client | demo/ok_test/tests/q1.py | 2 | 1670 | test = {
'name': 'Question 1',
'points': 3,
'suites': [
{
'type': 'concept',
'cases': [
{
'answer': 'Domain is numbers. Range is numbers',
'choices': [
'Domain is numbers. Range is numbers',
'Domain is numbers. Range is strings',
'Domain is strings. Range is numbers',
'Domain is strings. Range is strings'
],
'hidden': False,
'question': 'What is the domain and range of the square function?'
}
],
},
{
'type': 'wwpp',
'cases': [
{
'code': r"""
>>> square(3)
9
>>> square(5)
25
"""
},
{
'code': r"""
>>> print(print(square(4)))
16
None
"""
}
],
},
{
'cases': [
{
'code': r"""
>>> square(3)
9
""",
'hidden': False
},
{
'code': r"""
>>> square(2)
4
# explanation: Squaring a negative number
""",
'hidden': True
},
{
'code': r"""
>>> square(0)
0
# explanation: Squaring zero
""",
'hidden': True
},
{
'code': r"""
>>> 1 / square(0)
ZeroDivisionError
""",
'hidden': True
}
],
'scored': True,
'setup': r"""
>>> from hw1 import *
""",
'teardown': r"""
>>> print('Teardown code')
""",
'type': 'doctest'
}
]
}
| apache-2.0 |
katrid/django | django/contrib/staticfiles/management/commands/collectstatic.py | 149 | 13955 | from __future__ import unicode_literals
import os
from collections import OrderedDict
from django.contrib.staticfiles.finders import get_finders
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core.files.storage import FileSystemStorage
from django.core.management.base import BaseCommand, CommandError
from django.core.management.color import no_style
from django.utils.encoding import smart_text
from django.utils.six.moves import input
class Command(BaseCommand):
"""
Command that allows to copy or symlink static files from different
locations to the settings.STATIC_ROOT.
"""
help = "Collect static files in a single location."
requires_system_checks = False
    def __init__(self, *args, **kwargs):
        super(Command, self).__init__(*args, **kwargs)
        # bookkeeping of what happened during collection, reported afterwards
        self.copied_files = []
        self.symlinked_files = []
        self.unmodified_files = []
        self.post_processed_files = []
        self.storage = staticfiles_storage
        self.style = no_style()
        # a storage that cannot resolve filesystem paths is "remote";
        # symlinking is only possible when the destination is local
        try:
            self.storage.path('')
        except NotImplementedError:
            self.local = False
        else:
            self.local = True
    def add_arguments(self, parser):
        """Register the collectstatic command-line options on the parser."""
        parser.add_argument('--noinput',
            action='store_false', dest='interactive', default=True,
            help="Do NOT prompt the user for input of any kind.")
        parser.add_argument('--no-post-process',
            action='store_false', dest='post_process', default=True,
            help="Do NOT post process collected files.")
        parser.add_argument('-i', '--ignore', action='append', default=[],
            dest='ignore_patterns', metavar='PATTERN',
            help="Ignore files or directories matching this glob-style "
                 "pattern. Use multiple times to ignore more.")
        parser.add_argument('-n', '--dry-run',
            action='store_true', dest='dry_run', default=False,
            help="Do everything except modify the filesystem.")
        parser.add_argument('-c', '--clear',
            action='store_true', dest='clear', default=False,
            help="Clear the existing files using the storage "
                 "before trying to copy or link the original file.")
        parser.add_argument('-l', '--link',
            action='store_true', dest='link', default=False,
            help="Create a symbolic link to each file instead of copying.")
        parser.add_argument('--no-default-ignore', action='store_false',
            dest='use_default_ignore_patterns', default=True,
            help="Don't ignore the common private glob-style patterns 'CVS', "
                 "'.*' and '*~'.")
def set_options(self, **options):
"""
Set instance variables based on an options dict
"""
self.interactive = options['interactive']
self.verbosity = options['verbosity']
self.symlink = options['link']
self.clear = options['clear']
self.dry_run = options['dry_run']
ignore_patterns = options['ignore_patterns']
if options['use_default_ignore_patterns']:
ignore_patterns += ['CVS', '.*', '*~']
self.ignore_patterns = list(set(ignore_patterns))
self.post_process = options['post_process']
    def collect(self):
        """
        Perform the bulk of the work of collectstatic.
        Split off from handle() to facilitate testing.

        Returns a dict with three lists of destination paths:
        'modified' (copied or symlinked), 'unmodified' (skipped as up to
        date) and 'post_processed'.
        """
        if self.symlink and not self.local:
            raise CommandError("Can't symlink to a remote destination.")
        if self.clear:
            self.clear_dir('')
        # Pick the per-file handler once, up front.
        if self.symlink:
            handler = self.link_file
        else:
            handler = self.copy_file
        # Maps destination path -> (source storage, source path); the
        # first finder that yields a destination path wins.
        found_files = OrderedDict()
        for finder in get_finders():
            for path, storage in finder.list(self.ignore_patterns):
                # Prefix the relative path if the source storage contains it
                if getattr(storage, 'prefix', None):
                    prefixed_path = os.path.join(storage.prefix, path)
                else:
                    prefixed_path = path
                if prefixed_path not in found_files:
                    found_files[prefixed_path] = (storage, path)
                    handler(path, prefixed_path, storage)
                else:
                    self.log(
                        "Found another file with the destination path '%s'. It "
                        "will be ignored since only the first encountered file "
                        "is collected. If this is not what you want, make sure "
                        "every static file has a unique path." % prefixed_path,
                        level=1,
                    )
        # Here we check if the storage backend has a post_process
        # method and pass it the list of modified files.
        if self.post_process and hasattr(self.storage, 'post_process'):
            processor = self.storage.post_process(found_files,
                                                  dry_run=self.dry_run)
            # post_process yields (original, processed, result) triples;
            # result may be an Exception instance rather than being raised.
            for original_path, processed_path, processed in processor:
                if isinstance(processed, Exception):
                    self.stderr.write("Post-processing '%s' failed!" % original_path)
                    # Add a blank line before the traceback, otherwise it's
                    # too easy to miss the relevant part of the error message.
                    self.stderr.write("")
                    raise processed
                if processed:
                    self.log("Post-processed '%s' as '%s'" %
                             (original_path, processed_path), level=1)
                    self.post_processed_files.append(original_path)
                else:
                    self.log("Skipped post-processing '%s'" % original_path)
        return {
            'modified': self.copied_files + self.symlinked_files,
            'unmodified': self.unmodified_files,
            'post_processed': self.post_processed_files,
        }
    def handle(self, **options):
        """
        Entry point: confirm with the user (unless --noinput), run
        collect() and, at verbosity >= 1, print a one-line summary.
        """
        self.set_options(**options)
        # Build the confirmation prompt piece by piece.
        message = ['\n']
        if self.dry_run:
            message.append(
                'You have activated the --dry-run option so no files will be modified.\n\n'
            )
        message.append(
            'You have requested to collect static files at the destination\n'
            'location as specified in your settings'
        )
        if self.is_local_storage() and self.storage.location:
            destination_path = self.storage.location
            message.append(':\n\n %s\n\n' % destination_path)
        else:
            # Remote storage: no local path to show in the summary either.
            destination_path = None
            message.append('.\n\n')
        if self.clear:
            message.append('This will DELETE ALL FILES in this location!\n')
        else:
            message.append('This will overwrite existing files!\n')
        message.append(
            'Are you sure you want to do this?\n\n'
            "Type 'yes' to continue, or 'no' to cancel: "
        )
        if self.interactive and input(''.join(message)) != 'yes':
            raise CommandError("Collecting static files cancelled.")
        collected = self.collect()
        modified_count = len(collected['modified'])
        unmodified_count = len(collected['unmodified'])
        post_processed_count = len(collected['post_processed'])
        if self.verbosity >= 1:
            template = ("\n%(modified_count)s %(identifier)s %(action)s"
                        "%(destination)s%(unmodified)s%(post_processed)s.\n")
            summary = template % {
                'modified_count': modified_count,
                'identifier': 'static file' + ('' if modified_count == 1 else 's'),
                'action': 'symlinked' if self.symlink else 'copied',
                'destination': (" to '%s'" % destination_path if destination_path else ''),
                'unmodified': (', %s unmodified' % unmodified_count if collected['unmodified'] else ''),
                # and/or idiom: empty string when nothing was post-processed.
                'post_processed': (collected['post_processed'] and
                                   ', %s post-processed'
                                   % post_processed_count or ''),
            }
            self.stdout.write(summary)
def log(self, msg, level=2):
"""
Small log helper
"""
if self.verbosity >= level:
self.stdout.write(msg)
    def is_local_storage(self):
        """Return True if the destination storage is a local FileSystemStorage."""
        return isinstance(self.storage, FileSystemStorage)
    def clear_dir(self, path):
        """
        Deletes the given relative path using the destination storage backend.

        Walks the tree depth-first: files in ``path`` are deleted first,
        then each subdirectory is cleared recursively.  In dry-run mode
        nothing is deleted, but the same walk (and logging) happens.
        """
        if not self.storage.exists(path):
            return
        dirs, files = self.storage.listdir(path)
        for f in files:
            fpath = os.path.join(path, f)
            if self.dry_run:
                self.log("Pretending to delete '%s'" %
                         smart_text(fpath), level=1)
            else:
                self.log("Deleting '%s'" % smart_text(fpath), level=1)
                self.storage.delete(fpath)
        for d in dirs:
            self.clear_dir(os.path.join(path, d))
    def delete_file(self, path, prefixed_path, source_storage):
        """
        Checks if the target file should be deleted if it already exists.

        Returns True when the caller should proceed to copy/link the
        file (any stale target has been deleted, or pretended to be in
        dry-run mode); returns False when the existing target is up to
        date and was recorded in ``self.unmodified_files``.
        """
        if self.storage.exists(prefixed_path):
            try:
                # When was the target file modified last time?
                target_last_modified = \
                    self.storage.modified_time(prefixed_path)
            except (OSError, NotImplementedError, AttributeError):
                # The storage doesn't support ``modified_time`` or failed
                pass
            else:
                try:
                    # When was the source file modified last time?
                    source_last_modified = source_storage.modified_time(path)
                except (OSError, NotImplementedError, AttributeError):
                    pass
                else:
                    # The full path of the target file
                    if self.local:
                        full_path = self.storage.path(prefixed_path)
                    else:
                        full_path = None
                    # Skip the file if the source file is younger
                    # Avoid sub-second precision (see #14665, #19540)
                    if (target_last_modified.replace(microsecond=0)
                        >= source_last_modified.replace(microsecond=0)):
                        # ...but only when the link/copy mode matches what
                        # is on disk: a real file must be replaced when we
                        # are symlinking, and a symlink must be replaced
                        # when we are copying, even if timestamps agree.
                        if not ((self.symlink and full_path
                                and not os.path.islink(full_path)) or
                                (not self.symlink and full_path
                                and os.path.islink(full_path))):
                            if prefixed_path not in self.unmodified_files:
                                self.unmodified_files.append(prefixed_path)
                            self.log("Skipping '%s' (not modified)" % path)
                            return False
            # Then delete the existing file if really needed
            if self.dry_run:
                self.log("Pretending to delete '%s'" % path)
            else:
                self.log("Deleting '%s'" % path)
                self.storage.delete(prefixed_path)
        return True
    def link_file(self, path, prefixed_path, source_storage):
        """
        Attempt to link ``path``.

        Skips files already linked in this run, defers to
        ``delete_file`` for up-to-date targets, and records the
        destination in ``self.symlinked_files`` on success.
        """
        # Skip this file if it was already copied earlier
        if prefixed_path in self.symlinked_files:
            return self.log("Skipping '%s' (already linked earlier)" % path)
        # Delete the target file if needed or break
        if not self.delete_file(path, prefixed_path, source_storage):
            return
        # The full path of the source file
        source_path = source_storage.path(path)
        # Finally link the file
        if self.dry_run:
            self.log("Pretending to link '%s'" % source_path, level=1)
        else:
            self.log("Linking '%s'" % source_path, level=1)
            full_path = self.storage.path(prefixed_path)
            try:
                # Ensure the parent directory exists; an already-existing
                # directory surfaces as OSError and is fine to ignore.
                os.makedirs(os.path.dirname(full_path))
            except OSError:
                pass
            try:
                if os.path.lexists(full_path):
                    os.unlink(full_path)
                os.symlink(source_path, full_path)
            except AttributeError:
                # os.symlink does not exist on this Python build.
                import platform
                raise CommandError("Symlinking is not supported by Python %s." %
                                   platform.python_version())
            except NotImplementedError:
                import platform
                raise CommandError("Symlinking is not supported in this "
                                   "platform (%s)." % platform.platform())
            except OSError as e:
                raise CommandError(e)
        if prefixed_path not in self.symlinked_files:
            self.symlinked_files.append(prefixed_path)
    def copy_file(self, path, prefixed_path, source_storage):
        """
        Attempt to copy ``path`` with storage.

        Skips files already copied in this run, defers to
        ``delete_file`` for up-to-date targets, and records the
        destination in ``self.copied_files``.
        """
        # Skip this file if it was already copied earlier
        if prefixed_path in self.copied_files:
            return self.log("Skipping '%s' (already copied earlier)" % path)
        # Delete the target file if needed or break
        if not self.delete_file(path, prefixed_path, source_storage):
            return
        # The full path of the source file
        source_path = source_storage.path(path)
        # Finally start copying
        if self.dry_run:
            self.log("Pretending to copy '%s'" % source_path, level=1)
        else:
            self.log("Copying '%s'" % source_path, level=1)
            with source_storage.open(path) as source_file:
                self.storage.save(prefixed_path, source_file)
        # Recorded even in dry-run mode so the summary counts are right.
        self.copied_files.append(prefixed_path)
| bsd-3-clause |
sparkslabs/kamaelia_ | Sketches/AM/KPIPackage/Kamaelia/Community/AM/Kamaelia/KPIFramework/KPI/Client/Authenticatee.py | 3 | 8627 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
#
"""
=========================
Authenticatee Component
=========================
Authenticatee is client side component of authentication. After successful
authentication, it depacketizes the data and key packets
How it works ?
--------------
How does it work ?
------------------
The authentication process is based on the Needham-Schroeder protocol.
Authentication Sequence
1. The Authenticatee sends its Userid encrypted with root key.
2. The Authenticator obtains userid by decrypting the data received and
looks up for the key corresponding to the user id.
3. The Authenticator generates a random number, encrypts it
with a user's key and sends it to the authenticatee.
4. The Authenticatee decrypts it, increments it by 1 and sends it back
to the Authenticator.
5. The Authenticator verifies if the number received is indeed the original
number incremented by 1.
6. Authenticator sends SUCCESS Message
7. Authenticatee sends back "OK" ack
8. After successful authentication, Authenticatee reads incoming packet's
header. if it is key packet, it decrypts and sends to "notifykey" outbox
9. If its data packet, it sends data to "encout" outbox
10. Note: Authenticator takes care of packets of different sizes.
"""
#TODO:
#currently uses xtea as the encryption algorithm for challenge response
#communication with authenticator. need to refactor to be able to plugin
#various ciphers
#separate authenticatee and depackizer into two components.
#Should be able to extend and override new authentication mechanisms
import struct
import Axon
from Kamaelia.Community.AM.Kamaelia.KPIFramework.KPI.Crypto import xtea
class Authenticatee(Axon.Component.component):
    """\ Authenticator(kpiuser) -> new Authenticatee component
    Handles authentication and depacketizing
    Keyword arguments:
    - kpiuser -- uses KPIUser instance for looking up user
    key from client config file
    """
    Inboxes = {"inbox" : "authentication and data packets",
               "control" : "receive shutdown messages"}
    Outboxes = {"outbox" : "authentication",
                "encout" : "encrypted data packets",
                "notifykey" : "notify key",
                "signal" : "pass shutdown messages"}
    def __init__(self, kpiuser):
        """x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
        super(Authenticatee,self).__init__()
        self.kpiuser = kpiuser
    def main(self):
        # Step 1: identify ourselves by sending our user id,
        # encrypted with the shared root key.
        #encrypt user id with root key
        userid = self.kpiuser.getID()
        data = xtea.xtea_encrypt(self.kpiuser.getRootKey(),
                        struct.pack('!2L',0, userid))
        self.send(data, "outbox")
        yield 1
        while not self.dataReady("inbox"):
            yield 1
        data = self.recv("inbox")
        # Step 2: answer the challenge (a random number encrypted with
        # our user key) with challenge+1, encrypted the same way.
        #receive challenge
        temp = xtea.xtea_decrypt(self.kpiuser.getUserKey(), data)
        padding, challenge = struct.unpack('!2L',temp)
        response = challenge+1
        #sending response encrypted with my key
        data = xtea.xtea_encrypt(self.kpiuser.getUserKey(),
                        struct.pack('!2L',0, response))
        self.send(data, "outbox")
        yield 1
        while not self.dataReady("inbox"):
            yield 1
        data = self.recv("inbox")
        if data == "SUCCESS":
            #print "authentication success"
            pass
        else:
            #print "authenication failure"
            return #shutdown
        #Send OK ack to authenticator
        self.send("OK", "outbox")
        yield 1
        # Step 3: depacketize the incoming stream. Each packet starts
        # with an 8-byte header: two big-endian 32-bit words holding the
        # packet type and the payload length (see struct.unpack below).
        #decode data
        #the depacketizer has to modes
        #reading header or reading body
        buffer = ''
        KEY = 0x20
        DATA = 0x30
        READ_HEADER = 1
        READ_BODY = 2
        HEADER_LEN = 8   # NOTE(review): unused; HEADER_SIZE is the one consulted
        mode = READ_HEADER
        HEADER_SIZE = 8
        while 1:
            while self.dataReady("inbox"):
                data = self.recv("inbox")
                buffer = buffer + data
                if mode == READ_HEADER :
                    if len(buffer) >= HEADER_SIZE:
                        mode = READ_BODY
                        header = buffer[:HEADER_SIZE]
                        packetType, data2read = struct.unpack("!2L", header)
                        #Bug fix - previously was reading less
                        #data from buffer -> body = buffer[HEADER_SIZE:data2read]
                        #this caused the client to be slower in receiving data
                        body = buffer[HEADER_SIZE:HEADER_SIZE+data2read]
                        #read the body
                        if data2read <= len(body):
                            # Whole payload already buffered: handle it now.
                            mode = READ_HEADER
                            if packetType == KEY:
                                #key packet structure
                                #8 bytes - key ID with which session key was encrypted
                                #16 bytes of encrypted session key
                                padding,ID = struct.unpack("!2L", body[:8])
                                try:
                                    key = self.kpiuser.getKey(ID)
                                    #i have the key for the ID
                                    enckey = body[8:data2read]
                                    part1 = xtea.xtea_decrypt(key, enckey[:8])
                                    part2 = xtea.xtea_decrypt(key, enckey[8:16])
                                    sessionkey = part1 + part2
                                    self.send(sessionkey, "notifykey")
                                except KeyError:
                                    pass #the key is not for me
                            elif packetType == DATA:
                                #print "decoded data", body
                                self.send(body, "encout")
                            # remove the header + data read
                            buffer = buffer[(data2read+HEADER_SIZE):len(buffer)]
                        else:
                            # Payload incomplete: strip the header so that in
                            # READ_BODY mode the buffer starts at the payload.
                            #remove header
                            buffer = buffer[HEADER_SIZE:len(buffer)]
                elif mode == READ_BODY:
                    body = buffer[:data2read]
                    #data2read = data2read - len(buffer)
                    #read all the data
                    if data2read <= len(body):
                        mode = READ_HEADER
                        if packetType == KEY:
                            #key packet structure
                            #8 bytes - key ID with which session key was encrypted
                            #16 bytes of encrypted session key
                            padding,ID = struct.unpack("!2L", body[:8])
                            try:
                                key = self.kpiuser.getKey(ID)
                                enckey = body[8:data2read]
                                part1 = xtea.xtea_decrypt(key, enckey[:8])
                                part2 = xtea.xtea_decrypt(key, enckey[8:16])
                                sessionkey = part1 + part2
                                self.send(sessionkey, "notifykey")
                            except KeyError:
                                pass #the key is not for me
                        elif packetType == DATA:
                            self.send(body, "encout")
                        # remove the data read
                        buffer = buffer[data2read:len(buffer)]
            yield 1
| apache-2.0 |
UstadMobile/eXePUB | twisted/cred/error.py | 19 | 2213 | # Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
"""Cred errors."""
class Unauthorized(Exception):
    """Standard unauthorized error.

    Base class for the access-denied errors in this module (see
    ``KeyNotFound`` and ``UnauthorizedLogin``).
    """
class DuplicateIdentity(KeyError):
    """An identity with that name already exists.

    Descends from KeyError for backwards compatibility: that is what
    DefaultAuthorizer.addIdentity used to raise.
    """
    def __init__(self, name):
        KeyError.__init__(self, name)
        self.name = name

    def __repr__(self):
        cls_name = self.__class__.__name__
        return "<%s name %r>" % (cls_name, self.name)

    def __str__(self):
        return "There is already an identity named %s." % self.name
class KeyNotFound(KeyError, Unauthorized):
    """None of the keys on your keyring seem to fit here."""
    def __init__(self, serviceName, perspectiveName):
        KeyError.__init__(self, (serviceName, perspectiveName))
        self.serviceName = serviceName
        self.perspectiveName = perspectiveName

    def __repr__(self):
        # %r already applies repr(); the old code wrapped the values in
        # repr() as well, producing doubly-quoted names like "'svc'".
        return "<%s (%r, %r)>" % (self.__class__.__name__,
                                  self.serviceName,
                                  self.perspectiveName)

    def __str__(self):
        return "No key for service %r, perspective %r." % (
            self.serviceName, self.perspectiveName)
### "New Cred" objects
# Exceptions raised by the Portal / credentials-checker APIs.

class LoginFailed(Exception):
    """
    The user's request to log in failed for some reason.

    Base class for the more specific login errors below.
    """

class UnauthorizedLogin(LoginFailed, Unauthorized):
    """The user was not authorized to log in.
    """

class UnhandledCredentials(LoginFailed):
    """A type of credentials were passed in with no knowledge of how to check
    them. This is a server configuration error - it means that a protocol was
    connected to a Portal without a CredentialChecker that can check all of its
    potential authentication strategies.
    """

class LoginDenied(LoginFailed):
    """
    The realm rejected this login for some reason.
    Examples of reasons this might be raised include an avatar logging in
    too frequently, a quota having been fully used, or the overall server
    load being too high.
    """
| gpl-2.0 |
rofehr/enigma2 | lib/python/Components/Converter/ServiceTime.py | 42 | 1119 | from Converter import Converter
from Components.Element import cached, ElementError
from enigma import iServiceInformation
class ServiceTime(Converter, object):
	"""Converter exposing a recorded service's start time, end time or
	duration, selected by the converter argument string."""
	STARTTIME = 0
	ENDTIME = 1
	DURATION = 2
	def __init__(self, type):
		Converter.__init__(self, type)
		if type == "EndTime":
			self.type = self.ENDTIME
		elif type == "StartTime":
			self.type = self.STARTTIME
		elif type == "Duration":
			self.type = self.DURATION
		else:
			raise ElementError("'%s' is not <StartTime|EndTime|Duration> for ServiceTime converter" % type)
	@cached
	def getTime(self):
		"""Return the requested value in seconds, or None when the
		source has no service/info."""
		service = self.source.service
		info = self.source.info
		if not info or not service:
			return None
		if self.type == self.STARTTIME:
			return info.getInfo(service, iServiceInformation.sTimeCreate)
		elif self.type == self.ENDTIME:
			begin = info.getInfo(service, iServiceInformation.sTimeCreate)
			# renamed from ``len`` to avoid shadowing the builtin;
			# added 10 seconds to fix round to minutes
			length = info.getLength(service) + 10
			return begin + length
		elif self.type == self.DURATION:
			return info.getLength(service) + 10 # added 10 seconds to fix round to minutes
	time = property(getTime)
| gpl-2.0 |
rversteegen/commandergenius | project/jni/python/src/Demo/comparisons/sortingtest.py | 38 | 1374 | #! /usr/bin/env python
# 2) Sorting Test
#
# Sort an input file that consists of lines like this
#
# var1=23 other=14 ditto=23 fred=2
#
# such that each output line is sorted WRT to the number. Order
# of output lines does not change. Resolve collisions using the
# variable name. e.g.
#
# fred=2 other=14 ditto=23 var1=23
#
# Lines may be up to several kilobytes in length and contain
# zillions of variables.
# This implementation:
# - Reads stdin, writes stdout
# - Uses any amount of whitespace to separate fields
# - Allows signed numbers
# - Treats illegally formatted fields as field=0
# - Outputs the sorted fields with exactly one space between them
# - Handles blank input lines correctly
import re
import string
import sys
def main():
    # NOTE: Python 2 only (print statement, string.atoi, list-returning map).
    # Matches "name=number" fields: group 1 is the variable name, group 2
    # a signed integer value.
    prog = re.compile('^(.*)=([-+]?[0-9]+)')
    # prog is bound as a default argument: a classic Python 2 idiom to
    # make the lookup local (fast) inside the per-field helper.
    def makekey(item, prog=prog):
        match = prog.match(item)
        if match:
            var, num = match.group(1, 2)
            # Sort key is (value, name): numeric order first, with ties
            # broken by the variable name, as the spec requires.
            return string.atoi(num), var
        else:
            # Bad input -- pretend it's a var with value 0
            return 0, item
    while 1:
        line = sys.stdin.readline()
        if not line:
            break
        items = line.split()
        items = map(makekey, items)
        items.sort()
        # Trailing comma keeps the fields on one line, space-separated.
        for num, var in items:
            print "%s=%s" % (var, num),
        print
main()
| lgpl-2.1 |
alexthered/kienhoc-platform | common/lib/xmodule/xmodule/mixin.py | 70 | 2601 | """
Reusable mixins for XBlocks and/or XModules
"""
from xblock.fields import Scope, String, XBlockMixin
# Make '_' a no-op so we can scrape strings. Using lambda instead of
# `django.utils.translation.ugettext_noop` because Django cannot be imported in this file
_ = lambda text: text
class LicenseMixin(XBlockMixin):
    """
    Mixin that allows an author to indicate a license on the contents of an
    XBlock. For example, a video could be marked as Creative Commons SA-BY
    licensed. You can even indicate the license on an entire course.
    If this mixin is not applied to an XBlock, or if the license field is
    blank, then the content is subject to whatever legal licensing terms that
    apply to content by default. For example, in the United States, that content
    is exclusively owned by the creator of the content by default. Other
    countries may have similar laws.
    """
    # Stored with the block definition (Scope.content) so the license
    # travels with the content itself.
    license = String(
        display_name=_("License"),
        help=_("A license defines how the contents of this block can be shared and reused."),
        default=None,
        scope=Scope.content,
    )

    @classmethod
    def parse_license_from_xml(cls, definition, node):
        """
        When importing an XBlock from XML, this method will parse the license
        information out of the XML and attach it to the block.
        It is defined here so that classes that use this mixin can simply refer
        to this method, rather than reimplementing it in their XML import
        functions.

        ``node`` is the XML element being imported; ``definition`` is the
        dict the license entry is added to (and which is returned).
        """
        license = node.get('license', default=None)  # pylint: disable=redefined-builtin
        if license:
            definition['license'] = license
        return definition

    def add_license_to_xml(self, node, default=None):
        """
        When generating XML from an XBlock, this method will add the XBlock's
        license to the XML representation before it is serialized.
        It is defined here so that classes that use this mixin can simply refer
        to this method, rather than reimplementing it in their XML export
        functions.

        ``default`` is used when the block has no ``license`` attribute.
        """
        if getattr(self, "license", default):
            node.set('license', self.license)
def wrap_with_license(block, view, frag, context):  # pylint: disable=unused-argument
    """
    In the LMS, display the custom license underneath the XBlock.

    Appends the rendered ``license_wrapper.html`` template to the
    fragment's content when the block carries a license; otherwise the
    fragment is returned unchanged.
    """
    license = getattr(block, "license", None)  # pylint: disable=redefined-builtin
    if license:
        context = {"license": license}
        frag.content += block.runtime.render_template('license_wrapper.html', context)
    return frag
| agpl-3.0 |
hurrinico/account-invoice-reporting | __unported__/invoice_print_report_balance_payment/partner.py | 11 | 1850 | # -*- encoding: utf-8 -*-
###############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Savoir-faire Linux (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import datetime
from openerp.osv import orm
class res_partner(orm.Model):
    _inherit = 'res.partner'

    def get_balance_at_date(self, cr, uid, id, date, context=None):
        """
        Return the partner's receivable/payable balance (debit - credit)
        over all posted move lines dated on or before ``date``.

        ``date`` may be a ``datetime.date`` or an ISO 'YYYY-MM-DD'
        string.  Returns 0 when the partner has no matching lines.
        """
        query = self.pool.get('account.move.line')._query_get(
            cr, uid, context=context)
        if isinstance(date, datetime.date):
            date = date.strftime('%Y-%m-%d')
        # The move-line filter from _query_get is SQL text and must be
        # interpolated; the partner id and the date are passed as bound
        # parameters instead of being formatted into the string, to
        # avoid SQL injection and quoting bugs.
        sql = """SELECT SUM(l.debit-l.credit)
                FROM account_move_line l
                JOIN account_move m ON (l.move_id=m.id)
                LEFT JOIN account_account a ON (l.account_id=a.id)
                WHERE a.type IN ('receivable','payable')
                AND l.partner_id = %%s
                AND m.date <= %%s
                AND %s
             """ % query
        cr.execute(sql, (id, date))
        row = cr.fetchone()
        # SUM() yields NULL (fetched as (None,)) when nothing matches;
        # normalize that to 0 as well.
        return row[0] if row and row[0] is not None else 0
| agpl-3.0 |
pidah/st2contrib | packs/dimensiondata/actions/lib/actions.py | 6 | 1043 | from libcloud.compute.drivers.dimensiondata import DimensionDataNodeDriver
from libcloud.loadbalancer.drivers.dimensiondata import DimensionDataLBDriver
from dimensiondata_parsers import ResultSets
from st2actions.runners.pythonrunner import Action
__all__ = [
'BaseAction',
]
class BaseAction(Action):
    """Common base class for the Dimension Data pack actions.

    Provides authenticated libcloud driver factories (built from the
    pack config) and a generic dispatcher that formats results.
    """
    def __init__(self, config):
        super(BaseAction, self).__init__(config)
        # Formatter that turns libcloud results into serializable output.
        self.resultsets = ResultSets()
    def _get_compute_driver(self, region):
        """Return a DimensionData compute driver for ``region``."""
        api_user = self.config['api_user']
        api_pass = self.config['api_password']
        driver = DimensionDataNodeDriver(api_user, api_pass, region=region)
        return driver
    def _get_lb_driver(self, region):
        """Return a DimensionData load-balancer driver for ``region``."""
        api_user = self.config['api_user']
        api_pass = self.config['api_password']
        driver = DimensionDataLBDriver(api_user, api_pass, region=region)
        return driver
    def _do_function(self, module, action, **kwargs):
        """Call ``module.<action>(**kwargs)`` and format the result."""
        result = getattr(module, action)(**kwargs)
        return self.resultsets.formatter(result)
| apache-2.0 |
dumbringer/ns-3-dev-ndnSIM | waf-tools/clang_compilation_database.py | 99 | 1830 | #!/usr/bin/env python
# encoding: utf-8
# Christoph Koke, 2013
"""
Writes the c and cpp compile commands into build/compile_commands.json
see http://clang.llvm.org/docs/JSONCompilationDatabase.html
Usage:
def configure(conf):
conf.load('compiler_cxx')
...
conf.load('clang_compilation_database')
"""
import sys, os, json, shlex, pipes
from waflib import Logs, TaskGen
from waflib.Tools import c, cxx
if sys.hexversion >= 0x3030000:
quote = shlex.quote
else:
quote = pipes.quote
@TaskGen.feature('*')
@TaskGen.after_method('process_use')
def collect_compilation_db_tasks(self):
    "Add a compilation database entry for compiled tasks"
    try:
        clang_db = self.bld.clang_compilation_database_tasks
    except AttributeError:
        # First task generator processed: create the shared list and
        # register the writer to run once the build finishes.
        clang_db = self.bld.clang_compilation_database_tasks = []
        self.bld.add_post_fun(write_compilation_database)
    for task in getattr(self, 'compiled_tasks', []):
        # Only C and C++ compile tasks belong in the database.
        if isinstance(task, (c.c, cxx.cxx)):
            clang_db.append(task)
def write_compilation_database(ctx):
    "Write the clang compilation database as JSON"
    database_file = ctx.bldnode.make_node('compile_commands.json')
    Logs.info("Build commands will be stored in %s" % database_file.path_from(ctx.path))
    try:
        # Merge with an existing database so entries from previous runs
        # are preserved.
        root = json.load(database_file)
    except IOError:
        root = []
    # Index existing entries by file name so new entries replace stale ones.
    clang_db = dict((x["file"], x) for x in root)
    for task in getattr(ctx, 'clang_compilation_database_tasks', []):
        try:
            cmd = task.last_cmd
        except AttributeError:
            # Task did not run in this build; nothing to record.
            continue
        directory = getattr(task, 'cwd', ctx.variant_dir)
        f_node = task.inputs[0]
        filename = os.path.relpath(f_node.abspath(), directory)
        # Shell-quote each argument so the stored command is runnable.
        cmd = " ".join(map(quote, cmd))
        entry = {
            "directory": directory,
            "command": cmd,
            "file": filename,
        }
        clang_db[filename] = entry
    root = list(clang_db.values())
    database_file.write(json.dumps(root, indent=2))
| gpl-2.0 |
shubhdev/openedx | openedx/core/djangoapps/content/course_structures/models.py | 6 | 2232 | import json
import logging
from collections import OrderedDict
from model_utils.models import TimeStampedModel
from util.models import CompressedTextField
from xmodule_django.models import CourseKeyField
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
class CourseStructure(TimeStampedModel):
    """Cached JSON snapshot of a course's block tree, one row per course."""
    course_id = CourseKeyField(max_length=255, db_index=True, unique=True, verbose_name='Course ID')
    # Right now the only thing we do with the structure doc is store it and
    # send it on request. If we need to store a more complex data model later,
    # we can do so and build a migration. The only problem with a normalized
    # data model for this is that it will likely involve hundreds of rows, and
    # we'd have to be careful about caching.
    structure_json = CompressedTextField(verbose_name='Structure JSON', blank=True, null=True)
    @property
    def structure(self):
        """Deserialized structure document, or None when not populated."""
        if self.structure_json:
            return json.loads(self.structure_json)
        return None
    @property
    def ordered_blocks(self):
        """
        Return the blocks in the order with which they're seen in the courseware. Parents are ordered before children.

        Returns None when no structure document is stored.
        """
        # NOTE: each ``self.structure`` access re-parses the JSON, so this
        # property deserializes the document several times per call.
        if self.structure:
            ordered_blocks = OrderedDict()
            self._traverse_tree(self.structure['root'], self.structure['blocks'], ordered_blocks)
            return ordered_blocks
    def _traverse_tree(self, block, unordered_structure, ordered_blocks, parent=None):
        """
        Traverses the tree and fills in the ordered_blocks OrderedDict with the blocks in
        the order that they appear in the course.
        """
        # find the dictionary entry for the current node
        cur_block = unordered_structure[block]
        if parent:
            # Record the parent link on the block dict as we descend.
            cur_block['parent'] = parent
        ordered_blocks[block] = cur_block
        for child_node in cur_block['children']:
            self._traverse_tree(child_node, unordered_structure, ordered_blocks, parent=block)
| agpl-3.0 |
pombredanne/brisk-hadoop-common | src/examples/org/apache/hadoop/examples/terasort/job_history_summary.py | 323 | 3444 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
# Attribute fields look like NAME="value"; counters look like name:value,
pat = re.compile('(?P<name>[^=]+)="(?P<value>[^"]*)" *')
counterPat = re.compile('(?P<name>[^:]+):(?P<value>[^,]*),?')

def parse(tail):
  """Parse a job-history attribute string into a {name: value} dict."""
  return dict(pat.findall(tail))
# NOTE: Python 2 script (has_key, print statements, builtin reduce).
# Per-task accumulators, keyed by TASKID; times are stored in seconds.
mapStartTime = {}
mapEndTime = {}
reduceStartTime = {}
reduceShuffleTime = {}
reduceSortTime = {}
reduceEndTime = {}
reduceBytes = {}
# First pass: scan the job-history log on stdin and record the
# millisecond timestamps (converted to seconds) of each attempt.
for line in sys.stdin:
  words = line.split(" ",1)
  event = words[0]
  attrs = parse(words[1])
  if event == 'MapAttempt':
    if attrs.has_key("START_TIME"):
      mapStartTime[attrs["TASKID"]] = int(attrs["START_TIME"])/1000
    elif attrs.has_key("FINISH_TIME"):
      mapEndTime[attrs["TASKID"]] = int(attrs["FINISH_TIME"])/1000
  elif event == 'ReduceAttempt':
    if attrs.has_key("START_TIME"):
      reduceStartTime[attrs["TASKID"]] = int(attrs["START_TIME"]) / 1000
    elif attrs.has_key("FINISH_TIME"):
      # A finished reduce reports its shuffle/sort/finish milestones.
      reduceShuffleTime[attrs["TASKID"]] = int(attrs["SHUFFLE_FINISHED"])/1000
      reduceSortTime[attrs["TASKID"]] = int(attrs["SORT_FINISHED"])/1000
      reduceEndTime[attrs["TASKID"]] = int(attrs["FINISH_TIME"])/1000
  elif event == 'Task':
    if attrs["TASK_TYPE"] == "REDUCE" and attrs.has_key("COUNTERS"):
      for n,v in re.findall(counterPat, attrs["COUNTERS"]):
        if n == "File Systems.HDFS bytes written":
          reduceBytes[attrs["TASKID"]] = int(v)
# Per-second occupancy counters for the timeline table below.
runningMaps = {}
shufflingReduces = {}
sortingReduces = {}
runningReduces = {}
startTime = min(reduce(min, mapStartTime.values()),
                reduce(min, reduceStartTime.values()))
endTime = max(reduce(max, mapEndTime.values()),
              reduce(max, reduceEndTime.values()))
reduces = reduceBytes.keys()
reduces.sort()
print "Name reduce-output-bytes shuffle-finish reduce-finish"
for r in reduces:
  print r, reduceBytes[r], reduceShuffleTime[r] - startTime,
  print reduceEndTime[r] - startTime
print
for t in range(startTime, endTime):
  runningMaps[t] = 0
  shufflingReduces[t] = 0
  sortingReduces[t] = 0
  runningReduces[t] = 0
# CAUTION: the loop variables below shadow the builtins ``map`` and
# ``reduce``; neither builtin is used after this point.
for map in mapStartTime.keys():
  for t in range(mapStartTime[map], mapEndTime[map]):
    runningMaps[t] += 1
for reduce in reduceStartTime.keys():
  for t in range(reduceStartTime[reduce], reduceShuffleTime[reduce]):
    shufflingReduces[t] += 1
  for t in range(reduceShuffleTime[reduce], reduceSortTime[reduce]):
    sortingReduces[t] += 1
  for t in range(reduceSortTime[reduce], reduceEndTime[reduce]):
    runningReduces[t] += 1
print "time maps shuffle merge reduce"
for t in range(startTime, endTime):
  print t - startTime, runningMaps[t], shufflingReduces[t], sortingReduces[t],
  print runningReduces[t]
| apache-2.0 |
SouWilliams/selenium | py/selenium/webdriver/firefox/webdriver.py | 44 | 3810 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
try:
import http.client as http_client
except ImportError:
import httplib as http_client
import shutil
import socket
import sys
from .firefox_binary import FirefoxBinary
from .service import Service
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.firefox.extension_connection import ExtensionConnection
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from selenium.webdriver.remote.webdriver import WebDriver as RemoteWebDriver
class WebDriver(RemoteWebDriver):
    """Firefox WebDriver.

    Drives the browser either through the Marionette 'wires' service
    (when the capabilities contain ``marionette: True``) or through the
    legacy extension connection.
    """
    # There is no native event support on Mac
    NATIVE_EVENTS_ALLOWED = sys.platform != "darwin"
    def __init__(self, firefox_profile=None, firefox_binary=None, timeout=30,
                 capabilities=None, proxy=None, executable_path='wires'):
        """
        Create a Firefox driver.

        :param firefox_profile: FirefoxProfile to use; a fresh default
            profile is created when omitted.
        :param firefox_binary: FirefoxBinary to launch (legacy path);
            with Marionette, the 'binary' capability overrides it.
        :param timeout: seconds the extension connection waits.
        :param capabilities: desired capabilities dict; defaults to
            DesiredCapabilities.FIREFOX.
        :param proxy: optional Proxy merged into the capabilities
            (legacy path only).
        :param executable_path: path to the wires executable (Marionette).
        """
        self.binary = firefox_binary
        self.profile = firefox_profile
        if self.profile is None:
            self.profile = FirefoxProfile()
        # Native events stay enabled only on platforms that allow them.
        self.profile.native_events_enabled = (
            self.NATIVE_EVENTS_ALLOWED and self.profile.native_events_enabled)
        if capabilities is None:
            capabilities = DesiredCapabilities.FIREFOX
        if "marionette" in capabilities and capabilities['marionette'] is True:
            # Let's use Marionette! WOOOOHOOOOO!
            if "binary" in capabilities:
                self.binary = capabilities["binary"]
            self.service = Service(executable_path, firefox_binary=self.binary)
            self.service.start()
            RemoteWebDriver.__init__(self,
                command_executor=self.service.service_url,
                desired_capabilities=capabilities,
                keep_alive=True)
        else:
            # Oh well... sometimes the old way is the best way.
            if self.binary is None:
                self.binary = FirefoxBinary()
            if proxy is not None:
                proxy.add_to_capabilities(capabilities)
            RemoteWebDriver.__init__(self,
                command_executor=ExtensionConnection("127.0.0.1", self.profile,
                self.binary, timeout),
                desired_capabilities=capabilities,
                keep_alive=True)
        self._is_remote = False
    def quit(self):
        """Quits the driver and close every associated window."""
        try:
            RemoteWebDriver.quit(self)
        except (http_client.BadStatusLine, socket.error):
            # Happens if Firefox shutsdown before we've read the response from
            # the socket.
            pass
        if "marionette" in self.capabilities and self.capabilities['marionette'] is True:
            self.service.stop()
        else:
            self.binary.kill()
        try:
            # Remove the temporary profile directories created at startup.
            shutil.rmtree(self.profile.path)
            if self.profile.tempfolder is not None:
                shutil.rmtree(self.profile.tempfolder)
        except Exception as e:
            print(str(e))
    @property
    def firefox_profile(self):
        """The FirefoxProfile this driver was started with."""
        return self.profile
jakirkham/bokeh | bokeh/core/property/tests/test_datetime.py | 3 | 3301 | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2018, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import datetime
import time
# External imports
# Bokeh imports
from . import _TestHasProps, _TestModel
from bokeh._testing.util.api import verify_all
# Module under test
import bokeh.core.property.datetime as bcpd
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
# Public names expected in bokeh.core.property.datetime; checked against the
# module's actual exports by ``Test___all__`` at the bottom of this file.
ALL = (
    'Date',
    'Datetime',
    'TimeDelta',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
class Test_Date(object):
    """Unit tests for the ``Date`` property type."""

    def test_valid(self):
        date_prop = bcpd.Date()
        for value in (None, 0, 1, 0.0, 1.0):
            assert date_prop.is_valid(value)

        # TODO (bev) should check actual convertibility
        assert date_prop.is_valid("")

        # TODO (bev) should fail
        assert date_prop.is_valid(False)
        assert date_prop.is_valid(True)

    def test_invalid(self):
        date_prop = bcpd.Date()
        for value in (1.0+1.0j, (), [], {}, _TestHasProps(), _TestModel()):
            assert not date_prop.is_valid(value)

    def test_transform_seconds(self):
        now_seconds = time.time()
        assert bcpd.Date().transform(now_seconds) == datetime.date.today()

    def test_transform_milliseconds(self):
        now_millis = time.time() * 1000
        assert bcpd.Date().transform(now_millis) == datetime.date.today()

    def test_has_ref(self):
        assert not bcpd.Date().has_ref

    def test_str(self):
        assert str(bcpd.Date()) == "Date"
# TODO (bev) class Test_Datetime(object)
# TODO (bev) class Test_TimeDelta(object)
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
# Generated pytest case asserting that ``bcpd``'s public exports match the
# ALL tuple declared above.
Test___all__ = verify_all(bcpd, ALL)
| bsd-3-clause |
polyval/CNC | flask/Lib/encodings/palmos.py | 647 | 2936 | """ Python Character Mapping Codec for PalmOS 3.5.
Written by Sjoerd Mullender (sjoerd@acm.org); based on iso8859_15.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless PalmOS codec built on the module-level charmap tables."""

    def encode(self, input, errors='strict'):
        """Encode *input* to PalmOS bytes using ``encoding_map``."""
        return codecs.charmap_encode(input, errors, encoding_map)

    def decode(self, input, errors='strict'):
        """Decode PalmOS bytes in *input* using ``decoding_map``."""
        return codecs.charmap_decode(input, errors, decoding_map)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental PalmOS encoder; charmap encoding keeps no state."""

    def encode(self, input, final=False):
        encoded, _consumed = codecs.charmap_encode(input, self.errors, encoding_map)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental PalmOS decoder; charmap decoding keeps no state."""

    def decode(self, input, final=False):
        decoded, _consumed = codecs.charmap_decode(input, self.errors, decoding_map)
        return decoded
class StreamWriter(Codec,codecs.StreamWriter):
    # All behaviour comes from Codec.encode plus the generic
    # codecs.StreamWriter machinery; no extra state is needed.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # All behaviour comes from Codec.decode plus the generic
    # codecs.StreamReader machinery; no extra state is needed.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo entry registered for the 'palmos' encoding."""
    stateless = Codec()
    return codecs.CodecInfo(
        name='palmos',
        encode=stateless.encode,
        decode=stateless.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map

# Start from the identity mapping for all 256 byte values, then override the
# positions where PalmOS differs from iso-8859-1.
decoding_map = codecs.make_identity_dict(range(256))

# The PalmOS character set is mostly iso-8859-1 with some differences.
# All overrides sit in the 0x80-0x9f range; note 0x8d-0x90, which PalmOS
# assigns to the four card-suit symbols.
decoding_map.update({
    0x0080: 0x20ac, #  EURO SIGN
    0x0082: 0x201a, #  SINGLE LOW-9 QUOTATION MARK
    0x0083: 0x0192, #  LATIN SMALL LETTER F WITH HOOK
    0x0084: 0x201e, #  DOUBLE LOW-9 QUOTATION MARK
    0x0085: 0x2026, #  HORIZONTAL ELLIPSIS
    0x0086: 0x2020, #  DAGGER
    0x0087: 0x2021, #  DOUBLE DAGGER
    0x0088: 0x02c6, #  MODIFIER LETTER CIRCUMFLEX ACCENT
    0x0089: 0x2030, #  PER MILLE SIGN
    0x008a: 0x0160, #  LATIN CAPITAL LETTER S WITH CARON
    0x008b: 0x2039, #  SINGLE LEFT-POINTING ANGLE QUOTATION MARK
    0x008c: 0x0152, #  LATIN CAPITAL LIGATURE OE
    0x008d: 0x2666, #  BLACK DIAMOND SUIT
    0x008e: 0x2663, #  BLACK CLUB SUIT
    0x008f: 0x2665, #  BLACK HEART SUIT
    0x0090: 0x2660, #  BLACK SPADE SUIT
    0x0091: 0x2018, #  LEFT SINGLE QUOTATION MARK
    0x0092: 0x2019, #  RIGHT SINGLE QUOTATION MARK
    0x0093: 0x201c, #  LEFT DOUBLE QUOTATION MARK
    0x0094: 0x201d, #  RIGHT DOUBLE QUOTATION MARK
    0x0095: 0x2022, #  BULLET
    0x0096: 0x2013, #  EN DASH
    0x0097: 0x2014, #  EM DASH
    0x0098: 0x02dc, #  SMALL TILDE
    0x0099: 0x2122, #  TRADE MARK SIGN
    0x009a: 0x0161, #  LATIN SMALL LETTER S WITH CARON
    0x009c: 0x0153, #  LATIN SMALL LIGATURE OE
    0x009f: 0x0178, #  LATIN CAPITAL LETTER Y WITH DIAERESIS
})

### Encoding Map

# Inverse of decoding_map (unicode codepoint -> byte value), as required by
# codecs.charmap_encode above.
encoding_map = codecs.make_encoding_map(decoding_map)
| apache-2.0 |
fnaum/rez | src/rez/vendor/amqp/basic_message.py | 38 | 3954 | """Messages for AMQP"""
# Copyright (C) 2007-2008 Barry Pederson <bp@barryp.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
from __future__ import absolute_import
from .serialization import GenericContent
__all__ = ['Message']
class Message(GenericContent):
    """A Message for use with the Channel.basic_* methods."""

    #: Instances of this class have these attributes, which
    #: are passed back and forth as message properties between
    #: client and server
    PROPERTIES = [
        ('content_type', 'shortstr'),
        ('content_encoding', 'shortstr'),
        ('application_headers', 'table'),
        ('delivery_mode', 'octet'),
        ('priority', 'octet'),
        ('correlation_id', 'shortstr'),
        ('reply_to', 'shortstr'),
        ('expiration', 'shortstr'),
        ('message_id', 'shortstr'),
        ('timestamp', 'timestamp'),
        ('type', 'shortstr'),
        ('user_id', 'shortstr'),
        ('app_id', 'shortstr'),
        ('cluster_id', 'shortstr')
    ]

    def __init__(self, body='', children=None, channel=None, **properties):
        """Create a message.

        :param body: the message payload (string).  Unicode bodies are
            encoded according to the ``content_encoding`` property; when
            that is None it is set to 'UTF-8' automatically.
        :param children: not supported
        :param channel: the channel this message is associated with

        Keyword properties may include any name from :attr:`PROPERTIES`:

        * content_type (shortstr): MIME content type
        * content_encoding (shortstr): MIME content encoding
        * application_headers (table): message header field table -- a dict
          with string keys and string | int | Decimal | datetime | dict
          values
        * delivery_mode (octet): non-persistent (1) or persistent (2)
        * priority (octet): the message priority, 0 to 9
        * correlation_id (shortstr): application correlation identifier
        * reply_to (shortstr): destination to reply to
        * expiration (shortstr): message expiration specification
        * message_id (shortstr): application message identifier
        * timestamp (datetime.datetime): message timestamp
        * type (shortstr): message type name
        * user_id (shortstr): creating user id
        * app_id (shortstr): creating application id
        * cluster_id (shortstr): intra-cluster routing identifier

        Example::

            msg = Message('hello world',
                          content_type='text/plain',
                          application_headers={'foo': 7})
        """
        super(Message, self).__init__(**properties)
        self.channel = channel
        self.body = body

    def __eq__(self, other):
        """True when both the properties and the bodies match.

        A received message may carry a 'delivery_info' attribute; it is
        deliberately not part of the comparison.
        """
        try:
            same_properties = super(Message, self).__eq__(other)
            # Short-circuit: only touch other.body when properties matched.
            return same_properties and self.body == other.body
        except AttributeError:
            return NotImplemented
| lgpl-3.0 |
Insoleet/mirage | example.py | 1 | 1205 | import asyncio
import logging
from duniterpy.documents import BlockUID
from mirage import Node, User
async def example(lp):
    """Run a small end-to-end scenario on a simulated mirage node:
    create two members, certify them, issue a dividend and send money.
    """
    # Local test node; presumably (port, currency, salt, password, loop) --
    # TODO confirm against mirage.Node.start's signature.
    node = await Node.start(4444, "testnet", "12356", "123456", lp)
    alice = User.create("testnet", "alice", "alicesalt", "alicepassword", BlockUID.empty())
    bob = User.create("testnet", "bob", "bobsalt", "bobpassword", BlockUID.empty())
    # Publish both identities, their membership requests and the mutual
    # certifications, then seal them into the first block.
    node.forge.push(alice.identity())
    node.forge.push(bob.identity())
    node.forge.push(alice.join(BlockUID.empty()))
    node.forge.push(bob.join(BlockUID.empty()))
    node.forge.push(alice.certify(bob, BlockUID.empty()))
    node.forge.push(bob.certify(alice, BlockUID.empty()))
    node.forge.forge_block()
    # Promote both users to members, then forge a couple of empty blocks.
    node.forge.set_member(alice.key.pubkey, True)
    node.forge.set_member(bob.key.pubkey, True)
    node.forge.forge_block()
    node.forge.forge_block()
    # Create a universal dividend so bob has sources to spend from.
    node.forge.generate_dividend()
    node.forge.forge_block()
    # Bob sends money to himself from his dividend sources and the
    # transaction is sealed into the final block.
    bob.send_money(10, node.forge.user_identities[bob.key.pubkey].sources, bob,
                   node.forge.blocks[-1].blockUID, "Test receive")
    node.forge.forge_block()
# Enable verbose mirage output, then drive the scenario to completion.
# NOTE(review): asyncio.get_event_loop() at module scope is deprecated in
# modern Python; asyncio.new_event_loop()/asyncio.run would be the current
# idiom -- confirm before changing, since ``example`` needs the loop object.
logging.getLogger('mirage').setLevel(logging.DEBUG)
lp = asyncio.get_event_loop()
lp.run_until_complete(example(lp))
| gpl-3.0 |
claytantor/coinbase4py | webapp/settings.py | 1 | 4533 | import os
from ConfigParser import RawConfigParser
# Directory layout helpers: BASE_DIR is the parent of the settings package,
# PROJECT_DIR the package containing this module, CONF_DIR the directory
# expected to hold the out-of-tree configuration file.
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_DIR = os.path.dirname(__file__)
CONF_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
# Build an absolute path relative to this settings module.
here = lambda x: os.path.join(os.path.abspath(os.path.dirname(__file__)), x)

# you will need to copy the example and make custom
# settings for the environment
config = RawConfigParser()
#place in a dir that is not managed in the code base
# print 'config dir: {0}/conf/gitpatron_settings.ini'.format(CONF_DIR)
# NOTE: RawConfigParser.read silently ignores a missing file; the first
# config.get below will then raise NoSectionError.
config.read('{0}/conf/coinbase4py_settings.ini'.format(CONF_DIR))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config.get('secrets', 'DJANGO_SECRET_KEY')

# SECURITY WARNING: don't run with debug turned on in production!
# Use getboolean() so an ini value of "False"/"0"/"no" really disables
# debug.  config.get() returns the raw string, and any non-empty string
# (including "False") is truthy, which would silently leave DEBUG enabled.
DEBUG = config.getboolean('debug', 'DEBUG')
TEMPLATE_DEBUG = config.getboolean('debug', 'TEMPLATE_DEBUG')

# Deployment environment name (e.g. dev/staging/prod).
ENVIRONMENT = config.get('base', 'ENVIRONMENT')

ALLOWED_HOSTS = []
#the database for the app
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        # SQLite file stored inside the package directory.
        'NAME': os.path.join(PROJECT_DIR, 'coinbase4py.db'),
    }
}

# Application definition
# NOTE(review): 'django.contrib.admin' and 'django.contrib.staticfiles'
# appear twice in this tuple; tolerated by Django 1.6 but rejected as
# duplicate apps by later Django versions -- confirm before cleaning up.
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.admin',
    'django.contrib.humanize',
    'django.contrib.staticfiles',
    'coinbase4py',
    'webapp',
    'webapp.templatetags',
)

TEMPLATE_CONTEXT_PROCESSORS = ("django.contrib.auth.context_processors.auth",
    "django.core.context_processors.request",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.static",
    "django.core.context_processors.tz",
    "django.contrib.messages.context_processors.messages")

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'webapp.urls'

WSGI_APPLICATION = 'webapp.wsgi.application'

# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''

# Additional locations of static files
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'

# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    os.path.join(PROJECT_DIR, '../', 'static/'),
)

# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)

TEMPLATE_DIRS = (
    PROJECT_DIR + '/../webapp/templates/',
)

# coinbase4py application settings pulled from the ini file: test users,
# scratch directory and the key used to encrypt stored passwords.
USER_ONE=config.get('coinbase4py','USER_ONE')
USER_TWO=config.get('coinbase4py','USER_TWO')
TEST_STATE_DIR=config.get('coinbase4py','TEST_STATE_DIR')
COINBASE4PY_PW_SECRET_KEY=config.get('coinbase4py','COINBASE4PY_PW_SECRET_KEY')

# Coinbase OAuth client credentials and callback URLs.
COINBASE_OAUTH_CLIENT_APP=config.get('coinbase','COINBASE_OAUTH_CLIENT_APP')
COINBASE_OAUTH_CLIENT_ID=config.get('coinbase','COINBASE_OAUTH_CLIENT_ID')
COINBASE_OAUTH_CLIENT_SECRET=config.get('coinbase','COINBASE_OAUTH_CLIENT_SECRET')
COINBASE_OAUTH_CLIENT_CALLBACK=config.get('coinbase','COINBASE_OAUTH_CLIENT_CALLBACK')
COINBASE4PY_APP_URL=config.get('coinbase','COINBASE4PY_APP_URL')
# Full order-callback URL: "<app url>/<callback path>".
COINBASE_ORDER_CALLBACK='{0}/{1}'.format(
    config.get('coinbase','COINBASE4PY_APP_URL'),
    config.get('coinbase','COINBASE_ORDER_CALLBACK'))
| apache-2.0 |
whitkirkchurch/baltimore | venv/lib/python2.7/site-packages/pip/_vendor/requests/structures.py | 1160 | 2977 | # -*- coding: utf-8 -*-
"""
requests.structures
~~~~~~~~~~~~~~~~~~~
Data structures that power Requests.
"""
import collections

# ``Mapping``/``MutableMapping`` moved to ``collections.abc`` in Python 3.3
# and the old ``collections.*`` aliases were removed in Python 3.10.
try:
    from collections.abc import Mapping, MutableMapping
except ImportError:  # Python 2
    from collections import Mapping, MutableMapping
class CaseInsensitiveDict(MutableMapping):
    """
    A case-insensitive ``dict``-like object.

    Implements all methods and operations of
    ``MutableMapping`` as well as dict's ``copy``. Also
    provides ``lower_items``.

    All keys are expected to be strings. The structure remembers the
    case of the last key to be set, and ``iter(instance)``,
    ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
    will contain case-sensitive keys. However, querying and contains
    testing is case insensitive::

        cid = CaseInsensitiveDict()
        cid['Accept'] = 'application/json'
        cid['aCCEPT'] == 'application/json'  # True
        list(cid) == ['Accept']  # True

    For example, ``headers['content-encoding']`` will return the
    value of a ``'Content-Encoding'`` response header, regardless
    of how the header name was originally stored.

    If the constructor, ``.update``, or equality comparison
    operations are given keys that have equal ``.lower()``s, the
    behavior is undefined.
    """
    # NOTE: subclassing the abc via the ``MutableMapping`` name imported
    # above instead of ``collections.MutableMapping`` -- the latter alias
    # was removed in Python 3.10 and made this class fail to define.

    def __init__(self, data=None, **kwargs):
        # Internal mapping: lowercased key -> (actual key, value).
        self._store = dict()
        if data is None:
            data = {}
        self.update(data, **kwargs)

    def __setitem__(self, key, value):
        # Use the lowercased key for lookups, but store the actual
        # key alongside the value.
        self._store[key.lower()] = (key, value)

    def __getitem__(self, key):
        return self._store[key.lower()][1]

    def __delitem__(self, key):
        del self._store[key.lower()]

    def __iter__(self):
        # Yield the case-preserving keys, not the lowercased lookup keys.
        return (casedkey for casedkey, mappedvalue in self._store.values())

    def __len__(self):
        return len(self._store)

    def lower_items(self):
        """Like iteritems(), but with all lowercase keys."""
        return (
            (lowerkey, keyval[1])
            for (lowerkey, keyval)
            in self._store.items()
        )

    def __eq__(self, other):
        if isinstance(other, Mapping):
            other = CaseInsensitiveDict(other)
        else:
            return NotImplemented
        # Compare insensitively
        return dict(self.lower_items()) == dict(other.lower_items())

    # Copy is required
    def copy(self):
        return CaseInsensitiveDict(self._store.values())

    def __repr__(self):
        return str(dict(self.items()))
class LookupDict(dict):
    """Dictionary lookup object.

    Item access falls through to *instance attributes*: ``d[key]`` returns
    the attribute named ``key``, or None when it does not exist, rather
    than consulting the underlying dict storage.
    """

    def __init__(self, name=None):
        self.name = name
        super(LookupDict, self).__init__()

    def __repr__(self):
        return "<lookup '%s'>" % self.name

    def __getitem__(self, key):
        # We allow fall-through here, so values default to None
        attributes = self.__dict__
        return attributes.get(key, None)

    def get(self, key, default=None):
        # Same attribute-based lookup as __getitem__, but with a
        # caller-supplied default.
        return self.__dict__.get(key, default)
| mit |
lazaronixon/enigma2 | lib/python/Tools/GetEcmInfo.py | 23 | 3729 | import os
import time
# Path of the softcam's ECM status file and the "no data" result tuple
# (text, caid, provid, pid).
ECM_INFO = '/tmp/ecm.info'
EMPTY_ECM_INFO = '','0','0','0'

# Module-level cache shared by all GetEcmInfo instances: mtime of the last
# parsed ecm.info, the parsed key/value pairs, the raw file lines and the
# last result tuple built from them.
old_ecm_time = time.time()
info = {}
ecm = ''
data = EMPTY_ECM_INFO
class GetEcmInfo:
	"""Polls /tmp/ecm.info (written by the softcam) and exposes the parsed
	ECM details.  State is kept in module-level globals, so all instances
	share one cache keyed on the file's modification time.
	"""

	def pollEcmData(self):
		"""Re-read ECM_INFO when its mtime changed.

		Returns True when fresh data was parsed into the module globals;
		otherwise returns None (implicitly) and only refreshes the
		"seconds since last ECM" counter.
		"""
		global data
		global old_ecm_time
		global info
		global ecm
		try:
			ecm_time = os.stat(ECM_INFO).st_mtime
		except:
			# File missing or unreadable: reset the cache to "no info".
			ecm_time = old_ecm_time
			data = EMPTY_ECM_INFO
			info = {}
			ecm = ''
		if ecm_time != old_ecm_time:
			# Shift the interval history one slot before re-parsing.
			oecmi1 = info.get('ecminterval1','')
			oecmi0 = info.get('ecminterval0','')
			info = {}
			info['ecminterval2'] = oecmi1
			info['ecminterval1'] = oecmi0
			old_ecm_time = ecm_time
			try:
				ecm = open(ECM_INFO, 'rb').readlines()
			except:
				ecm = ''
			# Every "key: value" line becomes an info entry.
			for line in ecm:
				d = line.split(':', 1)
				if len(d) > 1:
					info[d[0].strip()] = d[1].strip()
			data = self.getText()
			return True
		else:
			# Unchanged file: record whole seconds since the last ECM.
			info['ecminterval0'] = int(time.time()-ecm_time+0.5)

	def getEcm(self):
		"""Return (fresh_flag, raw ecm.info lines)."""
		return (self.pollEcmData(), ecm)

	def getEcmData(self):
		"""Return the cached (text, caid, provid, pid) tuple, refreshing first."""
		self.pollEcmData()
		return data

	def getInfo(self, member, ifempty = ''):
		"""Return a single parsed ecm.info field as a string."""
		self.pollEcmData()
		return str(info.get(member, ifempty))

	def getText(self):
		"""Build the (display text, caid, provid, pid) tuple from the parsed
		info dict, handling the output formats of several softcams (the
		branches below match CCcam, gbox, MGcam, a generic reader line and
		wicardd).  Any parsing error yields the empty result.
		"""
		global ecm
		try:
			# info is dictionary
			using = info.get('using', '')
			if using:
				# CCcam
				if using == 'fta':
					self.textvalue = _("FTA")
				elif using == 'emu':
					self.textvalue = "EMU (%ss)" % (info.get('ecm time', '?'))
				else:
					# Remote card: show "address @hops (time s)".
					hops = info.get('hops', None)
					if hops and hops != '0':
						hops = ' @' + hops
					else:
						hops = ''
					self.textvalue = info.get('address', '?') + hops + " (%ss)" % info.get('ecm time', '?')
			else:
				decode = info.get('decode', None)
				if decode:
					# gbox (untested)
					if info['decode'] == 'Network':
						# Try to resolve the share line for this card id.
						cardid = 'id:' + info.get('prov', '')
						try:
							share = open('/tmp/share.info', 'rb').readlines()
							for line in share:
								if cardid in line:
									self.textvalue = line.strip()
									break
							else:
								self.textvalue = cardid
						except:
							self.textvalue = decode
					else:
						self.textvalue = decode
					# gbox keeps SysID/CaID/pid in the raw lines, not in
					# "key: value" form, so pull them out by position.
					if ecm[1].startswith('SysID'):
						info['prov'] = ecm[1].strip()[6:]
					if info['response'] and 'CaID 0x' in ecm[0] and 'pid 0x' in ecm[0]:
						self.textvalue = self.textvalue + " (0.%ss)" % info['response']
						info['caid'] = ecm[0][ecm[0].find('CaID 0x')+7:ecm[0].find(',')]
						info['pid'] = ecm[0][ecm[0].find('pid 0x')+6:ecm[0].find(' =')]
						info['provid'] = info.get('prov', '0')[:4]
				else:
					source = info.get('source', None)
					if source:
						# MGcam
						# Strip the "0x" prefixes from the hex fields.
						info['caid'] = info['caid'][2:]
						info['pid'] = info['pid'][2:]
						info['provid'] = info['prov'][2:]
						# NOTE: local ``time`` shadows the time module
						# inside this branch (harmless, function-local).
						time = ""
						for line in ecm:
							if 'msec' in line:
								line = line.split(' ')
								if line[0]:
									time = " (%ss)" % (float(line[0])/1000)
								continue
						self.textvalue = source + time
					else:
						reader = info.get('reader', '')
						if reader:
							# Generic "reader" style output (e.g. OSCam
							# ecm.info) -- "reader @hops (time s)".
							hops = info.get('hops', None)
							if hops and hops != '0':
								hops = ' @' + hops
							else:
								hops = ''
							self.textvalue = reader + hops + " (%ss)" % info.get('ecm time', '?')
						else:
							response = info.get('response time', None)
							if response:
								# wicardd
								response = response.split(' ')
								self.textvalue = "%s (%ss)" % (response[4], float(response[0])/1000)
							else:
								self.textvalue = ""
			# Field names differ between softcams; try the known aliases.
			decCI = info.get('caid', info.get('CAID', '0'))
			provid = info.get('provid', info.get('prov', info.get('Provider', '0')))
			ecmpid = info.get('pid', info.get('ECM PID', '0'))
		except:
			# Any malformed input resets the cached raw lines and yields
			# the empty result.
			ecm = ''
			self.textvalue = ""
			decCI='0'
			provid='0'
			ecmpid='0'
		return self.textvalue,decCI,provid,ecmpid
| gpl-2.0 |
KeplerGO/kadenza | setup.py | 1 | 1430 | #!/usr/bin/env python
import os
import sys
from setuptools import setup
# Prepare and send a new release to PyPI
# (invoked as ``python setup.py release``; note that the substring test
# below also matches arguments merely containing "release")
if "release" in sys.argv[-1]:
    os.system("python setup.py sdist")
    os.system("twine upload dist/*")
    os.system("rm -rf dist/kadenza*")
    sys.exit()

# Load the __version__ variable without importing the package already
exec(open('kadenza/version.py').read())

# Command-line tools
entry_points = {'console_scripts': [
    'kadenza-tpf = kadenza.kadenza:kadenza_tpf_main',
    'kadenza-ffi = kadenza.kadenza:kadenza_ffi_main'
]}

setup(name='kadenza',
      version=__version__,  # defined by the exec() above
      description="Converts raw cadence data from the Kepler spacecraft "
                  "into astronomer-friendly FITS files.",
      long_description=open('README.md').read(),
      author='Geert Barentsen',
      author_email='hello@geert.io',
      license='MIT',
      packages=['kadenza'],
      install_requires=['numpy>=1.8',
                        'astropy>=1.1',
                        'tqdm'],
      entry_points=entry_points,
      include_package_data=True,
      classifiers=[
          "Development Status :: 5 - Production/Stable",
          "License :: OSI Approved :: MIT License",
          "Operating System :: OS Independent",
          "Programming Language :: Python",
          "Intended Audience :: Science/Research",
          "Topic :: Scientific/Engineering :: Astronomy",
          ],
      )
| mit |
tclose/python-neo | neo/core/event.py | 7 | 1955 | # -*- coding: utf-8 -*-
'''
This module defines :class:`Event`, an event occuring at a particular point in
time.
:class:`Event` derives from :class:`BaseNeo`, from :module:`neo.core.baseneo`.
'''
# needed for python 3 compatibility
from __future__ import absolute_import, division, print_function
import quantities as pq
from neo.core.baseneo import BaseNeo
class Event(BaseNeo):
    '''
    An event occuring at a particular point in time.

    Useful for managing trigger, stimulus, comment, etc.

    *Usage*::

        >>> from neo.core import Event
        >>> from quantities import s
        >>>
        >>> evt = Event(50*s, label='trigger')
        >>>
        >>> evt.time
        array(50.0) * s
        >>> evt.label
        'trigger'

    *Required attributes/properties*:
        :time: (quantity scalar) The time of the event.
        :label: (str) Name or label for the event.

    *Recommended attributes/properties*:
        :name: (str) A label for the dataset.
        :description: (str) Text description.
        :file_origin: (str) Filesystem path or URL of the original data file.

    Note: Any other additional arguments are assumed to be user-specific
    metadata and stored in :attr:`annotations`.

    '''

    _single_parent_objects = ('Segment',)

    _necessary_attrs = (('time', pq.Quantity, 0),
                        ('label', str))

    def __init__(self, time, label, name=None, description=None,
                 file_origin=None, **annotations):
        '''
        Initialize a new :class:`Event` instance.
        '''
        BaseNeo.__init__(self, name=name, file_origin=file_origin,
                         description=description, **annotations)

        self.time = time
        self.label = label

        # Set by the containing Segment when the event is attached to one.
        self.segment = None

    def merge(self, other):
        '''
        Merging is not supported in :class:`Event`.
        '''
        # Fixed copy-paste bug: the docstring and error message previously
        # referred to Epoch although this is the Event class.
        raise NotImplementedError('Cannot merge Event objects')
| bsd-3-clause |
zephirefaith/AI_Fall15_Assignments | A2/lib/networkx/classes/graph.py | 13 | 59103 | """Base class for undirected graphs.
The Graph class allows any hashable object as a node
and can associate key/value attribute pairs with each undirected edge.
Self-loops are allowed but multiple edges are not (see MultiGraph).
For directed graphs see DiGraph and MultiDiGraph.
"""
# Copyright (C) 2004-2015 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
from copy import deepcopy
import networkx as nx
from networkx.exception import NetworkXError
import networkx.convert as convert
__author__ = """\n""".join(['Aric Hagberg (hagberg@lanl.gov)',
'Pieter Swart (swart@lanl.gov)',
'Dan Schult(dschult@colgate.edu)'])
class Graph(object):
"""
Base class for undirected graphs.
A Graph stores nodes and edges with optional data, or attributes.
Graphs hold undirected edges. Self loops are allowed but multiple
(parallel) edges are not.
Nodes can be arbitrary (hashable) Python objects with optional
key/value attributes.
Edges are represented as links between nodes with optional
key/value attributes.
Parameters
----------
data : input graph
Data to initialize graph. If data=None (default) an empty
graph is created. The data can be an edge list, or any
NetworkX graph object. If the corresponding optional Python
packages are installed the data can also be a NumPy matrix
or 2d ndarray, a SciPy sparse matrix, or a PyGraphviz graph.
attr : keyword arguments, optional (default= no attributes)
Attributes to add to graph as key=value pairs.
See Also
--------
DiGraph
MultiGraph
MultiDiGraph
Examples
--------
Create an empty graph structure (a "null graph") with no nodes and
no edges.
>>> G = nx.Graph()
G can be grown in several ways.
**Nodes:**
Add one node at a time:
>>> G.add_node(1)
Add the nodes from any container (a list, dict, set or
even the lines from a file or the nodes from another graph).
>>> G.add_nodes_from([2,3])
>>> G.add_nodes_from(range(100,110))
>>> H=nx.Graph()
>>> H.add_path([0,1,2,3,4,5,6,7,8,9])
>>> G.add_nodes_from(H)
In addition to strings and integers any hashable Python object
(except None) can represent a node, e.g. a customized node object,
or even another Graph.
>>> G.add_node(H)
**Edges:**
G can also be grown by adding edges.
Add one edge,
>>> G.add_edge(1, 2)
a list of edges,
>>> G.add_edges_from([(1,2),(1,3)])
or a collection of edges,
>>> G.add_edges_from(H.edges())
If some edges connect nodes not yet in the graph, the nodes
are added automatically. There are no errors when adding
nodes or edges that already exist.
**Attributes:**
Each graph, node, and edge can hold key/value attribute pairs
in an associated attribute dictionary (the keys must be hashable).
By default these are empty, but can be added or changed using
add_edge, add_node or direct manipulation of the attribute
dictionaries named graph, node and edge respectively.
>>> G = nx.Graph(day="Friday")
>>> G.graph
{'day': 'Friday'}
Add node attributes using add_node(), add_nodes_from() or G.node
>>> G.add_node(1, time='5pm')
>>> G.add_nodes_from([3], time='2pm')
>>> G.node[1]
{'time': '5pm'}
>>> G.node[1]['room'] = 714
>>> del G.node[1]['room'] # remove attribute
>>> G.nodes(data=True)
[(1, {'time': '5pm'}), (3, {'time': '2pm'})]
Warning: adding a node to G.node does not add it to the graph.
Add edge attributes using add_edge(), add_edges_from(), subscript
notation, or G.edge.
>>> G.add_edge(1, 2, weight=4.7 )
>>> G.add_edges_from([(3,4),(4,5)], color='red')
>>> G.add_edges_from([(1,2,{'color':'blue'}), (2,3,{'weight':8})])
>>> G[1][2]['weight'] = 4.7
>>> G.edge[1][2]['weight'] = 4
**Shortcuts:**
Many common graph features allow python syntax to speed reporting.
>>> 1 in G # check if node in graph
True
>>> [n for n in G if n<3] # iterate through nodes
[1, 2]
>>> len(G) # number of nodes in graph
5
The fastest way to traverse all edges of a graph is via
adjacency_iter(), but the edges() method is often more convenient.
>>> for n,nbrsdict in G.adjacency_iter():
... for nbr,eattr in nbrsdict.items():
... if 'weight' in eattr:
... (n,nbr,eattr['weight'])
(1, 2, 4)
(2, 1, 4)
(2, 3, 8)
(3, 2, 8)
>>> G.edges(data='weight')
[(1, 2, 4), (2, 3, 8), (3, 4, None), (4, 5, None)]
**Reporting:**
Simple graph information is obtained using methods.
Iterator versions of many reporting methods exist for efficiency.
Methods exist for reporting nodes(), edges(), neighbors() and degree()
as well as the number of nodes and edges.
For details on these and other miscellaneous methods, see below.
**Subclasses (Advanced):**
The Graph class uses a dict-of-dict-of-dict data structure.
The outer dict (node_dict) holds adjacency lists keyed by node.
The next dict (adjlist) represents the adjacency list and holds
edge data keyed by neighbor. The inner dict (edge_attr) represents
the edge data and holds edge attribute values keyed by attribute names.
Each of these three dicts can be replaced by a user defined
dict-like object. In general, the dict-like features should be
maintained but extra features can be added. To replace one of the
dicts create a new graph class by changing the class(!) variable
holding the factory for that dict-like structure. The variable names
are node_dict_factory, adjlist_dict_factory and edge_attr_dict_factory.
node_dict_factory : function, (default: dict)
Factory function to be used to create the outer-most dict
in the data structure that holds adjacency lists keyed by node.
It should require no arguments and return a dict-like object.
adjlist_dict_factory : function, (default: dict)
Factory function to be used to create the adjacency list
dict which holds edge data keyed by neighbor.
It should require no arguments and return a dict-like object
edge_attr_dict_factory : function, (default: dict)
Factory function to be used to create the edge attribute
dict which holds attrbute values keyed by attribute name.
It should require no arguments and return a dict-like object.
Examples
--------
Create a graph object that tracks the order nodes are added.
>>> from collections import OrderedDict
>>> class OrderedNodeGraph(nx.Graph):
... node_dict_factory=OrderedDict
>>> G=OrderedNodeGraph()
>>> G.add_nodes_from( (2,1) )
>>> G.nodes()
[2, 1]
>>> G.add_edges_from( ((2,2), (2,1), (1,1)) )
>>> G.edges()
[(2, 1), (2, 2), (1, 1)]
Create a graph object that tracks the order nodes are added
and for each node track the order that neighbors are added.
>>> class OrderedGraph(nx.Graph):
... node_dict_factory = OrderedDict
... adjlist_dict_factory = OrderedDict
>>> G = OrderedGraph()
>>> G.add_nodes_from( (2,1) )
>>> G.nodes()
[2, 1]
>>> G.add_edges_from( ((2,2), (2,1), (1,1)) )
>>> G.edges()
[(2, 2), (2, 1), (1, 1)]
Create a low memory graph class that effectively disallows edge
attributes by using a single attribute dict for all edges.
This reduces the memory used, but you lose edge attributes.
>>> class ThinGraph(nx.Graph):
... all_edge_dict = {'weight': 1}
... def single_edge_dict(self):
... return self.all_edge_dict
... edge_attr_dict_factory = single_edge_dict
>>> G = ThinGraph()
>>> G.add_edge(2,1)
>>> G.edges(data= True)
[(1, 2, {'weight': 1})]
>>> G.add_edge(2,2)
>>> G[2][1] is G[2][2]
True
"""
node_dict_factory = dict
adjlist_dict_factory = dict
edge_attr_dict_factory = dict
def __init__(self, data=None, **attr):
    """Initialize a graph with edges, name, graph attributes.
    Parameters
    ----------
    data : input graph
        Data to initialize graph. If data=None (default) an empty
        graph is created. The data can be an edge list, or any
        NetworkX graph object. If the corresponding optional Python
        packages are installed the data can also be a NumPy matrix
        or 2d ndarray, a SciPy sparse matrix, or a PyGraphviz graph.
    name : string, optional (default='')
        An optional name for the graph.
    attr : keyword arguments, optional (default= no attributes)
        Attributes to add to graph as key=value pairs.
    See Also
    --------
    convert
    Examples
    --------
    >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
    >>> G = nx.Graph(name='my graph')
    >>> e = [(1,2),(2,3),(3,4)] # list of edges
    >>> G = nx.Graph(e)
    Arbitrary graph attribute pairs (key=value) may be assigned
    >>> G=nx.Graph(e, day="Friday")
    >>> G.graph
    {'day': 'Friday'}
    """
    # Bind the (possibly subclass-overridden) class-level factories onto
    # the instance so later code reads them uniformly via self.
    self.node_dict_factory = ndf = self.node_dict_factory
    self.adjlist_dict_factory = self.adjlist_dict_factory
    self.edge_attr_dict_factory = self.edge_attr_dict_factory
    self.graph = {}  # dictionary for graph-level attributes
    self.node = ndf()  # node -> node attribute dict
    self.adj = ndf()  # node -> (neighbor -> edge attribute dict)
    # attempt to load graph with data
    if data is not None:
        # NOTE(review): `convert` is presumably imported at module level
        # (outside this view); it populates self from many input formats.
        convert.to_networkx_graph(data, create_using=self)
    # load graph attributes (must be after convert)
    self.graph.update(attr)
    # Public alias: G.edge and G.adj refer to the same dict object.
    self.edge = self.adj
@property
def name(self):
    """Graph name, stored under the 'name' key of the graph attribute
    dict.  Reading an unset name yields the empty string."""
    try:
        return self.graph['name']
    except KeyError:
        return ''

@name.setter
def name(self, s):
    """Set the graph name by writing the 'name' graph attribute."""
    self.graph['name'] = s
def __str__(self):
    """Return the graph name so str(G) reads naturally.

    Examples
    --------
    >>> G = nx.Graph(name='foo')
    >>> str(G)
    'foo'
    """
    graph_name = self.name
    return graph_name
def __iter__(self):
    """Iterate over the nodes; enables the expression 'for n in G'.

    Returns
    -------
    niter : iterator
        An iterator over all nodes in the graph.
    """
    return iter(self.node.keys())
def __contains__(self, n):
    """Return True if n is a node of the graph; enables 'n in G'.

    Unhashable objects can never be nodes, so they yield False
    rather than raising TypeError.
    """
    try:
        return self.node.__contains__(n)
    except TypeError:
        # unhashable n cannot be a dict key, hence not a node
        return False
def __len__(self):
    """Return the number of nodes; enables the expression 'len(G)'.

    Returns
    -------
    nnodes : int
        The number of nodes in the graph.
    """
    node_count = len(self.node)
    return node_count
def __getitem__(self, n):
    """Return the adjacency dict of node n; enables the expression 'G[n]'.

    The internal dict itself is returned, not a copy: treat it as
    read-only, since assigning into it corrupts the graph structure.

    Raises
    ------
    KeyError
        If n is not a node of the graph.
    """
    neighbor_dict = self.adj[n]
    return neighbor_dict
def add_node(self, n, attr_dict=None, **attr):
    """Add a single node n and update its attributes.

    Parameters
    ----------
    n : node
        Any hashable object except None.
    attr_dict : dictionary, optional (default= no attributes)
        Node attributes; merged with keyword attributes (keywords win).
    attr : keyword arguments, optional
        Set or change attributes using key=value.

    Raises
    ------
    NetworkXError
        If attr_dict is given but is not dict-like.

    Notes
    -----
    Adding an existing node only refreshes its attribute dict.
    """
    # Merge keyword attributes into the provided dict (keywords win).
    if attr_dict is None:
        attr_dict = attr
    else:
        try:
            attr_dict.update(attr)
        except AttributeError:
            raise NetworkXError(
                "The attr_dict argument must be a dictionary.")
    if n in self.node:
        # existing node: update attributes in place
        self.node[n].update(attr_dict)
    else:
        # new node: give it an empty adjacency dict and store attrs
        self.adj[n] = self.adjlist_dict_factory()
        self.node[n] = attr_dict
def add_nodes_from(self, nodes, **attr):
    """Add multiple nodes.
    Parameters
    ----------
    nodes : iterable container
        A container of nodes (list, dict, set, etc.).
        OR
        A container of (node, attribute dict) tuples.
        Node attributes are updated using the attribute dict.
    attr : keyword arguments, optional (default= no attributes)
        Update attributes for all nodes in nodes.
        Node attributes specified in nodes as a tuple
        take precedence over attributes specified generally.
    See Also
    --------
    add_node
    Examples
    --------
    >>> G = nx.Graph() # or DiGraph, MultiGraph, MultiDiGraph, etc
    >>> G.add_nodes_from('Hello')
    >>> K3 = nx.Graph([(0,1),(1,2),(2,0)])
    >>> G.add_nodes_from(K3)
    >>> sorted(G.nodes(),key=str)
    [0, 1, 2, 'H', 'e', 'l', 'o']
    Use keywords to update specific node attributes for every node.
    >>> G.add_nodes_from([1,2], size=10)
    >>> G.add_nodes_from([3,4], weight=0.4)
    Use (node, attrdict) tuples to update attributes for specific
    nodes.
    >>> G.add_nodes_from([(1,dict(size=11)), (2,{'color':'blue'})])
    >>> G.node[1]['size']
    11
    >>> H = nx.Graph()
    >>> H.add_nodes_from(G.nodes(data=True))
    >>> H.node[1]['size']
    11
    """
    for n in nodes:
        # keep all this inside try/except because
        # CPython throws TypeError on n not in self.succ,
        # while pre-2.7.5 ironpython throws on self.succ[n]
        try:
            # Plain-node case: n is hashable and used directly.
            if n not in self.node:
                self.adj[n] = self.adjlist_dict_factory()
                self.node[n] = attr.copy()
            else:
                self.node[n].update(attr)
        except TypeError:
            # (node, attrdict) case: a tuple containing a dict is
            # unhashable, so the membership test above raised TypeError.
            nn, ndict = n
            if nn not in self.node:
                self.adj[nn] = self.adjlist_dict_factory()
                # keyword attrs first, then the per-node dict, so the
                # per-node values take precedence
                newdict = attr.copy()
                newdict.update(ndict)
                self.node[nn] = newdict
            else:
                olddict = self.node[nn]
                olddict.update(attr)
                olddict.update(ndict)
def remove_node(self, n):
    """Remove node n and every edge incident to it.

    Parameters
    ----------
    n : node
        A node in the graph.

    Raises
    ------
    NetworkXError
        If n is not in the graph.
    """
    adj = self.adj
    try:
        # materialize the neighbor list so adj can be mutated below;
        # this also covers self-loops (n may be its own neighbor)
        neighbors = list(adj[n])
        del self.node[n]
    except KeyError:
        raise NetworkXError("The node %s is not in the graph." % (n,))
    for nbr in neighbors:
        del adj[nbr][n]  # drop the nbr -> n half of each edge
    del adj[n]  # drop n's own adjacency dict (the n -> nbr halves)
def remove_nodes_from(self, nodes):
    """Remove every node in *nodes* and its incident edges.

    Parameters
    ----------
    nodes : iterable container
        A container of nodes (list, dict, set, etc.).  Nodes that are
        not in the graph are silently ignored.
    """
    adj = self.adj
    for n in nodes:
        if n not in self.node:
            continue  # silently skip absent nodes
        del self.node[n]
        # list() lets us delete from the dict we are walking
        for nbr in list(adj[n]):
            del adj[nbr][n]
        del adj[n]
def nodes_iter(self, data=False):
    """Return an iterator over the nodes.

    Parameters
    ----------
    data : boolean, optional (default=False)
        If True, yield (node, node attribute dict) two-tuples
        instead of bare nodes.

    Returns
    -------
    niter : iterator
        An iterator over nodes, or over (node, data dict) pairs when
        data=True.

    Notes
    -----
    If the node data is not required it is simpler and equivalent to
    use the expression 'for n in G'.
    """
    if not data:
        return iter(self.node)
    return iter(self.node.items())
def nodes(self, data=False):
    """Return a list of the nodes in the graph.

    Parameters
    ----------
    data : boolean, optional (default=False)
        If True, return a list of (node, node data dict) two-tuples
        instead of bare nodes.

    Returns
    -------
    nlist : list
        The graph's nodes, materialized from nodes_iter.
    """
    node_iterator = self.nodes_iter(data=data)
    return list(node_iterator)
def number_of_nodes(self):
    """Return the number of nodes in the graph.

    Returns
    -------
    nnodes : int
        The number of nodes (identical to len(G) and G.order()).
    """
    count = len(self.node)
    return count
def order(self):
    """Return the number of nodes in the graph.

    Identical to number_of_nodes() and len(G).

    Returns
    -------
    nnodes : int
    """
    node_total = len(self.node)
    return node_total
def has_node(self, n):
    """Return True if the graph contains the node n.

    Equivalent to the more readable expression 'n in G'.
    Unhashable objects return False rather than raising.

    Parameters
    ----------
    n : node
    """
    try:
        return self.node.__contains__(n)
    except TypeError:
        # unhashable n cannot be a node
        return False
def add_edge(self, u, v, attr_dict=None, **attr):
    """Add the edge (u, v), creating missing endpoints automatically.

    Parameters
    ----------
    u,v : nodes
        Hashable (non-None) Python objects.
    attr_dict : dictionary, optional (default= no attributes)
        Edge attributes; merged with keyword attributes (keywords win).
    attr : keyword arguments, optional
        Edge data assigned as key=value pairs.

    Raises
    ------
    NetworkXError
        If attr_dict is given but is not dict-like.

    Notes
    -----
    Re-adding an existing edge updates its data.  The same attribute
    dict object is shared between adj[u][v] and adj[v][u].
    """
    # merge keyword attributes into attr_dict (keywords win)
    if attr_dict is None:
        attr_dict = attr
    else:
        try:
            attr_dict.update(attr)
        except AttributeError:
            raise NetworkXError(
                "The attr_dict argument must be a dictionary.")
    # create any endpoint that is not yet a node
    for endpoint in (u, v):
        if endpoint not in self.node:
            self.adj[endpoint] = self.adjlist_dict_factory()
            self.node[endpoint] = {}
    # fetch (or create) the shared edge-data dict, update it, and store
    # it under both directions
    datadict = self.adj[u].get(v, self.edge_attr_dict_factory())
    datadict.update(attr_dict)
    self.adj[u][v] = datadict
    self.adj[v][u] = datadict
def add_edges_from(self, ebunch, attr_dict=None, **attr):
    """Add all the edges in ebunch.

    Parameters
    ----------
    ebunch : container of edges
        Edges given as 2-tuples (u,v) or 3-tuples (u,v,d) where d is
        a dictionary of edge data.
    attr_dict : dictionary, optional (default= no attributes)
        Attributes applied to every edge; merged with keyword
        attributes (keywords win).
    attr : keyword arguments, optional
        Edge data assigned as key=value pairs.

    Raises
    ------
    NetworkXError
        If attr_dict is not dict-like, or an edge tuple has the wrong
        length.

    Notes
    -----
    Per-edge dicts d take precedence over attr_dict/keyword values.
    Duplicate edges merely update the stored data.
    """
    # merge keyword attributes into attr_dict (keywords win)
    if attr_dict is None:
        attr_dict = attr
    else:
        try:
            attr_dict.update(attr)
        except AttributeError:
            raise NetworkXError(
                "The attr_dict argument must be a dictionary.")
    for e in ebunch:
        if len(e) == 3:
            u, v, dd = e
        elif len(e) == 2:
            u, v = e
            dd = {}  # plain edge: no per-edge data
        else:
            raise NetworkXError(
                "Edge tuple %s must be a 2-tuple or 3-tuple." % (e,))
        # create missing endpoints
        for endpoint in (u, v):
            if endpoint not in self.node:
                self.adj[endpoint] = self.adjlist_dict_factory()
                self.node[endpoint] = {}
        # shared data dict, updated with common attrs then per-edge dd
        datadict = self.adj[u].get(v, self.edge_attr_dict_factory())
        datadict.update(attr_dict)
        datadict.update(dd)
        self.adj[u][v] = datadict
        self.adj[v][u] = datadict
def add_weighted_edges_from(self, ebunch, weight='weight', **attr):
    """Add edges given as (u, v, w) 3-tuples, storing w under *weight*.

    Parameters
    ----------
    ebunch : container of edges
        Each edge is a 3-tuple (u, v, w) where w is a number.
    weight : string, optional (default= 'weight')
        The attribute name under which each w is stored.
    attr : keyword arguments, optional (default= no attributes)
        Extra attributes applied to every added edge.
    """
    # rewrap each 3-tuple so the weight rides in an attribute dict,
    # then delegate to add_edges_from
    rewrapped = ((u, v, {weight: w}) for u, v, w in ebunch)
    self.add_edges_from(rewrapped, **attr)
def remove_edge(self, u, v):
    """Remove the edge between u and v.

    Parameters
    ----------
    u,v : nodes
        The endpoints of the edge to remove.

    Raises
    ------
    NetworkXError
        If there is no edge between u and v.
    """
    adj = self.adj
    try:
        del adj[u][v]
        # a non-self-loop is stored twice; drop the mirror entry too
        if u != v:
            del adj[v][u]
    except KeyError:
        raise NetworkXError("The edge %s-%s is not in the graph" % (u, v))
def remove_edges_from(self, ebunch):
    """Remove all edges specified in ebunch.

    Parameters
    ----------
    ebunch : list or container of edge tuples
        Edges given as 2-tuples (u,v) or 3-tuples (u,v,k) where the
        third element is ignored.

    Notes
    -----
    Edges not present in the graph are silently skipped.
    """
    adj = self.adj
    for e in ebunch:
        u, v = e[:2]  # any extra edge data is ignored
        if u not in adj or v not in adj[u]:
            continue  # edge absent: fail silently
        del adj[u][v]
        # a self-loop is stored only once
        if u != v:
            del adj[v][u]
def has_edge(self, u, v):
    """Return True if the edge (u, v) is in the graph.

    Parameters
    ----------
    u,v : nodes
        Hashable (non-None) Python objects.

    Returns
    -------
    edge_ind : bool
        True if the edge exists, False otherwise (including when u is
        not a node at all).
    """
    neighbors = self.adj.get(u)
    if neighbors is None:
        return False  # u is not even a node
    return v in neighbors
def neighbors(self, n):
    """Return a list of the nodes adjacent to node n.

    Parameters
    ----------
    n : node
        A node in the graph.

    Returns
    -------
    nlist : list
        The neighbors of n.

    Raises
    ------
    NetworkXError
        If n is not in the graph.

    Notes
    -----
    Accessing G[n] directly is usually faster and returns the internal
    adjacency dict instead of a list.
    """
    if n not in self.adj:
        raise NetworkXError("The node %s is not in the graph." % (n,))
    return list(self.adj[n])
def neighbors_iter(self, n):
    """Return an iterator over all neighbors of node n.

    Raises
    ------
    NetworkXError
        If n is not in the graph.

    Notes
    -----
    Iterating G[n] directly is the faster idiom.
    """
    if n not in self.adj:
        raise NetworkXError("The node %s is not in the graph." % (n,))
    return iter(self.adj[n])
def edges(self, nbunch=None, data=False, default=None):
    """Return a list of edges as (u, v) or (u, v, data) tuples.

    Parameters
    ----------
    nbunch : iterable container, optional (default= all nodes)
        Restrict the result to edges incident to these nodes.
    data : bool or string, optional (default=False)
        False -> (u,v) tuples; True -> (u,v,datadict); an attribute
        name -> (u,v,datadict[name] or *default*).
    default : value, optional (default=None)
        Fallback when *data* names a missing attribute.

    Returns
    -------
    edge_list : list of edge tuples
        Simply a materialization of edges_iter.
    """
    edge_iterator = self.edges_iter(nbunch, data, default)
    return list(edge_iterator)
def edges_iter(self, nbunch=None, data=False, default=None):
    """Return an iterator over the edges.
    Edges are returned as tuples with optional data
    in the order (node, neighbor, data).
    Parameters
    ----------
    nbunch : iterable container, optional (default= all nodes)
        A container of nodes. The container will be iterated
        through once.
    data : string or bool, optional (default=False)
        The edge attribute returned in 3-tuple (u,v,ddict[data]).
        If True, return edge attribute dict in 3-tuple (u,v,ddict).
        If False, return 2-tuple (u,v).
    default : value, optional (default=None)
        Value used for edges that don't have the requested attribute.
        Only relevant if data is not True or False.
    Returns
    -------
    edge_iter : iterator
        An iterator of (u,v) or (u,v,d) tuples of edges.
    See Also
    --------
    edges : return a list of edges
    Notes
    -----
    Nodes in nbunch that are not in the graph will be (quietly) ignored.
    For directed graphs this returns the out-edges.
    Examples
    --------
    >>> G = nx.Graph()   # or MultiGraph, etc
    >>> G.add_path([0,1,2])
    >>> G.add_edge(2,3,weight=5)
    >>> [e for e in G.edges_iter()]
    [(0, 1), (1, 2), (2, 3)]
    >>> list(G.edges_iter(data=True)) # default data is {} (empty dict)
    [(0, 1, {}), (1, 2, {}), (2, 3, {'weight': 5})]
    >>> list(G.edges_iter(data='weight', default=1))
    [(0, 1, 1), (1, 2, 1), (2, 3, 5)]
    >>> list(G.edges_iter([0,3]))
    [(0, 1), (3, 2)]
    >>> list(G.edges_iter(0))
    [(0, 1)]
    """
    seen = {}  # helper dict to keep track of multiply stored edges
    if nbunch is None:
        nodes_nbrs = self.adj.items()
    else:
        nodes_nbrs = ((n, self.adj[n]) for n in self.nbunch_iter(nbunch))
    # Each undirected edge is stored under both endpoints; an edge is
    # yielded only when the second stored copy would repeat a node
    # already fully processed (tracked via `seen`).
    if data is True:
        # yield (u, v, full data dict)
        for n, nbrs in nodes_nbrs:
            for nbr, ddict in nbrs.items():
                if nbr not in seen:
                    yield (n, nbr, ddict)
            seen[n] = 1
    elif data is not False:
        # data names an attribute: yield (u, v, value-or-default)
        for n, nbrs in nodes_nbrs:
            for nbr, ddict in nbrs.items():
                if nbr not in seen:
                    d = ddict[data] if data in ddict else default
                    yield (n, nbr, d)
            seen[n] = 1
    else:  # data is False
        for n, nbrs in nodes_nbrs:
            for nbr in nbrs:
                if nbr not in seen:
                    yield (n, nbr)
            seen[n] = 1
    del seen
def get_edge_data(self, u, v, default=None):
    """Return the attribute dictionary of edge (u, v).

    Parameters
    ----------
    u,v : nodes
    default : any Python object (default=None)
        Value returned when the edge (u, v) does not exist.

    Returns
    -------
    edge_dict : dictionary
        The (shared, internal) edge attribute dict, or *default*.

    Notes
    -----
    G[u][v] is faster when the edge is known to exist.
    """
    neighbors = self.adj.get(u)
    if neighbors is None:
        return default  # u is not a node
    return neighbors.get(v, default)
def adjacency_list(self):
    """Return an adjacency-list representation of the graph.

    Returns
    -------
    adj_list : list of lists
        One neighbor list per node, in internal (G.nodes()) order.
    """
    return [list(neighbor_dict) for neighbor_dict in self.adj.values()]
def adjacency_iter(self):
    """Return an iterator of (node, adjacency dict) pairs for all nodes.

    This exposes the internal storage directly, so it is the fastest
    way to look at every edge; do not modify the yielded dicts.

    Returns
    -------
    adj_iter : iterator
        An iterator of (node, adjacency dictionary) pairs.
    """
    adjacency = self.adj
    return iter(adjacency.items())
def degree(self, nbunch=None, weight=None):
    """Return the degree of a single node or a dict of node degrees.

    Parameters
    ----------
    nbunch : iterable container or node, optional (default=all nodes)
        If a single node of the graph, its degree is returned as a
        number; otherwise a dict of node -> degree.
    weight : string or None, optional (default=None)
        Edge attribute used as weight; None counts each edge as 1.

    Returns
    -------
    nd : dictionary, or number
    """
    if nbunch in self:
        # single existing node: unwrap the lone (node, degree) pair
        _, deg = next(self.degree_iter(nbunch, weight))
        return deg
    return dict(self.degree_iter(nbunch, weight))
def degree_iter(self, nbunch=None, weight=None):
    """Yield (node, degree) two-tuples, optionally edge-weighted.

    Parameters
    ----------
    nbunch : iterable container, optional (default=all nodes)
        Restrict iteration to these nodes.
    weight : string or None, optional (default=None)
        Edge attribute used as weight; missing attributes count as 1.
        With weight=None every edge counts as 1.

    Notes
    -----
    A self-loop contributes 2 (or twice its weight) to the degree.
    """
    if nbunch is None:
        nodes_nbrs = self.adj.items()
    else:
        nodes_nbrs = ((n, self.adj[n]) for n in self.nbunch_iter(nbunch))
    if weight is None:
        for n, nbrs in nodes_nbrs:
            selfloop = 1 if n in nbrs else 0
            yield (n, len(nbrs) + selfloop)
    else:
        # weighted: sum neighbor edge weights, doubling self-loops
        for n, nbrs in nodes_nbrs:
            deg = sum(d.get(weight, 1) for d in nbrs.values())
            if n in nbrs:
                deg += nbrs[n].get(weight, 1)
            yield (n, deg)
def clear(self):
    """Remove all nodes and edges from the graph.

    This also clears the graph name and all graph, node, and edge
    attributes.
    """
    self.name = ''
    for storage in (self.adj, self.node, self.graph):
        storage.clear()
def copy(self):
    """Return a deep copy of the graph.

    Returns
    -------
    G : Graph
        A copy including all node, edge, and graph attributes.

    See Also
    --------
    to_directed : return a directed copy of the graph.
    """
    duplicate = deepcopy(self)
    return duplicate
def is_multigraph(self):
    """Return True if graph is a multigraph, False otherwise.

    This class stores at most one edge between any pair of nodes,
    so it always returns False; MultiGraph subclasses override this.
    """
    return False
def is_directed(self):
    """Return True if graph is directed, False otherwise.

    This undirected base class always returns False; DiGraph
    subclasses override this.
    """
    return False
def to_directed(self):
    """Return a directed representation of the graph.
    Returns
    -------
    G : DiGraph
        A directed graph with the same name, same nodes, and with
        each edge (u,v,data) replaced by two directed edges
        (u,v,data) and (v,u,data).
    Notes
    -----
    This returns a "deepcopy" of the edge, node, and
    graph attributes which attempts to completely copy
    all of the data and references.
    This is in contrast to the similar D=DiGraph(G) which returns a
    shallow copy of the data.
    See the Python copy module for more information on shallow
    and deep copies, http://docs.python.org/library/copy.html.
    Warning: If you have subclassed Graph to use dict-like objects in the
    data structure, those changes do not transfer to the DiGraph
    created by this method.
    Examples
    --------
    >>> G = nx.Graph()   # or MultiGraph, etc
    >>> G.add_path([0,1])
    >>> H = G.to_directed()
    >>> H.edges()
    [(0, 1), (1, 0)]
    If already directed, return a (deep) copy
    >>> G = nx.DiGraph()   # or MultiDiGraph, etc
    >>> G.add_path([0,1])
    >>> H = G.to_directed()
    >>> H.edges()
    [(0, 1)]
    """
    # local import avoids a circular dependency at module load time
    from networkx import DiGraph
    G = DiGraph()
    G.name = self.name
    G.add_nodes_from(self)
    # each undirected edge (u, v) becomes both arcs (u, v) and (v, u);
    # the data dict is deep-copied once per arc, so the two directions
    # do NOT share a dict (unlike the undirected storage)
    G.add_edges_from(((u, v, deepcopy(data))
                      for u, nbrs in self.adjacency_iter()
                      for v, data in nbrs.items()))
    G.graph = deepcopy(self.graph)
    G.node = deepcopy(self.node)
    return G
def to_undirected(self):
    """Return an undirected copy of the graph.

    For this already-undirected class the result is simply a deep
    copy, including all graph, node, and edge attributes.

    Returns
    -------
    G : Graph/MultiGraph
        A deepcopy of the graph.

    See Also
    --------
    copy, add_edge, add_edges_from
    """
    undirected_copy = deepcopy(self)
    return undirected_copy
def subgraph(self, nbunch):
    """Return the subgraph induced on nodes in nbunch.
    The induced subgraph of the graph contains the nodes in nbunch
    and the edges between those nodes.
    Parameters
    ----------
    nbunch : list, iterable
        A container of nodes which will be iterated through once.
    Returns
    -------
    G : Graph
        A subgraph of the graph with the same edge attributes.
    Notes
    -----
    The graph, edge or node attributes just point to the original graph.
    So changes to the node or edge structure will not be reflected in
    the original graph while changes to the attributes will.
    To create a subgraph with its own copy of the edge/node attributes use:
    nx.Graph(G.subgraph(nbunch))
    If edge attributes are containers, a deep copy can be obtained using:
    G.subgraph(nbunch).copy()
    For an inplace reduction of a graph to a subgraph you can remove nodes:
    G.remove_nodes_from([ n in G if n not in set(nbunch)])
    Examples
    --------
    >>> G = nx.Graph()   # or DiGraph, MultiGraph, MultiDiGraph, etc
    >>> G.add_path([0,1,2,3])
    >>> H = G.subgraph([0,1,2])
    >>> H.edges()
    [(0, 1), (1, 2)]
    """
    bunch = self.nbunch_iter(nbunch)
    # create new graph and copy subgraph into it
    H = self.__class__()
    # copy node and attribute dictionaries (dicts are shared, not copied)
    for n in bunch:
        H.node[n] = self.node[n]
    # namespace shortcuts for speed
    H_adj = H.adj
    self_adj = self.adj
    # add nodes and edges (undirected method)
    for n in H.node:
        Hnbrs = H.adjlist_dict_factory()
        H_adj[n] = Hnbrs
        for nbr, d in self_adj[n].items():
            # an edge is only added once its *second* endpoint has been
            # given an adjacency dict; both directions are filled then,
            # so every kept edge appears exactly once per direction
            if nbr in H_adj:
                # add both representations of edge: n-nbr and nbr-n
                # (d is the original graph's edge dict, shared with H)
                Hnbrs[nbr] = d
                H_adj[nbr][n] = d
    # graph-level attribute dict is shared with the original graph
    H.graph = self.graph
    return H
def nodes_with_selfloops(self):
    """Return a list of nodes that have a self-loop edge.

    A node has a self-loop when it appears in its own adjacency dict.

    Returns
    -------
    nodelist : list
        Nodes with self loops, in internal storage order.
    """
    looped = []
    for n, nbrs in self.adj.items():
        if n in nbrs:
            looped.append(n)
    return looped
def selfloop_edges(self, data=False, default=None):
    """Return a list of self-loop edges (same node at both ends).

    Parameters
    ----------
    data : string or bool, optional (default=False)
        False -> (u, u) tuples; True -> (u, u, datadict);
        an attribute name -> (u, u, datadict.get(name, default)).
    default : value, optional (default=None)
        Fallback when *data* names an attribute the edge lacks.

    Returns
    -------
    edgelist : list of edge tuples

    See Also
    --------
    nodes_with_selfloops, number_of_selfloops
    """
    loops = [(n, nbrs) for n, nbrs in self.adj.items() if n in nbrs]
    if data is True:
        return [(n, n, nbrs[n]) for n, nbrs in loops]
    if data is not False:
        return [(n, n, nbrs[n].get(data, default)) for n, nbrs in loops]
    return [(n, n) for n, _ in loops]
def number_of_selfloops(self):
    """Return the number of selfloop edges.

    A selfloop edge has the same node at both ends.

    Returns
    -------
    nloops : int
        The number of selfloops.

    See Also
    --------
    nodes_with_selfloops, selfloop_edges

    Examples
    --------
    >>> G=nx.Graph()   # or DiGraph, MultiGraph, MultiDiGraph, etc
    >>> G.add_edge(1,1)
    >>> G.add_edge(1,2)
    >>> G.number_of_selfloops()
    1
    """
    # Delegate enumeration to selfloop_edges and count the result.
    loops = self.selfloop_edges()
    return len(loops)
def size(self, weight=None):
    """Return the number of edges.

    Parameters
    ----------
    weight : string or None, optional (default=None)
        The edge attribute that holds the numerical value used
        as a weight.  If None, then each edge has weight 1.

    Returns
    -------
    nedges : int
        The number of edges or sum of edge weights in the graph.

    See Also
    --------
    number_of_edges

    Examples
    --------
    >>> G = nx.Graph()   # or DiGraph, MultiGraph, MultiDiGraph, etc
    >>> G.add_path([0,1,2,3])
    >>> G.size()
    3

    >>> G = nx.Graph()   # or DiGraph, MultiGraph, MultiDiGraph, etc
    >>> G.add_edge('a','b',weight=2)
    >>> G.add_edge('b','c',weight=4)
    >>> G.size()
    2
    >>> G.size(weight='weight')
    6.0
    """
    # Each edge contributes to the degree of both of its endpoints,
    # so half the degree sum is the edge (or weight) total.
    degree_sum = sum(self.degree(weight=weight).values())
    half = degree_sum / 2
    # Unweighted counts are integral; weighted sums stay floats.
    return int(half) if weight is None else float(half)
def number_of_edges(self, u=None, v=None):
    """Return the number of edges between two nodes.

    Parameters
    ----------
    u,v : nodes, optional (default=all edges)
        If u and v are specified, return the number of edges between
        u and v.  Otherwise return the total number of all edges.

    Returns
    -------
    nedges : int
        The number of edges in the graph.  If nodes u and v are
        specified, return the number of edges between those nodes.

    See Also
    --------
    size

    Examples
    --------
    >>> G = nx.Graph()   # or DiGraph, MultiGraph, MultiDiGraph, etc
    >>> G.add_path([0,1,2,3])
    >>> G.number_of_edges()
    3
    >>> G.number_of_edges(0,1)
    1
    >>> e = (0,1)
    >>> G.number_of_edges(*e)
    1
    """
    # Without a node pair, fall back to the global edge count.
    if u is None:
        return int(self.size())
    # In a simple graph there is at most one edge between two nodes.
    return 1 if v in self.adj[u] else 0
def add_star(self, nodes, **attr):
    """Add a star.

    The first node in nodes is the middle of the star.  It is connected
    to all other nodes.

    Parameters
    ----------
    nodes : iterable container
        A container of nodes.  An empty container is a no-op.
    attr : keyword arguments, optional (default= no attributes)
        Attributes to add to every edge in star.

    See Also
    --------
    add_path, add_cycle

    Examples
    --------
    >>> G = nx.Graph()   # or DiGraph, MultiGraph, MultiDiGraph, etc
    >>> G.add_star([0,1,2,3])
    >>> G.add_star([10,11,12],weight=2)
    """
    nlist = list(nodes)
    if not nlist:
        # Robustness fix: an empty container previously raised a bare
        # IndexError from nlist[0]; treat it as "nothing to add".
        return
    hub = nlist[0]
    # Lazily pair the hub with every remaining node.
    edges = ((hub, spoke) for spoke in nlist[1:])
    self.add_edges_from(edges, **attr)
def add_path(self, nodes, **attr):
    """Add a path.

    Parameters
    ----------
    nodes : iterable container
        A container of nodes.  A path will be constructed from
        the nodes (in order) and added to the graph.
    attr : keyword arguments, optional (default= no attributes)
        Attributes to add to every edge in path.

    See Also
    --------
    add_star, add_cycle

    Examples
    --------
    >>> G=nx.Graph()    # or DiGraph, MultiGraph, MultiDiGraph, etc
    >>> G.add_path([0,1,2,3])
    >>> G.add_path([10,11,12],weight=7)
    """
    # Pair every node with its successor to form consecutive edges.
    nlist = list(nodes)
    edge_list = [(a, b) for a, b in zip(nlist, nlist[1:])]
    self.add_edges_from(edge_list, **attr)
def add_cycle(self, nodes, **attr):
    """Add a cycle.

    Parameters
    ----------
    nodes: iterable container
        A container of nodes.  A cycle will be constructed from
        the nodes (in order) and added to the graph.  An empty
        container is a no-op; a single node yields a self loop.
    attr : keyword arguments, optional (default= no attributes)
        Attributes to add to every edge in cycle.

    See Also
    --------
    add_path, add_star

    Examples
    --------
    >>> G=nx.Graph()    # or DiGraph, MultiGraph, MultiDiGraph, etc
    >>> G.add_cycle([0,1,2,3])
    >>> G.add_cycle([10,11,12],weight=7)
    """
    nlist = list(nodes)
    if not nlist:
        # Robustness fix: an empty container previously raised a bare
        # IndexError from nlist[0]; treat it as "nothing to add".
        return
    # Close the path by pairing the last node back to the first.
    edges = zip(nlist, nlist[1:] + [nlist[0]])
    self.add_edges_from(edges, **attr)
def nbunch_iter(self, nbunch=None):
    """Return an iterator of nodes contained in nbunch that are
    also in the graph.

    The nodes in nbunch are checked for membership in the graph
    and if not are silently ignored.

    Parameters
    ----------
    nbunch : iterable container, optional (default=all nodes)
        A container of nodes.  The container will be iterated
        through once.

    Returns
    -------
    niter : iterator
        An iterator over nodes in nbunch that are also in the graph.
        If nbunch is None, iterate over all nodes in the graph.

    Raises
    ------
    NetworkXError
        If nbunch is not a node or a sequence of nodes.
        If a node in nbunch is not hashable.

    See Also
    --------
    Graph.__iter__

    Notes
    -----
    When nbunch is an iterator, the returned iterator yields values
    directly from nbunch, becoming exhausted when nbunch is exhausted.

    To test whether nbunch is a single node, one can use
    "if nbunch in self:", even after processing with this routine.

    If nbunch is not a node or a (possibly empty) sequence/iterator
    or None, a NetworkXError is raised.  Also, if any object in
    nbunch is not hashable, a NetworkXError is raised.
    """
    if nbunch is None:   # include all nodes via iterator
        bunch = iter(self.adj.keys())
    elif nbunch in self: # if nbunch is a single node
        bunch = iter([nbunch])
    else:                # if nbunch is a sequence of nodes
        def bunch_iter(nlist, adj):
            # Lazily filter nlist down to graph members.  Errors are
            # only detected when the generator is actually consumed.
            try:
                for n in nlist:
                    if n in adj:
                        yield n
            except TypeError as e:
                # Distinguish the two failure modes by inspecting the
                # TypeError message text (fragile but long-standing
                # NetworkX behaviour -- the message wording comes from
                # the Python runtime).
                message = e.args[0]
                # capture error for non-sequence/iterator nbunch.
                if 'iter' in message:
                    raise NetworkXError(
                        "nbunch is not a node or a sequence of nodes.")
                # capture error for unhashable node.
                elif 'hashable' in message:
                    raise NetworkXError(
                        "Node %s in the sequence nbunch is not a valid node."%n)
                else:
                    raise
        bunch = bunch_iter(nbunch, self.adj)
    return bunch
| mit |
ckuethe/gnuradio | gr-wxgui/python/wxgui/plot.py | 74 | 71781 | #-----------------------------------------------------------------------------
# Name: wx.lib.plot.py
# Purpose: Line, Bar and Scatter Graphs
#
# Author: Gordon Williams
#
# Created: 2003/11/03
# RCS-ID: $Id$
# Copyright: (c) 2002,2007,2010
# Licence: Use as you wish.
#-----------------------------------------------------------------------------
# 12/15/2003 - Jeff Grimmett (grimmtooth@softhome.net)
#
# o 2.5 compatability update.
# o Renamed to plot.py in the wx.lib directory.
# o Reworked test frame to work with wx demo framework. This saves a bit
# of tedious cut and paste, and the test app is excellent.
#
# 12/18/2003 - Jeff Grimmett (grimmtooth@softhome.net)
#
# o wxScrolledMessageDialog -> ScrolledMessageDialog
#
# Oct 6, 2004 Gordon Williams (g_will@cyberus.ca)
# - Added bar graph demo
# - Modified line end shape from round to square.
# - Removed FloatDCWrapper for conversion to ints and ints in arguments
#
# Oct 15, 2004 Gordon Williams (g_will@cyberus.ca)
# - Imported modules given leading underscore to name.
# - Added Cursor Line Tracking and User Point Labels.
# - Demo for Cursor Line Tracking and Point Labels.
# - Size of plot preview frame adjusted to show page better.
# - Added helper functions PositionUserToScreen and PositionScreenToUser in PlotCanvas.
# - Added functions GetClosestPoints (all curves) and GetClosestPoint (only closest curve)
# can be in either user coords or screen coords.
#
# May 27, 2007 Johnathan Corgan (jcorgan@corganenterprises.com)
# - Converted from numarray to numpy
#
# Apr 23, 2010 Martin Dudok van Heel (http://www.olifantasia.com/gnuradio/contact_olifantasia.gif)
# - Added Persistence option (emulate after glow of an analog CRT display using IIR)
"""
This is a simple light weight plotting module that can be used with
Boa or easily integrated into your own wxPython application. The
emphasis is on small size and fast plotting for large data sets. It
has a reasonable number of features to do line and scatter graphs
easily as well as simple bar graphs. It is not as sophisticated or
as powerful as SciPy Plt or Chaco. Both of these are great packages
but consume huge amounts of computer resources for simple plots.
They can be found at http://scipy.com
This file contains two parts; first the re-usable library stuff, then,
after a "if __name__=='__main__'" test, a simple frame and a few default
plots for examples and testing.
Based on wxPlotCanvas
Written by K.Hinsen, R. Srinivasan;
Ported to wxPython Harm van der Heijden, feb 1999
Major Additions Gordon Williams Feb. 2003 (g_will@cyberus.ca)
-More style options
-Zooming using mouse 'rubber band'
-Scroll left, right
-Grid(graticule)
-Printing, preview, and page set up (margins)
-Axis and title labels
-Cursor xy axis values
-Doc strings and lots of comments
-Optimizations for large number of points
-Legends
Did a lot of work here to speed markers up. Only a factor of 4
improvement though. Lines are much faster than markers, especially
filled markers. Stay away from circles and triangles unless you
only have a few thousand points.
Times for 25,000 points
Line - 0.078 sec
Markers
Square - 0.22 sec
dot - 0.10
circle - 0.87
cross,plus - 0.28
triangle, triangle_down - 0.90
Thanks to Chris Barker for getting this version working on Linux.
Zooming controls with mouse (when enabled):
Left mouse drag - Zoom box.
Left mouse double click - reset zoom.
Right mouse click - zoom out centred on click location.
"""
import string as _string
import time as _time
import wx
# Needs numpy or numarray
try:
import numpy as _numpy
except:
try:
import numarray as _numpy #if numarray is used it is renamed numpy
except:
msg= """
This module requires the numpy or numarray module,
which could not be imported. It probably is not installed
(it's not part of the standard Python distribution). See the
Python site (http://www.python.org) for information on
downloading source or binaries."""
raise ImportError, "numpy or numarray not found. \n" + msg
#
# Plotting classes...
#
class PolyPoints:
    """Base Class for lines and markers.

    Stores the raw data points plus a scaled/shifted copy used when
    drawing.  Subclasses must define a ``_attributes`` dict of default
    style attributes.  All methods are private to the plot module.
    """

    def __init__(self, points, attr):
        """points: sequence (array, tuple or list) of (x,y) pairs.
        attr: dict of style overrides; every key must appear in the
        subclass's ``_attributes``, otherwise KeyError is raised.
        """
        self.points = _numpy.array(points)
        self.currentScale = (1, 1)
        self.currentShift = (0, 0)
        self.scaled = self.points
        self.attributes = {}
        self.attributes.update(self._attributes)
        for name, value in attr.items():
            if name not in self._attributes:
                # Parenthesised raise works on both py2 and py3
                # (original used the py2-only "raise KeyError, msg" form).
                raise KeyError("Style attribute incorrect. Should be one of %s" % self._attributes.keys())
            self.attributes[name] = value

    def boundingBox(self):
        """Return (minXY, maxXY) arrays over all points.

        With no points, defaults to (-1,-1)/(1,1); the axis can still
        be set explicitly in Draw.
        """
        if len(self.points) == 0:
            # no curves to draw
            minXY = _numpy.array([-1, -1])
            maxXY = _numpy.array([1, 1])
        else:
            minXY = _numpy.minimum.reduce(self.points)
            maxXY = _numpy.maximum.reduce(self.points)
        return minXY, maxXY

    def scaleAndShift(self, scale=(1, 1), shift=(0, 0)):
        """Cache ``scale*points + shift`` in self.scaled.

        NOTE: the identity ("is not") comparisons are deliberate --
        scale/shift may be numpy arrays, for which ``!=`` would not
        produce a single boolean.  A new array object therefore always
        triggers a recompute, which is correct (if redundant).
        """
        if len(self.points) == 0:
            # no curves to draw
            return
        if (scale is not self.currentScale) or (shift is not self.currentShift):
            # update point scaling
            self.scaled = scale * self.points + shift
            self.currentScale = scale
            self.currentShift = shift
        # else unchanged: reuse the current scaling

    def getLegend(self):
        """Return the legend string for this curve."""
        return self.attributes['legend']

    def getClosestPoint(self, pntXY, pointScaled=True):
        """Returns the index of closest point on the curve, pointXY,
        scaledXY, distance.

        x, y in user coords.
        if pointScaled == True, distance is based on screen coords;
        if pointScaled == False, distance is based on user coords.
        """
        if pointScaled == True:
            # Using screen coords
            p = self.scaled
            pxy = self.currentScale * _numpy.array(pntXY) + self.currentShift
        else:
            # Using user coords
            p = self.points
            pxy = _numpy.array(pntXY)
        # determine distance for each point
        d = _numpy.sqrt(_numpy.add.reduce((p - pxy) ** 2, 1))  # sqrt(dx^2+dy^2)
        pntIndex = _numpy.argmin(d)
        dist = d[pntIndex]
        return [pntIndex, self.points[pntIndex], self.scaled[pntIndex], dist]
class PolyLine(PolyPoints):
    """Class to define line type and style.

    - All methods except __init__ are private.
    """

    # Default style attributes; overridable per-instance via **attr.
    # NOTE(review): evaluated at class-creation time, so wx must be
    # importable when this module is loaded.
    _attributes = {'colour': 'black',
                   'width': 1,
                   'style': wx.SOLID,
                   'legend': ''}

    def __init__(self, points, **attr):
        """Creates PolyLine object

        points - sequence (array, tuple or list) of (x,y) points making up line
        **attr - key word attributes
            Defaults:
                'colour'= 'black',   - wx.Pen Colour any wx.NamedColour
                'width'= 1,          - Pen width
                'style'= wx.SOLID,   - wx.Pen style
                'legend'= ''         - Line Legend to display
        """
        PolyPoints.__init__(self, points, attr)

    def draw(self, dc, printerScale, coord= None):
        # Build a pen from the style attributes; width is scaled up for
        # printing via printerScale.
        colour = self.attributes['colour']
        width = self.attributes['width'] * printerScale
        style= self.attributes['style']
        pen = wx.Pen(wx.NamedColour(colour), width, style)
        # Square line ends (see file history: changed from round).
        pen.SetCap(wx.CAP_BUTT)
        dc.SetPen(pen)
        if coord == None:
            dc.DrawLines(self.scaled)
        else:
            dc.DrawLines(coord) # draw legend line

    def getSymExtent(self, printerScale):
        """Width and Height of Marker"""
        h= self.attributes['width'] * printerScale
        w= 5 * h
        return (w,h)
class PolyMarker(PolyPoints):
    """Class to define marker type and style.

    - All methods except __init__ are private.
    """

    # Default style attributes; overridable per-instance via **attr.
    _attributes = {'colour': 'black',
                   'width': 1,
                   'size': 2,
                   'fillcolour': None,
                   'fillstyle': wx.SOLID,
                   'marker': 'circle',
                   'legend': ''}

    def __init__(self, points, **attr):
        """Creates PolyMarker object

        points - sequence (array, tuple or list) of (x,y) points
        **attr - key word attributes
            Defaults:
                'colour'= 'black',            - wx.Pen Colour any wx.NamedColour
                'width'= 1,                   - Pen width
                'size'= 2,                    - Marker size
                'fillcolour'= same as colour, - wx.Brush Colour any wx.NamedColour
                'fillstyle'= wx.SOLID,        - wx.Brush fill style (use wx.TRANSPARENT for no fill)
                'marker'= 'circle'            - Marker shape
                'legend'= ''                  - Marker Legend to display

            Marker Shapes:
                - 'circle'
                - 'dot'
                - 'square'
                - 'triangle'
                - 'triangle_down'
                - 'cross'
                - 'plus'
        """
        PolyPoints.__init__(self, points, attr)

    def draw(self, dc, printerScale, coord= None):
        """Render the markers into dc; coord overrides the point data
        (used when drawing the legend sample)."""
        colour = self.attributes['colour']
        width = self.attributes['width'] * printerScale
        size = self.attributes['size'] * printerScale
        fillcolour = self.attributes['fillcolour']
        fillstyle = self.attributes['fillstyle']
        marker = self.attributes['marker']

        dc.SetPen(wx.Pen(wx.NamedColour(colour), width))
        if fillcolour:
            dc.SetBrush(wx.Brush(wx.NamedColour(fillcolour), fillstyle))
        else:
            # Fill colour defaults to the outline colour.
            dc.SetBrush(wx.Brush(wx.NamedColour(colour), fillstyle))
        if coord == None:
            self._drawmarkers(dc, self.scaled, marker, size)
        else:
            self._drawmarkers(dc, coord, marker, size)  # draw legend marker

    def getSymExtent(self, printerScale):
        """Width and Height of Marker"""
        s = 5 * self.attributes['size'] * printerScale
        return (s, s)

    def _drawmarkers(self, dc, coords, marker, size=1):
        # Dispatch to the drawing helper named "_<marker>".
        # getattr replaces the original eval('self._' + marker): same
        # behaviour for the valid shapes, without evaluating an
        # arbitrary expression built from an attribute string.
        f = getattr(self, '_' + marker)
        f(dc, coords, size)

    def _circle(self, dc, coords, size=1):
        fact = 2.5 * size
        wh = 5.0 * size
        # Builtin float is identical to the old _numpy.float alias,
        # which was removed in NumPy 1.24.
        rect = _numpy.zeros((len(coords), 4), float) + [0.0, 0.0, wh, wh]
        rect[:, 0:2] = coords - [fact, fact]
        dc.DrawEllipseList(rect.astype(_numpy.int32))

    def _dot(self, dc, coords, size=1):
        dc.DrawPointList(coords)

    def _square(self, dc, coords, size=1):
        fact = 2.5 * size
        wh = 5.0 * size
        rect = _numpy.zeros((len(coords), 4), float) + [0.0, 0.0, wh, wh]
        rect[:, 0:2] = coords - [fact, fact]
        dc.DrawRectangleList(rect.astype(_numpy.int32))

    def _triangle(self, dc, coords, size=1):
        shape = [(-2.5*size, 1.44*size), (2.5*size, 1.44*size), (0.0, -2.88*size)]
        poly = _numpy.repeat(coords, 3)
        poly.shape = (len(coords), 3, 2)
        poly += shape
        dc.DrawPolygonList(poly.astype(_numpy.int32))

    def _triangle_down(self, dc, coords, size=1):
        shape = [(-2.5*size, -1.44*size), (2.5*size, -1.44*size), (0.0, 2.88*size)]
        poly = _numpy.repeat(coords, 3)
        poly.shape = (len(coords), 3, 2)
        poly += shape
        dc.DrawPolygonList(poly.astype(_numpy.int32))

    def _cross(self, dc, coords, size=1):
        fact = 2.5 * size
        # Two diagonal strokes per point.
        for f in [[-fact, -fact, fact, fact], [-fact, fact, fact, -fact]]:
            lines = _numpy.concatenate((coords, coords), axis=1) + f
            dc.DrawLineList(lines.astype(_numpy.int32))

    def _plus(self, dc, coords, size=1):
        fact = 2.5 * size
        # One horizontal and one vertical stroke per point.
        for f in [[-fact, 0, fact, 0], [0, -fact, 0, fact]]:
            lines = _numpy.concatenate((coords, coords), axis=1) + f
            dc.DrawLineList(lines.astype(_numpy.int32))
class PlotGraphics:
    """Container to hold PolyXXX objects and graph labels.

    - All methods except __init__ are private.
    """

    def __init__(self, objects, title='', xLabel='', yLabel= ''):
        """Creates PlotGraphics object

        objects - list of PolyXXX objects to make graph
        title   - title shown at top of graph
        xLabel  - label shown on x-axis
        yLabel  - label shown on y-axis

        Raises TypeError if objects is not a list or tuple.
        """
        if type(objects) not in [list, tuple]:
            # Parenthesised raise is py2- and py3-compatible
            # (original used the py2-only "raise TypeError, msg" form).
            raise TypeError("objects argument should be list or tuple")
        self.objects = objects
        self.title= title
        self.xLabel= xLabel
        self.yLabel= yLabel

    def boundingBox(self):
        """Union of the bounding boxes of every contained object."""
        p1, p2 = self.objects[0].boundingBox()
        for o in self.objects[1:]:
            p1o, p2o = o.boundingBox()
            p1 = _numpy.minimum(p1, p1o)
            p2 = _numpy.maximum(p2, p2o)
        return p1, p2

    def scaleAndShift(self, scale=(1,1), shift=(0,0)):
        """Forward the scaling parameters to every contained object."""
        for o in self.objects:
            o.scaleAndShift(scale, shift)

    def setPrinterScale(self, scale):
        """Thickens up lines and markers only for printing"""
        self.printerScale= scale

    def setXLabel(self, xLabel= ''):
        """Set the X axis label on the graph"""
        self.xLabel= xLabel

    def setYLabel(self, yLabel= ''):
        """Set the Y axis label on the graph"""
        self.yLabel= yLabel

    def setTitle(self, title= ''):
        """Set the title at the top of graph"""
        self.title= title

    def getXLabel(self):
        """Get x axis label string"""
        return self.xLabel

    def getYLabel(self):
        """Get y axis label string"""
        return self.yLabel

    def getTitle(self, title= ''):
        """Get the title at the top of graph.

        NOTE(review): the ``title`` parameter is unused; kept for
        backward compatibility with existing callers.
        """
        return self.title

    def draw(self, dc):
        """Draw every contained object into device context dc."""
        for o in self.objects:
            #t=_time.clock()          # profile info
            o.draw(dc, self.printerScale)
            #dt= _time.clock()-t
            #print o, "time=", dt

    def getSymExtent(self, printerScale):
        """Get max width and height of lines and markers symbols for legend"""
        symExt = self.objects[0].getSymExtent(printerScale)
        for o in self.objects[1:]:
            oSymExt = o.getSymExtent(printerScale)
            symExt = _numpy.maximum(symExt, oSymExt)
        return symExt

    def getLegendNames(self):
        """Returns list of legend names"""
        lst = [None]*len(self)
        for i in range(len(self)):
            lst[i]= self.objects[i].getLegend()
        return lst

    def __len__(self):
        return len(self.objects)

    def __getitem__(self, item):
        return self.objects[item]
#-------------------------------------------------------------------------------
# Main window that you will want to import into your application.
class PlotCanvas(wx.Window):
"""Subclass of a wx.Window to allow simple general plotting
of data with zoom, labels, and automatic axis scaling."""
def __init__(self, parent, id = -1, pos=wx.DefaultPosition,
        size=wx.DefaultSize, style= wx.DEFAULT_FRAME_STYLE, name= ""):
    # Persistence (analog-CRT after-glow, IIR-filtered) settings used
    # by Draw(); see set_use_persistence / set_persist_alpha.
    self.use_persistence=False
    self.alpha=0.3
    self.decimation=10
    self.decim_counter=0
    # NOTE(review): the string below is NOT a real docstring -- it is
    # not the first statement of the method, so it is a no-op literal.
    """Constucts a window, which can be a child of a frame, dialog or
    any other non-control window"""

    wx.Window.__init__(self, parent, id, pos, size, style, name)
    self.border = (1,1)

    self.SetBackgroundColour("white")

    # Create some mouse events for zooming
    self.Bind(wx.EVT_LEFT_DOWN, self.OnMouseLeftDown)
    self.Bind(wx.EVT_LEFT_UP, self.OnMouseLeftUp)
    self.Bind(wx.EVT_MOTION, self.OnMotion)
    self.Bind(wx.EVT_LEFT_DCLICK, self.OnMouseDoubleClick)
    self.Bind(wx.EVT_RIGHT_DOWN, self.OnMouseRightDown)

    # set curser as cross-hairs
    self.SetCursor(wx.CROSS_CURSOR)

    # Things for printing
    self.print_data = wx.PrintData()
    self.print_data.SetPaperId(wx.PAPER_LETTER)
    self.print_data.SetOrientation(wx.LANDSCAPE)
    self.pageSetupData= wx.PageSetupDialogData()
    self.pageSetupData.SetMarginBottomRight((25,25))
    self.pageSetupData.SetMarginTopLeft((25,25))
    self.pageSetupData.SetPrintData(self.print_data)
    self.printerScale = 1
    self.parent= parent

    # Zooming variables
    self._zoomInFactor =  0.5
    self._zoomOutFactor = 2
    self._zoomCorner1= _numpy.array([0.0, 0.0]) # left mouse down corner
    self._zoomCorner2= _numpy.array([0.0, 0.0]) # left mouse up corner
    self._zoomEnabled= False
    self._hasDragged= False

    # Drawing Variables
    self.last_draw = None       # (graphics, xAxis, yAxis) of last Draw
    self._pointScale= 1
    self._pointShift= 0
    self._xSpec= 'auto'
    self._ySpec= 'auto'
    self._gridEnabled= False
    self._legendEnabled= False
    self._xUseScopeTicks= False

    # Fonts
    self._fontCache = {}
    self._fontSizeAxis= 10
    self._fontSizeTitle= 15
    self._fontSizeLegend= 7

    # pointLabels
    self._pointLabelEnabled= False
    self.last_PointLabel= None
    self._pointLabelFunc= None
    self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeave)

    self.Bind(wx.EVT_PAINT, self.OnPaint)
    self.Bind(wx.EVT_SIZE, self.OnSize)
    # OnSize called to make sure the buffer is initialized.
    # This might result in OnSize getting called twice on some
    # platforms at initialization, but little harm done.
    self.OnSize(None) # sets the initial size based on client size
                      # UNCONDITIONAL, needed to create self._Buffer
def set_use_persistence(self, enable):
    # Enable/disable the analog-CRT persistence (after-glow) emulation
    # applied in Draw()'s IIR-filter branch.
    self.use_persistence = enable

def set_persist_alpha(self, persist_alpha):
    # IIR filter coefficient for persistence: larger alpha weights the
    # newest frame more heavily (shorter glow).
    self.alpha = persist_alpha
# SaveFile
def SaveFile(self, fileName= ''):
    """Saves the file to the type specified in the extension.  If no
    file name is specified a dialog box is provided.  Returns True if
    successful, otherwise False.

    .bmp  Save a Windows bitmap file.
    .xbm  Save an X bitmap file.
    .xpm  Save an XPM bitmap file.
    .png  Save a Portable Network Graphics file.
    .jpg  Save a Joint Photographic Experts Group file.
    """
    # Unknown/missing extension: ask the user via a file dialog, and
    # loop until a valid extension is chosen or the dialog is cancelled.
    if _string.lower(fileName[-3:]) not in ['bmp','xbm','xpm','png','jpg']:
        dlg1 = wx.FileDialog(
                self,
                "Choose a file with extension bmp, gif, xbm, xpm, png, or jpg", ".", "",
                "BMP files (*.bmp)|*.bmp|XBM files (*.xbm)|*.xbm|XPM file (*.xpm)|*.xpm|PNG files (*.png)|*.png|JPG files (*.jpg)|*.jpg",
                wx.SAVE|wx.OVERWRITE_PROMPT
                )
        try:
            while 1:
                if dlg1.ShowModal() == wx.ID_OK:
                    fileName = dlg1.GetPath()
                    # Check for proper exension
                    if _string.lower(fileName[-3:]) not in ['bmp','xbm','xpm','png','jpg']:
                        dlg2 = wx.MessageDialog(self, 'File name extension\n'
                        'must be one of\n'
                        'bmp, xbm, xpm, png, or jpg',
                        'File Name Error', wx.OK | wx.ICON_ERROR)
                        try:
                            dlg2.ShowModal()
                        finally:
                            dlg2.Destroy()
                    else:
                        break # now save file
                else: # exit without saving
                    return False
        finally:
            dlg1.Destroy()

    # File name has required extension
    fType = _string.lower(fileName[-3:])
    if fType == "bmp":
        tp= wx.BITMAP_TYPE_BMP       # Save a Windows bitmap file.
    elif fType == "xbm":
        tp= wx.BITMAP_TYPE_XBM       # Save an X bitmap file.
    elif fType == "xpm":
        tp= wx.BITMAP_TYPE_XPM       # Save an XPM bitmap file.
    elif fType == "jpg":
        tp= wx.BITMAP_TYPE_JPEG      # Save a JPG file.
    else:
        tp= wx.BITMAP_TYPE_PNG       # Save a PNG file.
    # Save Bitmap: writes the current off-screen buffer, i.e. exactly
    # what is displayed.
    res= self._Buffer.SaveFile(fileName, tp)
    return res
def PageSetup(self):
    """Brings up the page setup dialog"""
    data = self.pageSetupData
    data.SetPrintData(self.print_data)
    dlg = wx.PageSetupDialog(self.parent, data)
    try:
        if dlg.ShowModal() == wx.ID_OK:
            data = dlg.GetPageSetupData() # returns wx.PageSetupDialogData
            # updates page parameters from dialog
            self.pageSetupData.SetMarginBottomRight(data.GetMarginBottomRight())
            self.pageSetupData.SetMarginTopLeft(data.GetMarginTopLeft())
            self.pageSetupData.SetPrintData(data.GetPrintData())
            self.print_data=data.GetPrintData() # updates print_data
    finally:
        # Always destroy the modal dialog, even if ShowModal raised.
        dlg.Destroy()
def Printout(self, paper=None):
    """Print current plot.

    paper: optional wx paper id (e.g. wx.PAPER_LETTER) overriding the
    stored print settings.
    """
    if paper != None:
        self.print_data.SetPaperId(paper)
    pdd = wx.PrintDialogData()
    pdd.SetPrintData(self.print_data)
    printer = wx.Printer(pdd)
    out = PlotPrintout(self)
    print_ok = printer.Print(self.parent, out)
    if print_ok:
        # Keep any settings the user changed in the print dialog.
        self.print_data = printer.GetPrintDialogData().GetPrintData()
    out.Destroy()
def PrintPreview(self):
    """Print-preview current plot."""
    # wx.PrintPreview needs two printout objects: one for previewing,
    # one for the actual print command issued from the preview frame.
    printout = PlotPrintout(self)
    printout2 = PlotPrintout(self)
    self.preview = wx.PrintPreview(printout, printout2, self.print_data)
    if not self.preview.Ok():
        wx.MessageDialog(self, "Print Preview failed.\n" \
                               "Check that default printer is configured\n", \
                         "Print error", wx.OK|wx.CENTRE).ShowModal()

    self.preview.SetZoom(40)
    # search up tree to find frame instance
    frameInst= self
    while not isinstance(frameInst, wx.Frame):
        frameInst= frameInst.GetParent()
    frame = wx.PreviewFrame(self.preview, frameInst, "Preview")
    frame.Initialize()
    frame.SetPosition(self.GetPosition())
    frame.SetSize((600,550))
    frame.Centre(wx.BOTH)
    frame.Show(True)
# ---- simple style/behaviour accessors --------------------------------
def SetFontSizeAxis(self, point= 10):
    """Set the tick and axis label font size (default is 10 point)"""
    self._fontSizeAxis= point

def GetFontSizeAxis(self):
    """Get current tick and axis label font size in points"""
    return self._fontSizeAxis

def SetFontSizeTitle(self, point= 15):
    """Set Title font size (default is 15 point)"""
    self._fontSizeTitle= point

def GetFontSizeTitle(self):
    """Get current Title font size in points"""
    return self._fontSizeTitle

def SetFontSizeLegend(self, point= 7):
    """Set Legend font size (default is 7 point)"""
    self._fontSizeLegend= point

def GetFontSizeLegend(self):
    """Get current Legend font size in points"""
    return self._fontSizeLegend

def SetEnableZoom(self, value):
    """Set True to enable zooming."""
    # NOTE(review): the py2-style "raise TypeError, msg" statements in
    # these setters match the rest of this (wxPython/py2) module.
    if value not in [True,False]:
        raise TypeError, "Value should be True or False"
    self._zoomEnabled= value

def GetEnableZoom(self):
    """True if zooming enabled."""
    return self._zoomEnabled

def SetEnableGrid(self, value):
    """Set True to enable grid.  Triggers a redraw."""
    if value not in [True,False]:
        raise TypeError, "Value should be True or False"
    self._gridEnabled= value
    self.Redraw()

def GetEnableGrid(self):
    """True if grid enabled."""
    return self._gridEnabled

def SetEnableLegend(self, value):
    """Set True to enable legend.  Triggers a redraw."""
    if value not in [True,False]:
        raise TypeError, "Value should be True or False"
    self._legendEnabled= value
    self.Redraw()

def GetEnableLegend(self):
    """True if Legend enabled."""
    return self._legendEnabled

def SetEnablePointLabel(self, value):
    """Set True to enable pointLabel.  Triggers a redraw."""
    if value not in [True,False]:
        raise TypeError, "Value should be True or False"
    self._pointLabelEnabled= value
    self.Redraw()  # will erase existing pointLabel if present
    self.last_PointLabel = None

def GetEnablePointLabel(self):
    """True if pointLabel enabled."""
    return self._pointLabelEnabled

def SetPointLabelFunc(self, func):
    """Sets the function with custom code for pointLabel drawing
        ******** more info needed ***************
    """
    self._pointLabelFunc= func

def GetPointLabelFunc(self):
    """Returns pointLabel Drawing Function"""
    return self._pointLabelFunc
def Reset(self):
    """Unzoom the plot."""
    self.last_PointLabel = None   # reset pointLabel
    if self.last_draw is not None:
        # Redraw from the stored graphics with auto axis ranges,
        # discarding any zoom window.
        self.Draw(self.last_draw[0])

def ScrollRight(self, units):
    """Move view right number of axis units."""
    self.last_PointLabel = None   # reset pointLabel
    if self.last_draw is not None:
        graphics, xAxis, yAxis= self.last_draw
        # Shift the stored x range; negative units scroll left.
        xAxis= (xAxis[0]+units, xAxis[1]+units)
        self.Draw(graphics,xAxis,yAxis)

def ScrollUp(self, units):
    """Move view up number of axis units."""
    self.last_PointLabel = None   # reset pointLabel
    if self.last_draw is not None:
        graphics, xAxis, yAxis= self.last_draw
        # Shift the stored y range; negative units scroll down.
        yAxis= (yAxis[0]+units, yAxis[1]+units)
        self.Draw(graphics,xAxis,yAxis)
def GetXY(self,event):
    """Takes a mouse event and returns the XY user axis values."""
    x,y= self.PositionScreenToUser(event.GetPosition())
    return x,y

def PositionUserToScreen(self, pntXY):
    """Converts User position to Screen Coordinates"""
    userPos= _numpy.array(pntXY)
    # _pointScale/_pointShift are set by Draw(); identity before the
    # first Draw call.
    x,y= userPos * self._pointScale + self._pointShift
    return x,y

def PositionScreenToUser(self, pntXY):
    """Converts Screen position to User Coordinates"""
    screenPos= _numpy.array(pntXY)
    x,y= (screenPos-self._pointShift)/self._pointScale
    return x,y

def SetXSpec(self, type= 'auto'):
    """xSpec- defines x axis type. Can be 'none', 'min' or 'auto'
    where:
        'none' - shows no axis or tick mark values
        'min'  - shows min bounding box values
        'auto' - rounds axis range to sensible values
    """
    # NOTE(review): the parameter name shadows the builtin "type";
    # kept for caller (keyword argument) compatibility.
    self._xSpec= type

def SetYSpec(self, type= 'auto'):
    """ySpec- defines y axis type. Can be 'none', 'min' or 'auto'
    where:
        'none' - shows no axis or tick mark values
        'min'  - shows min bounding box values
        'auto' - rounds axis range to sensible values
    """
    self._ySpec= type

def GetXSpec(self):
    """Returns current XSpec for axis"""
    return self._xSpec

def GetYSpec(self):
    """Returns current YSpec for axis"""
    return self._ySpec

def GetXMaxRange(self):
    """Returns (minX, maxX) x-axis range for displayed graph"""
    # Requires a previous Draw(): last_draw must be populated.
    graphics= self.last_draw[0]
    p1, p2 = graphics.boundingBox()     # min, max points of graphics
    xAxis = self._axisInterval(self._xSpec, p1[0], p2[0]) # in user units
    return xAxis

def GetYMaxRange(self):
    """Returns (minY, maxY) y-axis range for displayed graph"""
    graphics= self.last_draw[0]
    p1, p2 = graphics.boundingBox()     # min, max points of graphics
    yAxis = self._axisInterval(self._ySpec, p1[1], p2[1])
    return yAxis

def GetXCurrentRange(self):
    """Returns (minX, maxX) x-axis for currently displayed portion of graph"""
    return self.last_draw[1]

def GetYCurrentRange(self):
    """Returns (minY, maxY) y-axis for currently displayed portion of graph"""
    return self.last_draw[2]

def SetXUseScopeTicks(self, v=False):
    """Always 10 divisions, no labels"""
    self._xUseScopeTicks = v

def GetXUseScopeTicks(self):
    """Return True if scope-style x ticks are enabled."""
    return self._xUseScopeTicks
def Draw(self, graphics, xAxis = None, yAxis = None, dc = None, step=None):
    """Draw objects in graphics with specified x and y axis.

    graphics - instance of PlotGraphics with list of PolyXXX objects
    xAxis    - tuple with (min, max) axis range to view
    yAxis    - same as xAxis
    dc       - drawing context - doesn't have to be specified.
               If it's not, the offscreen buffer is used
    step     - optional y-tick step forwarded to self._ticks

    Fixes vs. original: the '_xSpec'/'_ySpec' comparisons against
    'none' used "is not" (identity), which only worked because CPython
    interns short string literals; they are now proper "!=" tests.
    The two py2-only "raise TypeError, msg" statements were changed to
    the parenthesised form, valid on both py2 and py3.
    """
    # check Axis is either tuple or none
    if type(xAxis) not in [type(None),tuple]:
        raise TypeError("xAxis should be None or (minX,maxX)")
    if type(yAxis) not in [type(None),tuple]:
        raise TypeError("yAxis should be None or (minY,maxY)")

    # check case for axis = (a,b) where a==b caused by improper zooms
    if xAxis != None:
        if xAxis[0] == xAxis[1]:
            return
    if yAxis != None:
        if yAxis[0] == yAxis[1]:
            return

    if dc == None:
        # sets new dc and clears it
        if self.use_persistence:
            dc = wx.MemoryDC()
            dc.SelectObject(self._Buffer)
            dc.Clear()
        else:
            dc = wx.BufferedDC(wx.ClientDC(self), self._Buffer)
            dc.Clear()

    dc.BeginDrawing()
    # dc.Clear()

    # set font size for every thing but title and legend
    dc.SetFont(self._getFont(self._fontSizeAxis))

    # sizes axis to axis type, create lower left and upper right corners of plot
    if xAxis == None or yAxis == None:
        # One or both axis not specified in Draw
        p1, p2 = graphics.boundingBox()     # min, max points of graphics
        if xAxis == None:
            xAxis = self._axisInterval(self._xSpec, p1[0], p2[0]) # in user units
        if yAxis == None:
            yAxis = self._axisInterval(self._ySpec, p1[1], p2[1])
        # Adjust bounding box for axis spec
        p1[0],p1[1] = xAxis[0], yAxis[0]    # lower left corner user scale (xmin,ymin)
        p2[0],p2[1] = xAxis[1], yAxis[1]    # upper right corner user scale (xmax,ymax)
    else:
        # Both axis specified in Draw
        p1= _numpy.array([xAxis[0], yAxis[0]]) # lower left corner user scale (xmin,ymin)
        p2= _numpy.array([xAxis[1], yAxis[1]]) # upper right corner user scale (xmax,ymax)

    self.last_draw = (graphics, xAxis, yAxis)  # saves most recent values

    # Disabled experimental background-fill block, kept for reference.
    if False:
        ptx,pty,rectWidth,rectHeight= self._point2ClientCoord(p1, p2)
        #dc.SetPen(wx.Pen(wx.BLACK))
        dc.SetBrush(wx.Brush( wx.BLACK, wx.SOLID ) ) #wx.SOLID wx.TRANSPARENT ) )
        #dc.SetLogicalFunction(wx.INVERT) #wx.XOR wx.INVERT
        dc.DrawRectangle( ptx,pty, rectWidth,rectHeight)
        #dc.SetBrush(wx.Brush( wx.WHITE, wx.SOLID ) )
        #dc.SetLogicalFunction(wx.COPY)

    # Get ticks and textExtents for axis if required
    if self._xSpec != 'none':
        if self._xUseScopeTicks:
            xticks = self._scope_ticks(xAxis[0], xAxis[1])
        else:
            xticks = self._ticks(xAxis[0], xAxis[1])
        xTextExtent = dc.GetTextExtent(xticks[-1][1]) # w h of x axis text last number on axis
    else:
        xticks = None
        xTextExtent= (0,0) # No text for ticks
    if self._ySpec != 'none':
        yticks = self._ticks(yAxis[0], yAxis[1], step)
        yTextExtentBottom= dc.GetTextExtent(yticks[0][1])
        yTextExtentTop   = dc.GetTextExtent(yticks[-1][1])
        yTextExtent= (max(yTextExtentBottom[0],yTextExtentTop[0]),
                      max(yTextExtentBottom[1],yTextExtentTop[1]))
    else:
        yticks = None
        yTextExtent= (0,0) # No text for ticks

    # TextExtents for Title and Axis Labels
    titleWH, xLabelWH, yLabelWH= self._titleLablesWH(dc, graphics)

    # TextExtents for Legend
    legendBoxWH, legendSymExt, legendTextExt = self._legendWH(dc, graphics)

    # room around graph area
    rhsW= max(xTextExtent[0], legendBoxWH[0]) # use larger of number width or legend width
    lhsW= yTextExtent[0]+ yLabelWH[1]
    bottomH= max(xTextExtent[1], yTextExtent[1]/2.)+ xLabelWH[1]
    topH= yTextExtent[1]/2. + titleWH[1]
    textSize_scale= _numpy.array([rhsW+lhsW,bottomH+topH]) # make plot area smaller by text size
    textSize_shift= _numpy.array([lhsW, bottomH])          # shift plot area by this amount

    # drawing title and labels text
    dc.SetFont(self._getFont(self._fontSizeTitle))
    titlePos= (self.plotbox_origin[0]+ lhsW + (self.plotbox_size[0]-lhsW-rhsW)/2.- titleWH[0]/2.,
               self.plotbox_origin[1]- self.plotbox_size[1])
    dc.DrawText(graphics.getTitle(),titlePos[0],titlePos[1])
    dc.SetFont(self._getFont(self._fontSizeAxis))
    xLabelPos= (self.plotbox_origin[0]+ lhsW + (self.plotbox_size[0]-lhsW-rhsW)/2.- xLabelWH[0]/2.,
                self.plotbox_origin[1]- xLabelWH[1])
    dc.DrawText(graphics.getXLabel(),xLabelPos[0],xLabelPos[1])
    yLabelPos= (self.plotbox_origin[0],
                self.plotbox_origin[1]- bottomH- (self.plotbox_size[1]-bottomH-topH)/2.+ yLabelWH[0]/2.)
    if graphics.getYLabel():  # bug fix for Linux
        dc.DrawRotatedText(graphics.getYLabel(),yLabelPos[0],yLabelPos[1],90)

    # drawing legend makers and text
    if self._legendEnabled:
        self._drawLegend(dc,graphics,rhsW,topH,legendBoxWH, legendSymExt, legendTextExt)

    # allow for scaling and shifting plotted points
    scale = (self.plotbox_size-textSize_scale) / (p2-p1)* _numpy.array((1,-1))
    shift = -p1*scale + self.plotbox_origin + textSize_shift * _numpy.array((1,-1))
    self._pointScale= scale  # make available for mouse events
    self._pointShift= shift
    #dc.SetLogicalFunction(wx.INVERT) #wx.XOR wx.INVERT
    self._drawAxes(dc, p1, p2, scale, shift, xticks, yticks)
    #dc.SetLogicalFunction(wx.COPY)

    graphics.scaleAndShift(scale, shift)
    graphics.setPrinterScale(self.printerScale)  # thicken up lines and markers if printing

    # set clipping area so drawing does not occur outside axis box
    ptx,pty,rectWidth,rectHeight= self._point2ClientCoord(p1, p2)
    dc.SetClippingRegion(ptx,pty,rectWidth,rectHeight)
    # Draw the lines and markers
    #start = _time.clock()
    graphics.draw(dc)
    # print "entire graphics drawing took: %f second"%(_time.clock() - start)
    # remove the clipping region
    dc.DestroyClippingRegion()
    dc.EndDrawing()

    if self.use_persistence:
        # Emulate an analog CRT's after-glow: blend the newly drawn
        # frame (Bufferarray) into the accumulated frame (Buffer2array)
        # with an integer IIR filter, then blit the result.
        dc=None
        self._Buffer.CopyToBuffer(self._Bufferarray) #, format=wx.BitmapBufferFormat_RGB, stride=-1)
        ## do the IIR filter
        alpha_int=int(float(self.alpha*256))
        if True:
            # In-place integer arithmetic:
            # out = (new*alpha + old*(256-alpha)) >> 8
            _numpy.add(self._Bufferarray,0,self._Buffer3array)
            _numpy.multiply(self._Buffer3array,alpha_int,self._Buffer3array)
            _numpy.multiply(self._Buffer2array,(256-alpha_int),self._Buffer2array)
            _numpy.add(self._Buffer3array,self._Buffer2array,self._Buffer2array)
            _numpy.right_shift(self._Buffer2array,8,self._Buffer2array)
        elif False:
            # Alternative (allocating) formulations kept for reference.
            self._Buffer2array=(self._Bufferarray.astype(_numpy.uint32) *alpha_int + self._Buffer2array*(256-alpha_int)).__rshift__(8)
        elif False:
            self._Buffer2array *=(256-alpha_int)
            self._Buffer2array +=self._Bufferarray.astype(_numpy.uint32)*alpha_int
            self._Buffer2array /=256
        ##copy back to image buffer
        self._Buffer2.CopyFromBuffer(self._Buffer2array.astype(_numpy.uint8)) #, format=wx.BitmapBufferFormat_RGB, stride=-1)

        #draw to the screen
        #self.decim_counter=self.decim_counter+1
        if True: #self.decim_counter>self.decimation:
            #self.decim_counter=0
            dc2 = wx.ClientDC( self )
            dc2.BeginDrawing()
            dc2.DrawBitmap(self._Buffer2, 0, 0, False)
            #dc2.DrawBitmap(self._Buffer, 0, 0, False)
            dc2.EndDrawing()
def Redraw(self, dc= None):
"""Redraw the existing plot."""
if self.last_draw is not None:
graphics, xAxis, yAxis= self.last_draw
self.Draw(graphics,xAxis,yAxis,dc)
    def Clear(self):
        """Erase the window."""
        # Forget any hovering point label and wipe the buffered client area.
        self.last_PointLabel = None        #reset pointLabel
        dc = wx.BufferedDC(wx.ClientDC(self), self._Buffer)
        dc.Clear()
        # No plot is remembered any more, so Redraw() becomes a no-op.
        self.last_draw = None
def Zoom(self, Center, Ratio):
""" Zoom on the plot
Centers on the X,Y coords given in Center
Zooms by the Ratio = (Xratio, Yratio) given
"""
self.last_PointLabel = None #reset maker
x,y = Center
if self.last_draw != None:
(graphics, xAxis, yAxis) = self.last_draw
w = (xAxis[1] - xAxis[0]) * Ratio[0]
h = (yAxis[1] - yAxis[0]) * Ratio[1]
xAxis = ( x - w/2, x + w/2 )
yAxis = ( y - h/2, y + h/2 )
self.Draw(graphics, xAxis, yAxis)
def GetClosestPoints(self, pntXY, pointScaled= True):
"""Returns list with
[curveNumber, legend, index of closest point, pointXY, scaledXY, distance]
list for each curve.
Returns [] if no curves are being plotted.
x, y in user coords
if pointScaled == True based on screen coords
if pointScaled == False based on user coords
"""
if self.last_draw == None:
#no graph available
return []
graphics, xAxis, yAxis= self.last_draw
l = []
for curveNum,obj in enumerate(graphics):
#check there are points in the curve
if len(obj.points) == 0:
continue #go to next obj
#[curveNumber, legend, index of closest point, pointXY, scaledXY, distance]
cn = [curveNum]+ [obj.getLegend()]+ obj.getClosestPoint( pntXY, pointScaled)
l.append(cn)
return l
def GetClosetPoint(self, pntXY, pointScaled= True):
"""Returns list with
[curveNumber, legend, index of closest point, pointXY, scaledXY, distance]
list for only the closest curve.
Returns [] if no curves are being plotted.
x, y in user coords
if pointScaled == True based on screen coords
if pointScaled == False based on user coords
"""
#closest points on screen based on screen scaling (pointScaled= True)
#list [curveNumber, index, pointXY, scaledXY, distance] for each curve
closestPts= self.GetClosestPoints(pntXY, pointScaled)
if closestPts == []:
return [] #no graph present
#find one with least distance
dists = [c[-1] for c in closestPts]
mdist = min(dists) #Min dist
i = dists.index(mdist) #index for min dist
return closestPts[i] #this is the closest point on closest curve
    def UpdatePointLabel(self, mDataDict):
        """Updates the pointLabel point on screen with data contained in
            mDataDict.
            mDataDict will be passed to your function set by
            SetPointLabelFunc. It can contain anything you
            want to display on the screen at the scaledXY point
            you specify.
            This function can be called from parent window with onClick,
            onMotion events etc.
        """
        # _drawPointLabel() blits with an inverting raster op, so drawing the
        # same label a second time erases it (see _drawPointLabel).
        if self.last_PointLabel != None:
            #compare pointXY
            if mDataDict["pointXY"] != self.last_PointLabel["pointXY"]:
                #closest changed
                self._drawPointLabel(self.last_PointLabel) #erase old
                self._drawPointLabel(mDataDict) #plot new
        else:
            #just plot new with no erase
            self._drawPointLabel(mDataDict) #plot new
        #save for next erase
        self.last_PointLabel = mDataDict
# event handlers **********************************
    def OnMotion(self, event):
        # Rubber-band zoom in progress: while the left button is held down,
        # erase the previous band (the draw is self-inverting) and draw the
        # band for the new cursor position.
        if self._zoomEnabled and event.LeftIsDown():
            if self._hasDragged:
                self._drawRubberBand(self._zoomCorner1, self._zoomCorner2) # remove old
            else:
                self._hasDragged= True
            self._zoomCorner2[0], self._zoomCorner2[1] = self.GetXY(event)
            self._drawRubberBand(self._zoomCorner1, self._zoomCorner2) # add new
    def OnMouseLeftDown(self,event):
        # Record the anchor corner of a potential rubber-band zoom box.
        self._zoomCorner1[0], self._zoomCorner1[1]= self.GetXY(event)
def OnMouseLeftUp(self, event):
if self._zoomEnabled:
if self._hasDragged == True:
self._drawRubberBand(self._zoomCorner1, self._zoomCorner2) # remove old
self._zoomCorner2[0], self._zoomCorner2[1]= self.GetXY(event)
self._hasDragged = False # reset flag
minX, minY= _numpy.minimum( self._zoomCorner1, self._zoomCorner2)
maxX, maxY= _numpy.maximum( self._zoomCorner1, self._zoomCorner2)
self.last_PointLabel = None #reset pointLabel
if self.last_draw != None:
self.Draw(self.last_draw[0], xAxis = (minX,maxX), yAxis = (minY,maxY), dc = None)
#else: # A box has not been drawn, zoom in on a point
## this interfered with the double click, so I've disables it.
# X,Y = self.GetXY(event)
# self.Zoom( (X,Y), (self._zoomInFactor,self._zoomInFactor) )
    def OnMouseDoubleClick(self,event):
        # Double click restores the full (unzoomed) view.
        if self._zoomEnabled:
            self.Reset()
    def OnMouseRightDown(self,event):
        # Right click zooms out, centred on the clicked point.
        if self._zoomEnabled:
            X,Y = self.GetXY(event)
            self.Zoom( (X,Y), (self._zoomOutFactor, self._zoomOutFactor) )
    def OnPaint(self, event):
        # All that is needed here is to draw the buffer to screen
        # Any hovering point label was drawn with an inverting blit, so
        # drawing it once more removes it before the buffer is shown.
        if self.last_PointLabel != None:
            self._drawPointLabel(self.last_PointLabel) #erase old
            self.last_PointLabel = None
        #paint current buffer to screen
        dc = wx.BufferedPaintDC(self, self._Buffer)
    def OnSize(self,event):
        # The Buffer init is done here, to make sure the buffer is always
        # the same size as the Window
        Size = self.GetClientSize()
        # Make new offscreen bitmap: this bitmap will always have the
        # current drawing in it, so it can be used to save the image to
        # a file, or whatever.
        self._Buffer = wx.EmptyBitmap(Size[0],Size[1],24)
        if True: #self.use_persistence:
            # Flat uint8 RGB buffer mirroring self._Buffer, used by the
            # persistence (IIR averaging) path in Draw().
            #self._Bufferarray = _numpy.zeros((Size[0], Size[1],3), dtype=_numpy.uint8)
            self._Bufferarray = _numpy.zeros((Size[0]* Size[1]*3), dtype=_numpy.uint8)
            # Make new second offscreen bitmap: this bitmap will always have the
            # last drawing in it, so it can be used to do display time dependent processing
            # like averaging (IIR) or show differences between updates
            self._Buffer2 = wx.EmptyBitmap(Size[0],Size[1],24)
            # now the extra buffers for the IIR processing
            # note the different datatype uint32
            # uint32 gives headroom for the fixed-point multiply/accumulate
            # (values up to 255*256) before the final right shift by 8.
            self._Buffer2array = _numpy.zeros((Size[0]* Size[1]*3), dtype=_numpy.uint32)  #dtype=_numpy.float
            self._Buffer3array = _numpy.zeros((Size[0]* Size[1]*3), dtype=_numpy.uint32)  #dtype=_numpy.float
            # optional you can set the ufunct buffer size to improve speed
            #_numpy.setbufsize(16*((Size[0]* Size[1]*3)/16 +1))
        self._setSize()
        self.last_PointLabel = None        #reset pointLabel
        # Re-render the remembered plot at the new size, or clear if none.
        if self.last_draw is None:
            self.Clear()
        else:
            graphics, xSpec, ySpec = self.last_draw
            self.Draw(graphics,xSpec,ySpec)
    def OnLeave(self, event):
        """Used to erase pointLabel when mouse outside window"""
        if self.last_PointLabel != None:
            # Inverting blit: redrawing the label removes it from screen.
            self._drawPointLabel(self.last_PointLabel) #erase old
            self.last_PointLabel = None
# Private Methods **************************************************
def _setSize(self, width=None, height=None):
"""DC width and height."""
if width == None:
(self.width,self.height) = self.GetClientSize()
else:
self.width, self.height= width,height
self.plotbox_size = 0.97*_numpy.array([self.width, self.height])
xo = 0.5*(self.width-self.plotbox_size[0])
yo = self.height-0.5*(self.height-self.plotbox_size[1])
self.plotbox_origin = _numpy.array([xo, yo])
    def _setPrinterScale(self, scale):
        """Used to thicken lines and increase marker size for print out."""
        # line thickness on printer is very thin at 600 dot/in. Markers small
        # scale: multiplier applied to pen widths and marker sizes (1 = screen).
        self.printerScale= scale
def _printDraw(self, printDC):
"""Used for printing."""
if self.last_draw != None:
graphics, xSpec, ySpec= self.last_draw
self.Draw(graphics,xSpec,ySpec,printDC)
    def _drawPointLabel(self, mDataDict):
        """Draws and erases pointLabels"""
        # Render the label into a scratch bitmap, then blit it to the screen
        # with wx.EQUIV ((NOT src) XOR dst): blitting the same label twice
        # restores the original pixels, which is how "erase" works here.
        width = self._Buffer.GetWidth()
        height = self._Buffer.GetHeight()
        tmp_Buffer = wx.EmptyBitmap(width,height)
        dcs = wx.MemoryDC()
        dcs.SelectObject(tmp_Buffer)
        dcs.Clear()
        dcs.BeginDrawing()
        self._pointLabelFunc(dcs,mDataDict) #custom user pointLabel function
        dcs.EndDrawing()
        dc = wx.ClientDC( self )
        #this will erase if called twice
        dc.Blit(0, 0, width, height, dcs, 0, 0, wx.EQUIV)  #(NOT src) XOR dst
def _drawLegend(self,dc,graphics,rhsW,topH,legendBoxWH, legendSymExt, legendTextExt):
"""Draws legend symbols and text"""
# top right hand corner of graph box is ref corner
trhc= self.plotbox_origin+ (self.plotbox_size-[rhsW,topH])*[1,-1]
legendLHS= .091* legendBoxWH[0] # border space between legend sym and graph box
lineHeight= max(legendSymExt[1], legendTextExt[1]) * 1.1 #1.1 used as space between lines
dc.SetFont(self._getFont(self._fontSizeLegend))
for i in range(len(graphics)):
o = graphics[i]
s= i*lineHeight
if isinstance(o,PolyMarker):
# draw marker with legend
pnt= (trhc[0]+legendLHS+legendSymExt[0]/2., trhc[1]+s+lineHeight/2.)
o.draw(dc, self.printerScale, coord= _numpy.array([pnt]))
elif isinstance(o,PolyLine):
# draw line with legend
pnt1= (trhc[0]+legendLHS, trhc[1]+s+lineHeight/2.)
pnt2= (trhc[0]+legendLHS+legendSymExt[0], trhc[1]+s+lineHeight/2.)
o.draw(dc, self.printerScale, coord= _numpy.array([pnt1,pnt2]))
else:
raise TypeError, "object is neither PolyMarker or PolyLine instance"
# draw legend txt
pnt= (trhc[0]+legendLHS+legendSymExt[0], trhc[1]+s+lineHeight/2.-legendTextExt[1]/2)
dc.DrawText(o.getLegend(),pnt[0],pnt[1])
dc.SetFont(self._getFont(self._fontSizeAxis)) # reset
def _titleLablesWH(self, dc, graphics):
"""Draws Title and labels and returns width and height for each"""
# TextExtents for Title and Axis Labels
dc.SetFont(self._getFont(self._fontSizeTitle))
title= graphics.getTitle()
titleWH= dc.GetTextExtent(title)
dc.SetFont(self._getFont(self._fontSizeAxis))
xLabel, yLabel= graphics.getXLabel(),graphics.getYLabel()
xLabelWH= dc.GetTextExtent(xLabel)
yLabelWH= dc.GetTextExtent(yLabel)
return titleWH, xLabelWH, yLabelWH
def _legendWH(self, dc, graphics):
"""Returns the size in screen units for legend box"""
if self._legendEnabled != True:
legendBoxWH= symExt= txtExt= (0,0)
else:
# find max symbol size
symExt= graphics.getSymExtent(self.printerScale)
# find max legend text extent
dc.SetFont(self._getFont(self._fontSizeLegend))
txtList= graphics.getLegendNames()
txtExt= dc.GetTextExtent(txtList[0])
for txt in graphics.getLegendNames()[1:]:
txtExt= _numpy.maximum(txtExt,dc.GetTextExtent(txt))
maxW= symExt[0]+txtExt[0]
maxH= max(symExt[1],txtExt[1])
# padding .1 for lhs of legend box and space between lines
maxW= maxW* 1.1
maxH= maxH* 1.1 * len(txtList)
dc.SetFont(self._getFont(self._fontSizeAxis))
legendBoxWH= (maxW,maxH)
return (legendBoxWH, symExt, txtExt)
    def _drawRubberBand(self, corner1, corner2):
        """Draws/erases rect box from corner1 to corner2"""
        ptx,pty,rectWidth,rectHeight= self._point2ClientCoord(corner1, corner2)
        # draw rectangle
        dc = wx.ClientDC( self )
        dc.BeginDrawing()
        dc.SetPen(wx.Pen(wx.BLACK))
        dc.SetBrush(wx.Brush( wx.WHITE, wx.TRANSPARENT ) )
        # INVERT makes the draw self-erasing: drawing the same box twice
        # restores the original pixels.
        dc.SetLogicalFunction(wx.INVERT)
        dc.DrawRectangle( ptx,pty, rectWidth,rectHeight)
        dc.SetLogicalFunction(wx.COPY)
        dc.EndDrawing()
def _getFont(self,size):
"""Take font size, adjusts if printing and returns wx.Font"""
s = size*self.printerScale
of = self.GetFont()
# Linux speed up to get font from cache rather than X font server
key = (int(s), of.GetFamily (), of.GetStyle (), of.GetWeight ())
font = self._fontCache.get (key, None)
if font:
return font # yeah! cache hit
else:
font = wx.Font(int(s), of.GetFamily(), of.GetStyle(), of.GetWeight())
self._fontCache[key] = font
return font
def _point2ClientCoord(self, corner1, corner2):
"""Converts user point coords to client screen int coords x,y,width,height"""
c1= _numpy.array(corner1)
c2= _numpy.array(corner2)
# convert to screen coords
pt1= c1*self._pointScale+self._pointShift
pt2= c2*self._pointScale+self._pointShift
# make height and width positive
pul= _numpy.minimum(pt1,pt2) # Upper left corner
plr= _numpy.maximum(pt1,pt2) # Lower right corner
rectWidth, rectHeight= plr-pul
ptx,pty= pul
return ptx, pty, rectWidth, rectHeight
def _axisInterval(self, spec, lower, upper):
"""Returns sensible axis range for given spec"""
if spec == 'none' or spec == 'min':
if lower == upper:
return lower-0.5, upper+0.5
else:
return lower, upper
elif spec == 'auto':
range = upper-lower
# if range == 0.:
if abs(range) < 1e-36:
return lower-0.5, upper+0.5
log = _numpy.log10(range)
power = _numpy.floor(log)
fraction = log-power
if fraction <= 0.05:
power = power-1
grid = 10.**power
lower = lower - lower % grid
mod = upper % grid
if mod != 0:
upper = upper - mod + grid
return lower, upper
elif type(spec) == type(()):
lower, upper = spec
if lower <= upper:
return lower, upper
else:
return upper, lower
else:
raise ValueError, str(spec) + ': illegal axis specification'
def _drawAxes(self, dc, p1, p2, scale, shift, xticks, yticks):
penWidth= self.printerScale # increases thickness for printing only
dc.SetPen(wx.Pen(wx.NamedColour('BLACK'), penWidth))
# set length of tick marks--long ones make grid
if self._gridEnabled:
x,y,width,height= self._point2ClientCoord(p1,p2)
yTickLength= width/2.0 +1
xTickLength= height/2.0 +1
else:
yTickLength= 3 * self.printerScale # lengthens lines for printing
xTickLength= 3 * self.printerScale
if self._xSpec is not 'none':
lower, upper = p1[0],p2[0]
text = 1
for y, d in [(p1[1], -xTickLength), (p2[1], xTickLength)]: # miny, maxy and tick lengths
a1 = scale*_numpy.array([lower, y])+shift
a2 = scale*_numpy.array([upper, y])+shift
dc.DrawLine(a1[0],a1[1],a2[0],a2[1]) # draws upper and lower axis line
for x, label in xticks:
pt = scale*_numpy.array([x, y])+shift
dc.DrawLine(pt[0],pt[1],pt[0],pt[1] + d) # draws tick mark d units
if text:
dc.DrawText(label,pt[0],pt[1])
text = 0 # axis values not drawn on top side
if self._ySpec is not 'none':
lower, upper = p1[1],p2[1]
text = 1
h = dc.GetCharHeight()
for x, d in [(p1[0], -yTickLength), (p2[0], yTickLength)]:
a1 = scale*_numpy.array([x, lower])+shift
a2 = scale*_numpy.array([x, upper])+shift
dc.DrawLine(a1[0],a1[1],a2[0],a2[1])
for y, label in yticks:
pt = scale*_numpy.array([x, y])+shift
dc.DrawLine(pt[0],pt[1],pt[0]-d,pt[1])
if text:
dc.DrawText(label,pt[0]-dc.GetTextExtent(label)[0],
pt[1]-0.5*h)
text = 0 # axis values not drawn on right side
def _ticks(self, lower, upper, step=None):
ideal = (upper-lower)/7.
log = _numpy.log10(ideal)
power = _numpy.floor(log)
fraction = log-power
factor = 1.
error = fraction
for f, lf in self._multiples:
e = _numpy.fabs(fraction-lf)
if e < error:
error = e
factor = f
grid = factor * 10.**power
if power > 4 or power < -4:
format = '%+7.1e'
elif power >= 0:
digits = max(1, int(power))
format = '%' + `digits`+'.0f'
else:
digits = -int(power)
format = '%'+`digits+2`+'.'+`digits`+'f'
#force grid when step is not None
if step is not None: grid = step
ticks = []
t = -grid*_numpy.floor(-lower/grid)
while t <= upper:
if t == -0: t = 0 #remove neg zero condition
ticks.append( (t, format % (t,)) )
t = t + grid
return ticks
def _scope_ticks (self, lower, upper):
'''Always 10 divisions, no labels'''
grid = (upper - lower) / 10.0
ticks = []
t = lower
while t <= upper:
ticks.append( (t, ""))
t = t + grid
return ticks
_multiples = [(2., _numpy.log10(2.)), (5., _numpy.log10(5.))]
#-------------------------------------------------------------------------------
# Used to layout the printer page
class PlotPrintout(wx.Printout):
    """Controls how the plot is made in printing and previewing"""
    # Do not change method names in this class,
    # we have to override wx.Printout methods here!
    def __init__(self, graph):
        """graph is instance of plotCanvas to be printed or previewed"""
        wx.Printout.__init__(self)
        self.graph = graph
    def HasPage(self, page):
        # Single-page printout: only page 1 exists.
        if page == 1:
            return True
        else:
            return False
    def GetPageInfo(self):
        return (1, 1, 1, 1)  # disable page numbers
    def OnPrintPage(self, page):
        """Render the plot onto the printer (or preview) DC, scaled to fit
        the printable area inside the page margins."""
        dc = self.GetDC()  # allows using floats for certain functions
##        print "PPI Printer",self.GetPPIPrinter()
##        print "PPI Screen", self.GetPPIScreen()
##        print "DC GetSize", dc.GetSize()
##        print "GetPageSizePixels", self.GetPageSizePixels()
        # Note PPIScreen does not give the correct number
        # Calulate everything for printer and then scale for preview
        PPIPrinter= self.GetPPIPrinter()        # printer dots/inch (w,h)
        #PPIScreen= self.GetPPIScreen()          # screen dots/inch (w,h)
        dcSize= dc.GetSize()                    # DC size
        pageSize= self.GetPageSizePixels()      # page size in terms of pixcels
        clientDcSize= self.graph.GetClientSize()
        # find what the margins are (mm)
        margLeftSize,margTopSize= self.graph.pageSetupData.GetMarginTopLeft()
        margRightSize, margBottomSize= self.graph.pageSetupData.GetMarginBottomRight()
        # calculate offset and scale for dc
        pixLeft= margLeftSize*PPIPrinter[0]/25.4  # mm*(dots/in)/(mm/in)
        pixRight= margRightSize*PPIPrinter[0]/25.4
        pixTop= margTopSize*PPIPrinter[1]/25.4
        pixBottom= margBottomSize*PPIPrinter[1]/25.4
        plotAreaW= pageSize[0]-(pixLeft+pixRight)
        plotAreaH= pageSize[1]-(pixTop+pixBottom)
        # ratio offset and scale to screen size if preview
        if self.IsPreview():
            ratioW= float(dcSize[0])/pageSize[0]
            ratioH= float(dcSize[1])/pageSize[1]
            pixLeft *= ratioW
            pixTop *= ratioH
            plotAreaW *= ratioW
            plotAreaH *= ratioH
        # rescale plot to page or preview plot area
        self.graph._setSize(plotAreaW,plotAreaH)
        # Set offset and scale
        dc.SetDeviceOrigin(pixLeft,pixTop)
        # Thicken up pens and increase marker size for printing
        ratioW= float(plotAreaW)/clientDcSize[0]
        ratioH= float(plotAreaH)/clientDcSize[1]
        aveScale= (ratioW+ratioH)/2
        self.graph._setPrinterScale(aveScale)  # tickens up pens for printing
        self.graph._printDraw(dc)
        # rescale back to original
        self.graph._setSize()
        self.graph._setPrinterScale(1)
        self.graph.Redraw()     #to get point label scale and shift correct
        return True
#---------------------------------------------------------------------------
# if running standalone...
#
# ...a sample implementation using the above
#
def _draw1Objects():
    """Sample plot: sine markers, a cosine line, and a few cross markers."""
    # 100 points sin function, plotted as green circles
    xy = 2.*_numpy.pi*_numpy.arange(200)/200.
    xy.shape = (100, 2)
    xy[:,1] = _numpy.sin(xy[:,0])
    markers1 = PolyMarker(xy, legend='Green Markers', colour='green', marker='circle',size=1)
    # 50 points cos function, plotted as red line
    xy = 2.*_numpy.pi*_numpy.arange(100)/100.
    xy.shape = (50,2)
    xy[:,1] = _numpy.cos(xy[:,0])
    lines = PolyLine(xy, legend= 'Red Line', colour='red')
    # A few more points...
    pi = _numpy.pi
    cross_pts = [(0., 0.), (pi/4., 1.), (pi/2, 0.), (3.*pi/4., -1)]
    markers2 = PolyMarker(cross_pts, legend='Cross Legend', colour='blue',
                          marker='cross')
    return PlotGraphics([markers1, lines, markers2],"Graph Title", "X Axis", "Y Axis")
def _draw2Objects():
    """Sample plot: styled lines (dot, dot-dash) plus large hatched markers."""
    # 100 points sin function, plotted as green dots
    data1 = 2.*_numpy.pi*_numpy.arange(200)/200.
    data1.shape = (100, 2)
    data1[:,1] = _numpy.sin(data1[:,0])
    line1 = PolyLine(data1, legend='Green Line', colour='green', width=6, style=wx.DOT)
    # 50 points cos function, plotted as red dot-dash
    data1 = 2.*_numpy.pi*_numpy.arange(100)/100.
    data1.shape = (50,2)
    data1[:,1] = _numpy.cos(data1[:,0])
    line2 = PolyLine(data1, legend='Red Line', colour='red', width=3, style= wx.DOT_DASH)
    # A few more points...
    pi = _numpy.pi
    markers1 = PolyMarker([(0., 0.), (pi/4., 1.), (pi/2, 0.),
                          (3.*pi/4., -1)], legend='Cross Hatch Square', colour='blue', width= 3, size= 6,
                          fillcolour= 'red', fillstyle= wx.CROSSDIAG_HATCH,
                          marker='square')
    return PlotGraphics([markers1, line1, line2], "Big Markers with Different Line Styles")
def _draw3Objects():
    """Sample plot showing one marker of each supported shape."""
    marker_names = ['circle', 'dot', 'square', 'triangle', 'triangle_down',
                    'cross', 'plus', 'circle']
    # One single-point PolyMarker per shape, staggered diagonally.
    markers = [PolyMarker([(2*idx+.5, idx+.5)], legend=name, colour='blue',
                          marker=name)
               for idx, name in enumerate(marker_names)]
    return PlotGraphics(markers, "Selection of Markers", "Minimal Axis", "No Axis")
def _draw4Objects():
    """Stress-test plot: a 25,000-point line with matching square markers."""
    # 25,000 point line
    data1 = _numpy.arange(5e5,1e6,10)
    data1.shape = (25000, 2)
    line1 = PolyLine(data1, legend='Wide Line', colour='green', width=5)
    # A few more points...
    markers2 = PolyMarker(data1, legend='Square', colour='blue',
                          marker='square')
    return PlotGraphics([line1, markers2], "25,000 Points", "Value X", "")
def _draw5Objects():
    """Sample plot with axes defined but no data points."""
    empty_line = PolyLine([], legend='Wide Line', colour='green', width=5)
    return PlotGraphics([empty_line], "Empty Plot With Just Axes", "Value X", "Value Y")
def _draw6Objects():
    """Bar-graph sample: each month is drawn as a thick vertical PolyLine."""
    # Bar graph
    points1=[(1,0), (1,10)]
    line1 = PolyLine(points1, colour='green', legend='Feb.', width=10)
    points1g=[(2,0), (2,4)]
    line1g = PolyLine(points1g, colour='red', legend='Mar.', width=10)
    points1b=[(3,0), (3,6)]
    line1b = PolyLine(points1b, colour='blue', legend='Apr.', width=10)
    points2=[(4,0), (4,12)]
    line2 = PolyLine(points2, colour='Yellow', legend='May', width=10)
    points2g=[(5,0), (5,8)]
    line2g = PolyLine(points2g, colour='orange', legend='June', width=10)
    points2b=[(6,0), (6,4)]
    line2b = PolyLine(points2b, colour='brown', legend='July', width=10)
    return PlotGraphics([line1, line1g, line1b, line2, line2g, line2b],
                        "Bar Graph - (Turn on Grid, Legend)", "Months", "Number of Students")
class TestFrame(wx.Frame):
    """Demo frame: a PlotCanvas plus menus exercising every canvas feature."""
    def __init__(self, parent, id, title):
        wx.Frame.__init__(self, parent, id, title,
                          wx.DefaultPosition, (600, 400))
        # Now Create the menu bar and items
        self.mainmenu = wx.MenuBar()
        # --- File menu: printing, saving, exit ---
        menu = wx.Menu()
        menu.Append(200, 'Page Setup...', 'Setup the printer page')
        self.Bind(wx.EVT_MENU, self.OnFilePageSetup, id=200)
        menu.Append(201, 'Print Preview...', 'Show the current plot on page')
        self.Bind(wx.EVT_MENU, self.OnFilePrintPreview, id=201)
        menu.Append(202, 'Print...', 'Print the current plot')
        self.Bind(wx.EVT_MENU, self.OnFilePrint, id=202)
        menu.Append(203, 'Save Plot...', 'Save current plot')
        self.Bind(wx.EVT_MENU, self.OnSaveFile, id=203)
        menu.Append(205, 'E&xit', 'Enough of this already!')
        self.Bind(wx.EVT_MENU, self.OnFileExit, id=205)
        self.mainmenu.Append(menu, '&File')
        # --- Plot menu: demo plots and canvas toggles ---
        menu = wx.Menu()
        menu.Append(206, 'Draw1', 'Draw plots1')
        self.Bind(wx.EVT_MENU,self.OnPlotDraw1, id=206)
        menu.Append(207, 'Draw2', 'Draw plots2')
        self.Bind(wx.EVT_MENU,self.OnPlotDraw2, id=207)
        menu.Append(208, 'Draw3', 'Draw plots3')
        self.Bind(wx.EVT_MENU,self.OnPlotDraw3, id=208)
        menu.Append(209, 'Draw4', 'Draw plots4')
        self.Bind(wx.EVT_MENU,self.OnPlotDraw4, id=209)
        menu.Append(210, 'Draw5', 'Draw plots5')
        self.Bind(wx.EVT_MENU,self.OnPlotDraw5, id=210)
        menu.Append(260, 'Draw6', 'Draw plots6')
        self.Bind(wx.EVT_MENU,self.OnPlotDraw6, id=260)
        menu.Append(211, '&Redraw', 'Redraw plots')
        self.Bind(wx.EVT_MENU,self.OnPlotRedraw, id=211)
        menu.Append(212, '&Clear', 'Clear canvas')
        self.Bind(wx.EVT_MENU,self.OnPlotClear, id=212)
        menu.Append(213, '&Scale', 'Scale canvas')
        self.Bind(wx.EVT_MENU,self.OnPlotScale, id=213)
        menu.Append(214, 'Enable &Zoom', 'Enable Mouse Zoom', kind=wx.ITEM_CHECK)
        self.Bind(wx.EVT_MENU,self.OnEnableZoom, id=214)
        menu.Append(215, 'Enable &Grid', 'Turn on Grid', kind=wx.ITEM_CHECK)
        self.Bind(wx.EVT_MENU,self.OnEnableGrid, id=215)
        menu.Append(220, 'Enable &Legend', 'Turn on Legend', kind=wx.ITEM_CHECK)
        self.Bind(wx.EVT_MENU,self.OnEnableLegend, id=220)
        menu.Append(222, 'Enable &Point Label', 'Show Closest Point', kind=wx.ITEM_CHECK)
        self.Bind(wx.EVT_MENU,self.OnEnablePointLabel, id=222)
        menu.Append(225, 'Scroll Up 1', 'Move View Up 1 Unit')
        self.Bind(wx.EVT_MENU,self.OnScrUp, id=225)
        menu.Append(230, 'Scroll Rt 2', 'Move View Right 2 Units')
        self.Bind(wx.EVT_MENU,self.OnScrRt, id=230)
        menu.Append(235, '&Plot Reset', 'Reset to original plot')
        self.Bind(wx.EVT_MENU,self.OnReset, id=235)
        self.mainmenu.Append(menu, '&Plot')
        # --- Help menu ---
        menu = wx.Menu()
        menu.Append(300, '&About', 'About this thing...')
        self.Bind(wx.EVT_MENU, self.OnHelpAbout, id=300)
        self.mainmenu.Append(menu, '&Help')
        self.SetMenuBar(self.mainmenu)
        # A status bar to tell people what's happening
        self.CreateStatusBar(1)
        self.client = PlotCanvas(self)
        #define the function for drawing pointLabels
        self.client.SetPointLabelFunc(self.DrawPointLabel)
        # Create mouse event for showing cursor coords in status bar
        self.client.Bind(wx.EVT_LEFT_DOWN, self.OnMouseLeftDown)
        # Show closest point when enabled
        self.client.Bind(wx.EVT_MOTION, self.OnMotion)
        self.Show(True)
    def DrawPointLabel(self, dc, mDataDict):
        """This is the fuction that defines how the pointLabels are plotted
            dc - DC that will be passed
            mDataDict - Dictionary of data that you want to use for the pointLabel
            As an example I have decided I want a box at the curve point
            with some text information about the curve plotted below.
            Any wxDC method can be used.
        """
        # ----------
        dc.SetPen(wx.Pen(wx.BLACK))
        dc.SetBrush(wx.Brush( wx.BLACK, wx.SOLID ) )
        sx, sy = mDataDict["scaledXY"] #scaled x,y of closest point
        dc.DrawRectangle( sx-5,sy-5, 10, 10)  #10by10 square centered on point
        px,py = mDataDict["pointXY"]
        cNum = mDataDict["curveNum"]
        pntIn = mDataDict["pIndex"]
        legend = mDataDict["legend"]
        #make a string to display
        s = "Crv# %i, '%s', Pt. (%.2f,%.2f), PtInd %i" %(cNum, legend, px, py, pntIn)
        dc.DrawText(s, sx , sy+1)
        # -----------
    def OnMouseLeftDown(self,event):
        # Echo the clicked plot coordinate in the status bar.
        s= "Left Mouse Down at Point: (%.4f, %.4f)" % self.client.GetXY(event)
        self.SetStatusText(s)
        event.Skip()            #allows plotCanvas OnMouseLeftDown to be called
    def OnMotion(self, event):
        #show closest point (when enbled)
        if self.client.GetEnablePointLabel() == True:
            #make up dict with info for the pointLabel
            #I've decided to mark the closest point on the closest curve
            dlst= self.client.GetClosetPoint( self.client.GetXY(event), pointScaled= True)
            if dlst != []:    #returns [] if none
                curveNum, legend, pIndex, pointXY, scaledXY, distance = dlst
                #make up dictionary to pass to my user function (see DrawPointLabel)
                mDataDict= {"curveNum":curveNum, "legend":legend, "pIndex":pIndex,\
                            "pointXY":pointXY, "scaledXY":scaledXY}
                #pass dict to update the pointLabel
                self.client.UpdatePointLabel(mDataDict)
        event.Skip()           #go to next handler
    def OnFilePageSetup(self, event):
        self.client.PageSetup()
    def OnFilePrintPreview(self, event):
        self.client.PrintPreview()
    def OnFilePrint(self, event):
        self.client.Printout()
    def OnSaveFile(self, event):
        self.client.SaveFile()
    def OnFileExit(self, event):
        self.Close()
    def OnPlotDraw1(self, event):
        self.resetDefaults()
        self.client.Draw(_draw1Objects())
    def OnPlotDraw2(self, event):
        self.resetDefaults()
        self.client.Draw(_draw2Objects())
    def OnPlotDraw3(self, event):
        # Demonstrates custom fonts and partial axis specs.
        self.resetDefaults()
        self.client.SetFont(wx.Font(10,wx.SCRIPT,wx.NORMAL,wx.NORMAL))
        self.client.SetFontSizeAxis(20)
        self.client.SetFontSizeLegend(12)
        self.client.SetXSpec('min')
        self.client.SetYSpec('none')
        self.client.Draw(_draw3Objects())
    def OnPlotDraw4(self, event):
        self.resetDefaults()
        drawObj= _draw4Objects()
        self.client.Draw(drawObj)
##        # profile
##        start = _time.clock()
##        for x in range(10):
##            self.client.Draw(drawObj)
##        print "10 plots of Draw4 took: %f sec."%(_time.clock() - start)
##        # profile end
    def OnPlotDraw5(self, event):
        # Empty plot with just axes
        self.resetDefaults()
        drawObj= _draw5Objects()
        # make the axis X= (0,5), Y=(0,10)
        # (default with None is X= (-1,1), Y= (-1,1))
        self.client.Draw(drawObj, xAxis= (0,5), yAxis= (0,10))
    def OnPlotDraw6(self, event):
        #Bar Graph Example
        self.resetDefaults()
        #self.client.SetEnableLegend(True)   #turn on Legend
        #self.client.SetEnableGrid(True)     #turn on Grid
        self.client.SetXSpec('none')        #turns off x-axis scale
        self.client.SetYSpec('auto')
        self.client.Draw(_draw6Objects(), xAxis= (0,7))
    def OnPlotRedraw(self,event):
        self.client.Redraw()
    def OnPlotClear(self,event):
        self.client.Clear()
    def OnPlotScale(self, event):
        if self.client.last_draw != None:
            graphics, xAxis, yAxis= self.client.last_draw
            self.client.Draw(graphics,(1,3.05),(0,1))
    def OnEnableZoom(self, event):
        self.client.SetEnableZoom(event.IsChecked())
    def OnEnableGrid(self, event):
        self.client.SetEnableGrid(event.IsChecked())
    def OnEnableLegend(self, event):
        self.client.SetEnableLegend(event.IsChecked())
    def OnEnablePointLabel(self, event):
        self.client.SetEnablePointLabel(event.IsChecked())
    def OnScrUp(self, event):
        self.client.ScrollUp(1)
    def OnScrRt(self,event):
        self.client.ScrollRight(2)
    def OnReset(self,event):
        self.client.Reset()
    def OnHelpAbout(self, event):
        from wx.lib.dialogs import ScrolledMessageDialog
        about = ScrolledMessageDialog(self, __doc__, "About...")
        about.ShowModal()
    def resetDefaults(self):
        """Just to reset the fonts back to the PlotCanvas defaults"""
        self.client.SetFont(wx.Font(10,wx.SWISS,wx.NORMAL,wx.NORMAL))
        self.client.SetFontSizeAxis(10)
        self.client.SetFontSizeLegend(7)
        self.client.SetXSpec('auto')
        self.client.SetYSpec('auto')
def __test():
    """Run the standalone demo: create a TestFrame and enter the wx main loop."""
    class MyApp(wx.App):
        def OnInit(self):
            wx.InitAllImageHandlers()
            frame = TestFrame(None, -1, "PlotCanvas")
            #frame.Show(True)
            self.SetTopWindow(frame)
            return True
    app = MyApp(0)
    app.MainLoop()
# Launch the interactive demo when this file is executed directly.
if __name__ == '__main__':
    __test()
| gpl-3.0 |
2014cdbg6/cdbg6 | wsgi/static/Brython2.1.0-20140419-113919/Lib/_sre.py | 54 | 51333 | # NOT_RPYTHON
"""
A pure Python reimplementation of the _sre module from CPython 2.4
Copyright 2005 Nik Haldimann, licensed under the MIT license
This code is based on material licensed under CNRI's Python 1.6 license and
copyrighted by: Copyright (c) 1997-2001 by Secret Labs AB
"""
MAXREPEAT = 2147483648
#import array
import operator, sys
from sre_constants import ATCODES, OPCODES, CHCODES
from sre_constants import SRE_INFO_PREFIX, SRE_INFO_LITERAL
from sre_constants import SRE_FLAG_UNICODE, SRE_FLAG_LOCALE
import sys
# Identifying as _sre from Python 2.3 or 2.4
#if sys.version_info[:2] >= (2, 4):
MAGIC = 20031017
#else:
# MAGIC = 20030419
# In _sre.c this is bytesize of the code word type of the C implementation.
# There it's 2 for normal Python builds and more for wide unicode builds (large
# enough to hold a 32-bit UCS-4 encoded character). Since here in pure Python
# we only see re bytecodes as Python longs, we shouldn't have to care about the
# codesize. But sre_compile will compile some stuff differently depending on the
# codesize (e.g., charsets).
# starting with python 3.3 CODESIZE is 4
#if sys.maxunicode == 65535:
# CODESIZE = 2
#else:
# Byte size of a regex code word; 4 corresponds to wide/UCS-4 capable builds
# (and to CPython >= 3.3, per the discussion above).
CODESIZE = 4
copyright = "_sre.py 2.4c Copyright 2005 by Nik Haldimann"
def getcodesize():
    """Return the byte size of a regex bytecode word (mirrors _sre.getcodesize)."""
    return CODESIZE
def compile(pattern, flags, code, groups=0, groupindex={}, indexgroup=[None]):
    """Compiles (or rather just converts) a pattern descriptor to a SRE_Pattern
    object. Actual compilation to opcodes happens in sre_compile."""
    # NOTE: intentionally shadows the builtin compile() — this module mimics
    # the public API of the C _sre extension.
    return SRE_Pattern(pattern, flags, code, groups, groupindex, indexgroup)
def getlower(char_ord, flags):
    """Return the lowercase code point for char_ord, honouring regex flags.

    ASCII characters are always lowered; other characters only when the
    UNICODE flag is set, or when the LOCALE flag is set and the character
    fits in an 8-bit charset.
    """
    ascii_char = char_ord < 128
    unicode_mode = bool(flags & SRE_FLAG_UNICODE)
    locale_byte = bool(flags & SRE_FLAG_LOCALE) and char_ord < 256
    if ascii_char or unicode_mode or locale_byte:
        #return ord(unichr(char_ord).lower())
        return ord(chr(char_ord).lower())
    return char_ord
class SRE_Pattern:
    def __init__(self, pattern, flags, code, groups=0, groupindex={}, indexgroup=[None]):
        # NOTE(review): the mutable default arguments (dict/list) are shared
        # across calls; safe only as long as callers never mutate them.
        self.pattern = pattern        # original pattern string/descriptor
        self.flags = flags            # SRE_FLAG_* bitmask
        self.groups = groups          # number of capture groups
        self.groupindex = groupindex # Maps group names to group indices
        self._indexgroup = indexgroup # Maps indices to group names
        self._code = code             # compiled opcode list (from sre_compile)
def match(self, string, pos=0, endpos=sys.maxsize):
"""If zero or more characters at the beginning of string match this
regular expression, return a corresponding MatchObject instance. Return
None if the string does not match the pattern."""
state = _State(string, pos, endpos, self.flags)
if state.match(self._code):
return SRE_Match(self, state)
return None
def search(self, string, pos=0, endpos=sys.maxsize):
"""Scan through string looking for a location where this regular
expression produces a match, and return a corresponding MatchObject
instance. Return None if no position in the string matches the
pattern."""
state = _State(string, pos, endpos, self.flags)
if state.search(self._code):
return SRE_Match(self, state)
else:
return None
def findall(self, string, pos=0, endpos=sys.maxsize):
"""Return a list of all non-overlapping matches of pattern in string."""
matchlist = []
state = _State(string, pos, endpos, self.flags)
while state.start <= state.end:
state.reset()
state.string_position = state.start
if not state.search(self._code):
break
match = SRE_Match(self, state)
if self.groups == 0 or self.groups == 1:
item = match.group(self.groups)
else:
item = match.groups("")
matchlist.append(item)
if state.string_position == state.start:
state.start += 1
else:
state.start = state.string_position
return matchlist
def _subx(self, template, string, count=0, subn=False):
filter = template
if not callable(template) and "\\" in template:
# handle non-literal strings ; hand it over to the template compiler
#import sre #sre was renamed to re
import re as sre
filter = sre._subx(self, template)
state = _State(string, 0, sys.maxsize, self.flags)
sublist = []
n = last_pos = 0
while not count or n < count:
state.reset()
state.string_position = state.start
if not state.search(self._code):
break
if last_pos < state.start:
sublist.append(string[last_pos:state.start])
if not (last_pos == state.start and
last_pos == state.string_position and n > 0):
# the above ignores empty matches on latest position
if callable(filter):
sublist.append(filter(SRE_Match(self, state)))
else:
sublist.append(filter)
last_pos = state.string_position
n += 1
if state.string_position == state.start:
state.start += 1
else:
state.start = state.string_position
if last_pos < state.end:
sublist.append(string[last_pos:state.end])
item = "".join(sublist)
if subn:
return item, n
else:
return item
def sub(self, repl, string, count=0):
"""Return the string obtained by replacing the leftmost non-overlapping
occurrences of pattern in string by the replacement repl."""
return self._subx(repl, string, count, False)
def subn(self, repl, string, count=0):
"""Return the tuple (new_string, number_of_subs_made) found by replacing
the leftmost non-overlapping occurrences of pattern with the replacement
repl."""
return self._subx(repl, string, count, True)
def split(self, string, maxsplit=0):
"""Split string by the occurrences of pattern."""
splitlist = []
state = _State(string, 0, sys.maxsize, self.flags)
n = 0
last = state.start
while not maxsplit or n < maxsplit:
state.reset()
state.string_position = state.start
if not state.search(self._code):
break
if state.start == state.string_position: # zero-width match
if last == state.end: # or end of string
break
state.start += 1
continue
splitlist.append(string[last:state.start])
# add groups (if any)
if self.groups:
match = SRE_Match(self, state)
splitlist.extend(list(match.groups(None)))
n += 1
last = state.start = state.string_position
splitlist.append(string[last:state.end])
return splitlist
def finditer(self, string, pos=0, endpos=sys.maxsize):
"""Return a list of all non-overlapping matches of pattern in string."""
scanner = self.scanner(string, pos, endpos)
##_list=[]
#_m=self.scanner(string, pos, endpos)
#_re=SRE_Scanner(self, string, pos, endpos)
#_m=_re.search()
#while _m:
# _list.append(_m)
# _m=_re.search()
#return _list
return iter(scanner.search, None)
def scanner(self, string, start=0, end=sys.maxsize):
return SRE_Scanner(self, string, start, end)
def __copy__(self):
raise TypeError("cannot copy this pattern object")
def __deepcopy__(self):
raise TypeError("cannot copy this pattern object")
class SRE_Scanner:
    """Undocumented scanner interface of sre.

    Successive calls to match()/search() walk forward through the subject
    string, returning one SRE_Match (or None) per call.
    """

    def __init__(self, pattern, string, start, end):
        self.pattern = pattern
        self._state = _State(string, start, end, self.pattern.flags)

    def _match_search(self, matcher):
        # Run one attempt with *matcher* at the current position, then
        # advance the start position for the next call.
        st = self._state
        st.reset()
        st.string_position = st.start
        found = SRE_Match(self.pattern, st) if matcher(self.pattern._code) else None
        if found is not None and st.string_position != st.start:
            st.start = st.string_position
        else:
            # No match, or a zero-width match: step one character so the
            # scanner cannot loop forever on the same position.
            st.start += 1
        return found

    def match(self):
        return self._match_search(self._state.match)

    def search(self):
        return self._match_search(self._state.search)
class SRE_Match:
    """Result object for one successful match, mirroring the interface of
    CPython's match objects. Built from the engine's _State right after a
    successful match/search."""

    def __init__(self, pattern, state):
        self.re = pattern
        self.string = state.string
        self.pos = state.pos
        self.endpos = state.end
        self.lastindex = state.lastindex
        if self.lastindex < 0:
            self.lastindex = None
        self.regs = self._create_regs(state)
        # Guard lastindex against None first: "0 <= None" is invalid
        # under Python 3.
        if self.lastindex is not None and pattern._indexgroup and 0 <= self.lastindex < len(pattern._indexgroup):
            # The upper-bound check should not be necessary, as the re
            # compiler is supposed to always provide an _indexgroup list long
            # enough. But the re.Scanner class seems to screw up something
            # there, test_scanner in test_re won't work without upper-bound
            # checking. XXX investigate this and report bug to CPython.
            self.lastgroup = pattern._indexgroup[self.lastindex]
        else:
            self.lastgroup = None

    def _create_regs(self, state):
        """Creates a tuple of index pairs representing matched groups."""
        regs = [(state.start, state.string_position)]
        for group in range(self.re.groups):
            mark_index = 2 * group
            if mark_index + 1 < len(state.marks) \
                    and state.marks[mark_index] is not None \
                    and state.marks[mark_index + 1] is not None:
                regs.append((state.marks[mark_index], state.marks[mark_index + 1]))
            else:
                # Group did not participate in the match.
                regs.append((-1, -1))
        return tuple(regs)

    def _get_index(self, group):
        """Map a group number or group name to a group index; raises
        IndexError for unknown groups."""
        if isinstance(group, int):
            if group >= 0 and group <= self.re.groups:
                return group
        else:
            if group in self.re.groupindex:
                return self.re.groupindex[group]
        raise IndexError("no such group")

    def _get_slice(self, group, default):
        """Return the substring matched by *group*, or *default* if the
        group did not participate in the match."""
        group_indices = self.regs[group]
        if group_indices[0] >= 0:
            return self.string[group_indices[0]:group_indices[1]]
        else:
            return default

    def start(self, group=0):
        """Returns the indices of the start of the substring matched by group;
        group defaults to zero (meaning the whole matched substring). Returns -1
        if group exists but did not contribute to the match."""
        return self.regs[self._get_index(group)][0]

    def end(self, group=0):
        """Returns the indices of the end of the substring matched by group;
        group defaults to zero (meaning the whole matched substring). Returns -1
        if group exists but did not contribute to the match."""
        return self.regs[self._get_index(group)][1]

    def span(self, group=0):
        """Returns the 2-tuple (m.start(group), m.end(group))."""
        return self.start(group), self.end(group)

    def expand(self, template):
        """Return the string obtained by doing backslash substitution and
        resolving group references on template."""
        # "sre" does not exist under Python 3; use the same aliased import
        # as SRE_Pattern._subx.
        import re as sre
        return sre._expand(self.re, self, template)

    def groups(self, default=None):
        """Returns a tuple containing all the subgroups of the match. The
        default argument is used for groups that did not participate in the
        match (defaults to None)."""
        groups = []
        for indices in self.regs[1:]:
            if indices[0] >= 0:
                groups.append(self.string[indices[0]:indices[1]])
            else:
                groups.append(default)
        return tuple(groups)

    def groupdict(self, default=None):
        """Return a dictionary containing all the named subgroups of the match.
        The default argument is used for groups that did not participate in the
        match (defaults to None)."""
        groupdict = {}
        for key, value in self.re.groupindex.items():
            groupdict[key] = self._get_slice(value, default)
        return groupdict

    def group(self, *args):
        """Returns one or more subgroups of the match. Each argument is either a
        group index or a group name."""
        if len(args) == 0:
            args = (0,)
        grouplist = []
        for group in args:
            grouplist.append(self._get_slice(self._get_index(group), None))
        if len(grouplist) == 1:
            return grouplist[0]
        else:
            return tuple(grouplist)

    def __copy__(self):
        # 'self' was missing here, which made the call fail with a
        # signature error instead of this intended message.
        raise TypeError("cannot copy this pattern object")

    def __deepcopy__(self, memo=None):
        raise TypeError("cannot copy this pattern object")
class _State:
    # Per-attempt matching state shared by all contexts of one match/search:
    # the subject string window, current position, group marks, and the
    # stack of pending _MatchContext frames.
    def __init__(self, string, start, end, flags):
        self.string = string
        # Clamp the caller-supplied window to the real string bounds.
        if start < 0:
            start = 0
        if end > len(string):
            end = len(string)
        self.start = start
        self.string_position = self.start
        self.end = end
        self.pos = start
        self.flags = flags
        self.reset()
    def reset(self):
        # Clear all per-position state so matching can restart cleanly
        # from a new starting position.
        self.marks = []
        self.lastindex = -1
        self.marks_stack = []
        self.context_stack = []
        self.repeat = None
    def match(self, pattern_codes):
        """Interpret *pattern_codes* anchored at the current position;
        return True on a match."""
        # Optimization: Check string length. pattern_codes[3] contains the
        # minimum length for a string to possibly match.
        # brython.. the optimization doesn't work
        #if pattern_codes[0] == OPCODES["info"] and pattern_codes[3]:
        #    if self.end - self.string_position < pattern_codes[3]:
        #        #_log("reject (got %d chars, need %d)"
        #        #    % (self.end - self.string_position, pattern_codes[3]))
        #        return False
        dispatcher = _OpcodeDispatcher()
        self.context_stack.append(_MatchContext(self, pattern_codes))
        has_matched = None
        # Drive contexts until the stack drains; a context whose result is
        # None is waiting on a child and stays on the stack.
        while len(self.context_stack) > 0:
            context = self.context_stack[-1]
            has_matched = dispatcher.match(context)
            if has_matched is not None: # don't pop if context isn't done
                self.context_stack.pop()
        return has_matched
    def search(self, pattern_codes):
        """Scan forward for the leftmost position at which the pattern
        matches; return True on success."""
        flags = 0
        if pattern_codes[0] == OPCODES["info"]:
            # optimization info block
            # <INFO> <1=skip> <2=flags> <3=min> <4=max> <5=prefix info>
            if pattern_codes[2] & SRE_INFO_PREFIX and pattern_codes[5] > 1:
                return self.fast_search(pattern_codes)
            flags = pattern_codes[2]
            pattern_codes = pattern_codes[pattern_codes[1] + 1:]
        string_position = self.start
        if pattern_codes[0] == OPCODES["literal"]:
            # Special case: Pattern starts with a literal character. This is
            # used for short prefixes
            character = pattern_codes[1]
            while True:
                # Skip ahead to the next occurrence of the literal prefix.
                while string_position < self.end \
                        and ord(self.string[string_position]) != character:
                    string_position += 1
                if string_position >= self.end:
                    return False
                self.start = string_position
                string_position += 1
                self.string_position = string_position
                if flags & SRE_INFO_LITERAL:
                    return True
                if self.match(pattern_codes[2:]):
                    return True
            return False
        # General case
        while string_position <= self.end:
            self.reset()
            self.start = self.string_position = string_position
            if self.match(pattern_codes):
                return True
            string_position += 1
        return False
    def fast_search(self, pattern_codes):
        """Skips forward in a string as fast as possible using information from
        an optimization info block."""
        # pattern starts with a known prefix
        # <5=length> <6=skip> <7=prefix data> <overlap data>
        flags = pattern_codes[2]
        prefix_len = pattern_codes[5]
        prefix_skip = pattern_codes[6] # don't really know what this is good for
        prefix = pattern_codes[7:7 + prefix_len]
        overlap = pattern_codes[7 + prefix_len - 1:pattern_codes[1] + 1]
        pattern_codes = pattern_codes[pattern_codes[1] + 1:]
        i = 0
        string_position = self.string_position
        # Knuth-Morris-Pratt style scan driven by the overlap table.
        while string_position < self.end:
            while True:
                if ord(self.string[string_position]) != prefix[i]:
                    if i == 0:
                        break
                    else:
                        i = overlap[i]
                else:
                    i += 1
                    if i == prefix_len:
                        # found a potential match
                        self.start = string_position + 1 - prefix_len
                        self.string_position = string_position + 1 \
                                                     - prefix_len + prefix_skip
                        if flags & SRE_INFO_LITERAL:
                            return True # matched all of pure literal pattern
                        if self.match(pattern_codes[2 * prefix_skip:]):
                            return True
                        i = overlap[i]
                    break
            string_position += 1
        return False
    def set_mark(self, mark_nr, position):
        # Record the string position for group mark *mark_nr*; odd mark
        # numbers close a group and update lastindex.
        if mark_nr & 1:
            # This id marks the end of a group.
            # fix python 3 division incompatability
            #self.lastindex = mark_nr / 2 + 1
            self.lastindex = mark_nr // 2 + 1
        if mark_nr >= len(self.marks):
            self.marks.extend([None] * (mark_nr - len(self.marks) + 1))
        self.marks[mark_nr] = position
    def get_marks(self, group_index):
        # Return the (start, end) marks of a group, or (None, None) when
        # the group has not (yet) participated in the match.
        marks_index = 2 * group_index
        if len(self.marks) > marks_index + 1:
            return self.marks[marks_index], self.marks[marks_index + 1]
        else:
            return None, None
    def marks_push(self):
        # Snapshot marks/lastindex so backtracking can restore them later.
        self.marks_stack.append((self.marks[:], self.lastindex))
    def marks_pop(self):
        self.marks, self.lastindex = self.marks_stack.pop()
    def marks_pop_keep(self):
        # Restore the snapshot but leave it on the stack for more retries.
        self.marks, self.lastindex = self.marks_stack[-1]
    def marks_pop_discard(self):
        self.marks_stack.pop()
    def lower(self, char_ord):
        # Case-fold a character ordinal according to the pattern flags.
        return getlower(char_ord, self.flags)
class _MatchContext:
def __init__(self, state, pattern_codes):
self.state = state
self.pattern_codes = pattern_codes
self.string_position = state.string_position
self.code_position = 0
self.has_matched = None
def push_new_context(self, pattern_offset):
"""Creates a new child context of this context and pushes it on the
stack. pattern_offset is the offset off the current code position to
start interpreting from."""
child_context = _MatchContext(self.state,
self.pattern_codes[self.code_position + pattern_offset:])
#print("_sre.py:517:pushing new context") #, child_context.has_matched)
#print(self.state.string_position)
#print(self.pattern_codes[self.code_position + pattern_offset:])
#print(pattern_offset)
self.state.context_stack.append(child_context)
return child_context
def peek_char(self, peek=0):
return self.state.string[self.string_position + peek]
def skip_char(self, skip_count):
self.string_position += skip_count
def remaining_chars(self):
return self.state.end - self.string_position
def peek_code(self, peek=0):
return self.pattern_codes[self.code_position + peek]
def skip_code(self, skip_count):
self.code_position += skip_count
def remaining_codes(self):
return len(self.pattern_codes) - self.code_position
def at_beginning(self):
return self.string_position == 0
def at_end(self):
return self.string_position == self.state.end
def at_linebreak(self):
return not self.at_end() and _is_linebreak(self.peek_char())
def at_boundary(self, word_checker):
if self.at_beginning() and self.at_end():
return False
that = not self.at_beginning() and word_checker(self.peek_char(-1))
this = not self.at_end() and word_checker(self.peek_char())
return this != that
class _RepeatContext(_MatchContext):
    # Context pushed by the REPEAT opcode; MAX_UNTIL / MIN_UNTIL read and
    # update its repetition count while driving the repeated subpattern.
    def __init__(self, context):
        _MatchContext.__init__(self, context.state,
            context.pattern_codes[context.code_position:])
        # Number of completed repetitions so far (-1 = none attempted yet).
        self.count = -1
        #print('569:repeat', context.state.repeat)
        # Enclosing repeat context, restored when this repeat finishes.
        self.previous = context.state.repeat
        # Last string position seen by MAX_UNTIL (zero-width protection).
        self.last_position = None
class _Dispatcher:
DISPATCH_TABLE = None
def dispatch(self, code, context):
method = self.DISPATCH_TABLE.get(code, self.__class__.unknown)
return method(self, context)
def unknown(self, code, ctx):
raise NotImplementedError()
def build_dispatch_table(cls, code_dict, method_prefix):
if cls.DISPATCH_TABLE is not None:
return
table = {}
for key, value in code_dict.items():
if hasattr(cls, "%s%s" % (method_prefix, key)):
table[value] = getattr(cls, "%s%s" % (method_prefix, key))
cls.DISPATCH_TABLE = table
build_dispatch_table = classmethod(build_dispatch_table)
class _OpcodeDispatcher(_Dispatcher):
def __init__(self):
self.executing_contexts = {}
self.at_dispatcher = _AtcodeDispatcher()
self.ch_dispatcher = _ChcodeDispatcher()
self.set_dispatcher = _CharsetDispatcher()
def match(self, context):
"""Returns True if the current context matches, False if it doesn't and
None if matching is not finished, ie must be resumed after child
contexts have been matched."""
while context.remaining_codes() > 0 and context.has_matched is None:
opcode = context.peek_code()
if not self.dispatch(opcode, context):
return None
if context.has_matched is None:
context.has_matched = False
return context.has_matched
def dispatch(self, opcode, context):
"""Dispatches a context on a given opcode. Returns True if the context
is done matching, False if it must be resumed when next encountered."""
#if self.executing_contexts.has_key(id(context)):
if id(context) in self.executing_contexts:
generator = self.executing_contexts[id(context)]
del self.executing_contexts[id(context)]
has_finished = next(generator)
else:
method = self.DISPATCH_TABLE.get(opcode, _OpcodeDispatcher.unknown)
has_finished = method(self, context)
if hasattr(has_finished, "__next__"): # avoid using the types module
generator = has_finished
has_finished = next(generator)
if not has_finished:
self.executing_contexts[id(context)] = generator
return has_finished
def op_success(self, ctx):
# end of pattern
#self._log(ctx, "SUCCESS")
ctx.state.string_position = ctx.string_position
ctx.has_matched = True
return True
def op_failure(self, ctx):
# immediate failure
#self._log(ctx, "FAILURE")
ctx.has_matched = False
return True
def general_op_literal(self, ctx, compare, decorate=lambda x: x):
#print(ctx.peek_char())
if ctx.at_end() or not compare(decorate(ord(ctx.peek_char())),
decorate(ctx.peek_code(1))):
ctx.has_matched = False
ctx.skip_code(2)
ctx.skip_char(1)
def op_literal(self, ctx):
# match literal string
# <LITERAL> <code>
#self._log(ctx, "LITERAL", ctx.peek_code(1))
self.general_op_literal(ctx, operator.eq)
return True
def op_not_literal(self, ctx):
# match anything that is not the given literal character
# <NOT_LITERAL> <code>
#self._log(ctx, "NOT_LITERAL", ctx.peek_code(1))
self.general_op_literal(ctx, operator.ne)
return True
def op_literal_ignore(self, ctx):
# match literal regardless of case
# <LITERAL_IGNORE> <code>
#self._log(ctx, "LITERAL_IGNORE", ctx.peek_code(1))
self.general_op_literal(ctx, operator.eq, ctx.state.lower)
return True
def op_not_literal_ignore(self, ctx):
# match literal regardless of case
# <LITERAL_IGNORE> <code>
#self._log(ctx, "LITERAL_IGNORE", ctx.peek_code(1))
self.general_op_literal(ctx, operator.ne, ctx.state.lower)
return True
def op_at(self, ctx):
# match at given position
# <AT> <code>
#self._log(ctx, "AT", ctx.peek_code(1))
if not self.at_dispatcher.dispatch(ctx.peek_code(1), ctx):
ctx.has_matched = False
#print('_sre.py:line693, update context.has_matched variable')
return True
ctx.skip_code(2)
return True
def op_category(self, ctx):
# match at given category
# <CATEGORY> <code>
#self._log(ctx, "CATEGORY", ctx.peek_code(1))
if ctx.at_end() or not self.ch_dispatcher.dispatch(ctx.peek_code(1), ctx):
ctx.has_matched = False
#print('_sre.py:line703, update context.has_matched variable')
return True
ctx.skip_code(2)
ctx.skip_char(1)
return True
def op_any(self, ctx):
# match anything (except a newline)
# <ANY>
#self._log(ctx, "ANY")
if ctx.at_end() or ctx.at_linebreak():
ctx.has_matched = False
#print('_sre.py:line714, update context.has_matched variable')
return True
ctx.skip_code(1)
ctx.skip_char(1)
return True
def op_any_all(self, ctx):
# match anything
# <ANY_ALL>
#self._log(ctx, "ANY_ALL")
if ctx.at_end():
ctx.has_matched = False
#print('_sre.py:line725, update context.has_matched variable')
return True
ctx.skip_code(1)
ctx.skip_char(1)
return True
def general_op_in(self, ctx, decorate=lambda x: x):
#self._log(ctx, "OP_IN")
#print('general_op_in')
if ctx.at_end():
ctx.has_matched = False
#print('_sre.py:line734, update context.has_matched variable')
return
skip = ctx.peek_code(1)
ctx.skip_code(2) # set op pointer to the set code
#print(ctx.peek_char(), ord(ctx.peek_char()),
# decorate(ord(ctx.peek_char())))
if not self.check_charset(ctx, decorate(ord(ctx.peek_char()))):
#print('_sre.py:line738, update context.has_matched variable')
ctx.has_matched = False
return
ctx.skip_code(skip - 1)
ctx.skip_char(1)
#print('end:general_op_in')
def op_in(self, ctx):
# match set member (or non_member)
# <IN> <skip> <set>
#self._log(ctx, "OP_IN")
self.general_op_in(ctx)
return True
def op_in_ignore(self, ctx):
# match set member (or non_member), disregarding case of current char
# <IN_IGNORE> <skip> <set>
#self._log(ctx, "OP_IN_IGNORE")
self.general_op_in(ctx, ctx.state.lower)
return True
def op_jump(self, ctx):
# jump forward
# <JUMP> <offset>
#self._log(ctx, "JUMP", ctx.peek_code(1))
ctx.skip_code(ctx.peek_code(1) + 1)
return True
# skip info
# <INFO> <skip>
op_info = op_jump
def op_mark(self, ctx):
# set mark
# <MARK> <gid>
#self._log(ctx, "OP_MARK", ctx.peek_code(1))
ctx.state.set_mark(ctx.peek_code(1), ctx.string_position)
ctx.skip_code(2)
return True
def op_branch(self, ctx):
# alternation
# <BRANCH> <0=skip> code <JUMP> ... <NULL>
#self._log(ctx, "BRANCH")
ctx.state.marks_push()
ctx.skip_code(1)
current_branch_length = ctx.peek_code(0)
while current_branch_length:
# The following tries to shortcut branches starting with a
# (unmatched) literal. _sre.c also shortcuts charsets here.
if not (ctx.peek_code(1) == OPCODES["literal"] and \
(ctx.at_end() or ctx.peek_code(2) != ord(ctx.peek_char()))):
ctx.state.string_position = ctx.string_position
child_context = ctx.push_new_context(1)
#print("_sre.py:803:op_branch")
yield False
if child_context.has_matched:
ctx.has_matched = True
yield True
ctx.state.marks_pop_keep()
ctx.skip_code(current_branch_length)
current_branch_length = ctx.peek_code(0)
ctx.state.marks_pop_discard()
ctx.has_matched = False
#print('_sre.py:line805, update context.has_matched variable')
yield True
def op_repeat_one(self, ctx):
# match repeated sequence (maximizing).
# this operator only works if the repeated item is exactly one character
# wide, and we're not already collecting backtracking points.
# <REPEAT_ONE> <skip> <1=min> <2=max> item <SUCCESS> tail
mincount = ctx.peek_code(2)
maxcount = ctx.peek_code(3)
#print("repeat one", mincount, maxcount)
#self._log(ctx, "REPEAT_ONE", mincount, maxcount)
if ctx.remaining_chars() < mincount:
ctx.has_matched = False
yield True
ctx.state.string_position = ctx.string_position
count = self.count_repetitions(ctx, maxcount)
ctx.skip_char(count)
if count < mincount:
ctx.has_matched = False
yield True
if ctx.peek_code(ctx.peek_code(1) + 1) == OPCODES["success"]:
# tail is empty. we're finished
ctx.state.string_position = ctx.string_position
ctx.has_matched = True
yield True
ctx.state.marks_push()
if ctx.peek_code(ctx.peek_code(1) + 1) == OPCODES["literal"]:
# Special case: Tail starts with a literal. Skip positions where
# the rest of the pattern cannot possibly match.
char = ctx.peek_code(ctx.peek_code(1) + 2)
while True:
while count >= mincount and \
(ctx.at_end() or ord(ctx.peek_char()) != char):
ctx.skip_char(-1)
count -= 1
if count < mincount:
break
ctx.state.string_position = ctx.string_position
child_context = ctx.push_new_context(ctx.peek_code(1) + 1)
#print("_sre.py:856:push_new_context")
yield False
if child_context.has_matched:
ctx.has_matched = True
yield True
ctx.skip_char(-1)
count -= 1
ctx.state.marks_pop_keep()
else:
# General case: backtracking
while count >= mincount:
ctx.state.string_position = ctx.string_position
child_context = ctx.push_new_context(ctx.peek_code(1) + 1)
yield False
if child_context.has_matched:
ctx.has_matched = True
yield True
ctx.skip_char(-1)
count -= 1
ctx.state.marks_pop_keep()
ctx.state.marks_pop_discard()
#ctx.has_matched = False
ctx.has_matched = True # <== this should be True (so match object gets returned to program)
yield True
def op_min_repeat_one(self, ctx):
# match repeated sequence (minimizing)
# <MIN_REPEAT_ONE> <skip> <1=min> <2=max> item <SUCCESS> tail
mincount = ctx.peek_code(2)
maxcount = ctx.peek_code(3)
#self._log(ctx, "MIN_REPEAT_ONE", mincount, maxcount)
if ctx.remaining_chars() < mincount:
ctx.has_matched = False
yield True
ctx.state.string_position = ctx.string_position
if mincount == 0:
count = 0
else:
count = self.count_repetitions(ctx, mincount)
if count < mincount:
ctx.has_matched = False
#print('_sre.py:line891, update context.has_matched variable')
yield True
ctx.skip_char(count)
if ctx.peek_code(ctx.peek_code(1) + 1) == OPCODES["success"]:
# tail is empty. we're finished
ctx.state.string_position = ctx.string_position
ctx.has_matched = True
yield True
ctx.state.marks_push()
while maxcount == MAXREPEAT or count <= maxcount:
ctx.state.string_position = ctx.string_position
child_context = ctx.push_new_context(ctx.peek_code(1) + 1)
#print('_sre.py:916:push new context')
yield False
if child_context.has_matched:
ctx.has_matched = True
yield True
ctx.state.string_position = ctx.string_position
if self.count_repetitions(ctx, 1) == 0:
break
ctx.skip_char(1)
count += 1
ctx.state.marks_pop_keep()
ctx.state.marks_pop_discard()
ctx.has_matched = False
yield True
def op_repeat(self, ctx):
# create repeat context. all the hard work is done by the UNTIL
# operator (MAX_UNTIL, MIN_UNTIL)
# <REPEAT> <skip> <1=min> <2=max> item <UNTIL> tail
#self._log(ctx, "REPEAT", ctx.peek_code(2), ctx.peek_code(3))
#if ctx.state.repeat is None:
# print("951:ctx.state.repeat is None")
# #ctx.state.repeat=_RepeatContext(ctx)
print("repeat", ctx.state.repeat)
repeat = _RepeatContext(ctx)
ctx.state.repeat = repeat
ctx.state.string_position = ctx.string_position
child_context = ctx.push_new_context(ctx.peek_code(1) + 1)
#print("_sre.py:941:push new context", id(child_context))
#print(child_context.state.repeat)
#print(ctx.state.repeat)
# are these two yields causing the issue?
yield False
ctx.state.repeat = repeat.previous
ctx.has_matched = child_context.has_matched
yield True
def op_max_until(self, ctx):
# maximizing repeat
# <REPEAT> <skip> <1=min> <2=max> item <MAX_UNTIL> tail
repeat = ctx.state.repeat
#print("op_max_until") #, id(ctx.state.repeat))
if repeat is None:
#print(id(ctx), id(ctx.state))
raise RuntimeError("Internal re error: MAX_UNTIL without REPEAT.")
mincount = repeat.peek_code(2)
maxcount = repeat.peek_code(3)
ctx.state.string_position = ctx.string_position
count = repeat.count + 1
#self._log(ctx, "MAX_UNTIL", count)
if count < mincount:
# not enough matches
repeat.count = count
child_context = repeat.push_new_context(4)
yield False
ctx.has_matched = child_context.has_matched
if not ctx.has_matched:
repeat.count = count - 1
ctx.state.string_position = ctx.string_position
yield True
if (count < maxcount or maxcount == MAXREPEAT) \
and ctx.state.string_position != repeat.last_position:
# we may have enough matches, if we can match another item, do so
repeat.count = count
ctx.state.marks_push()
save_last_position = repeat.last_position # zero-width match protection
repeat.last_position = ctx.state.string_position
child_context = repeat.push_new_context(4)
yield False
repeat.last_position = save_last_position
if child_context.has_matched:
ctx.state.marks_pop_discard()
ctx.has_matched = True
yield True
ctx.state.marks_pop()
repeat.count = count - 1
ctx.state.string_position = ctx.string_position
# cannot match more repeated items here. make sure the tail matches
ctx.state.repeat = repeat.previous
child_context = ctx.push_new_context(1)
#print("_sre.py:987:op_max_until")
yield False
ctx.has_matched = child_context.has_matched
if not ctx.has_matched:
ctx.state.repeat = repeat
ctx.state.string_position = ctx.string_position
yield True
def op_min_until(self, ctx):
# minimizing repeat
# <REPEAT> <skip> <1=min> <2=max> item <MIN_UNTIL> tail
repeat = ctx.state.repeat
if repeat is None:
raise RuntimeError("Internal re error: MIN_UNTIL without REPEAT.")
mincount = repeat.peek_code(2)
maxcount = repeat.peek_code(3)
ctx.state.string_position = ctx.string_position
count = repeat.count + 1
#self._log(ctx, "MIN_UNTIL", count)
if count < mincount:
# not enough matches
repeat.count = count
child_context = repeat.push_new_context(4)
yield False
ctx.has_matched = child_context.has_matched
if not ctx.has_matched:
repeat.count = count - 1
ctx.state.string_position = ctx.string_position
yield True
# see if the tail matches
ctx.state.marks_push()
ctx.state.repeat = repeat.previous
child_context = ctx.push_new_context(1)
#print('_sre.py:1022:push new context')
yield False
if child_context.has_matched:
ctx.has_matched = True
yield True
ctx.state.repeat = repeat
ctx.state.string_position = ctx.string_position
ctx.state.marks_pop()
# match more until tail matches
if count >= maxcount and maxcount != MAXREPEAT:
ctx.has_matched = False
#print('_sre.py:line1022, update context.has_matched variable')
yield True
repeat.count = count
child_context = repeat.push_new_context(4)
yield False
ctx.has_matched = child_context.has_matched
if not ctx.has_matched:
repeat.count = count - 1
ctx.state.string_position = ctx.string_position
yield True
def general_op_groupref(self, ctx, decorate=lambda x: x):
group_start, group_end = ctx.state.get_marks(ctx.peek_code(1))
if group_start is None or group_end is None or group_end < group_start:
ctx.has_matched = False
return True
while group_start < group_end:
if ctx.at_end() or decorate(ord(ctx.peek_char())) \
!= decorate(ord(ctx.state.string[group_start])):
ctx.has_matched = False
#print('_sre.py:line1042, update context.has_matched variable')
return True
group_start += 1
ctx.skip_char(1)
ctx.skip_code(2)
return True
def op_groupref(self, ctx):
# match backreference
# <GROUPREF> <zero-based group index>
#self._log(ctx, "GROUPREF", ctx.peek_code(1))
return self.general_op_groupref(ctx)
def op_groupref_ignore(self, ctx):
# match backreference case-insensitive
# <GROUPREF_IGNORE> <zero-based group index>
#self._log(ctx, "GROUPREF_IGNORE", ctx.peek_code(1))
return self.general_op_groupref(ctx, ctx.state.lower)
def op_groupref_exists(self, ctx):
# <GROUPREF_EXISTS> <group> <skip> codeyes <JUMP> codeno ...
#self._log(ctx, "GROUPREF_EXISTS", ctx.peek_code(1))
group_start, group_end = ctx.state.get_marks(ctx.peek_code(1))
if group_start is None or group_end is None or group_end < group_start:
ctx.skip_code(ctx.peek_code(2) + 1)
else:
ctx.skip_code(3)
return True
def op_assert(self, ctx):
# assert subpattern
# <ASSERT> <skip> <back> <pattern>
#self._log(ctx, "ASSERT", ctx.peek_code(2))
ctx.state.string_position = ctx.string_position - ctx.peek_code(2)
if ctx.state.string_position < 0:
ctx.has_matched = False
yield True
child_context = ctx.push_new_context(3)
yield False
if child_context.has_matched:
ctx.skip_code(ctx.peek_code(1) + 1)
else:
ctx.has_matched = False
yield True
def op_assert_not(self, ctx):
# assert not subpattern
# <ASSERT_NOT> <skip> <back> <pattern>
#self._log(ctx, "ASSERT_NOT", ctx.peek_code(2))
ctx.state.string_position = ctx.string_position - ctx.peek_code(2)
if ctx.state.string_position >= 0:
child_context = ctx.push_new_context(3)
yield False
if child_context.has_matched:
ctx.has_matched = False
yield True
ctx.skip_code(ctx.peek_code(1) + 1)
yield True
def unknown(self, ctx):
#self._log(ctx, "UNKNOWN", ctx.peek_code())
raise RuntimeError("Internal re error. Unknown opcode: %s" % ctx.peek_code())
def check_charset(self, ctx, char):
"""Checks whether a character matches set of arbitrary length. Assumes
the code pointer is at the first member of the set."""
self.set_dispatcher.reset(char)
save_position = ctx.code_position
result = None
while result is None:
result = self.set_dispatcher.dispatch(ctx.peek_code(), ctx)
ctx.code_position = save_position
#print("_sre.py:1123:check_charset", result)
return result
def count_repetitions(self, ctx, maxcount):
    """Returns the number of repetitions of a single item, starting from the
    current string position. The code pointer is expected to point to a
    REPEAT_ONE operation (with the repeated 4 ahead)."""
    count = 0
    # Never try to match past the end of the subject string.
    real_maxcount = ctx.state.end - ctx.string_position
    if maxcount < real_maxcount and maxcount != MAXREPEAT:
        real_maxcount = maxcount
    # XXX could special case every single character pattern here, as in C.
    # This is a general solution, a bit hackisch, but works and should be
    # efficient.
    code_position = ctx.code_position
    string_position = ctx.string_position
    ctx.skip_code(4)
    reset_position = ctx.code_position
    while count < real_maxcount:
        # this works because the single character pattern is followed by
        # a success opcode
        ctx.code_position = reset_position
        self.dispatch(ctx.peek_code(), ctx)
        #print("count_repetitions", ctx.has_matched, count)
        if ctx.has_matched is False: # could be None as well
            break
        count += 1
        ctx.has_matched = None
    # Restore both pointers: only the repetition count is reported;
    # the caller decides how far to actually advance.
    ctx.code_position = code_position
    ctx.string_position = string_position
    return count
def _log(self, context, opname, *args):
    # Debug helper: render each extra arg with "%s " (note trailing space)
    # and forward one trace line to the module-level _log().
    arg_string = ("%s " * len(args)) % args
    _log("|%s|%s|%s %s" % (context.pattern_codes,
        context.string_position, opname, arg_string))
# Populate the opcode -> handler table from the op_* methods defined above.
_OpcodeDispatcher.build_dispatch_table(OPCODES, "op_")
class _CharsetDispatcher(_Dispatcher):
    """Dispatches the opcodes that make up an <IN> character set.

    Each set_* handler either returns the match truth value (``self.ok``,
    which set_negate may invert) or returns None after skipping its own
    operands so dispatching continues with the next set member.
    """
    def __init__(self):
        self.ch_dispatcher = _ChcodeDispatcher()
    def reset(self, char):
        # char: code of the character being tested; ok: polarity of the set.
        self.char = char
        self.ok = True
    def set_failure(self, ctx):
        # End of set reached without a match.
        return not self.ok
    def set_literal(self, ctx):
        # <LITERAL> <code>
        if ctx.peek_code(1) == self.char:
            return self.ok
        else:
            ctx.skip_code(2)
    def set_category(self, ctx):
        # <CATEGORY> <code>
        if self.ch_dispatcher.dispatch(ctx.peek_code(1), ctx):
            return self.ok
        else:
            ctx.skip_code(2)
    def set_charset(self, ctx):
        # <CHARSET> <bitmap> (16 bits per code word)
        char_code = self.char
        ctx.skip_code(1) # point to beginning of bitmap
        if CODESIZE == 2:
            if char_code < 256 and ctx.peek_code(char_code >> 4) \
                    & (1 << (char_code & 15)):
                return self.ok
            ctx.skip_code(16) # skip bitmap
        else:
            if char_code < 256 and ctx.peek_code(char_code >> 5) \
                    & (1 << (char_code & 31)):
                return self.ok
            ctx.skip_code(8) # skip bitmap
    def set_range(self, ctx):
        # <RANGE> <lower> <upper>
        if ctx.peek_code(1) <= self.char <= ctx.peek_code(2):
            return self.ok
        ctx.skip_code(3)
    def set_negate(self, ctx):
        self.ok = not self.ok
        ctx.skip_code(1)
    #fixme brython. array module doesn't exist
    def set_bigcharset(self, ctx):
        # BUG FIX: the original raised ``NotImplementationError`` which is
        # not a builtin, so this path actually died with a NameError.
        raise NotImplementedError("_sre.py: set_bigcharset, array not implemented")
        # <BIGCHARSET> <blockcount> <256 blockindices> <blocks>
        # Unreachable reference implementation, kept for when array support lands.
        char_code = self.char
        count = ctx.peek_code(1)
        ctx.skip_code(2)
        if char_code < 65536:
            block_index = char_code >> 8
            # NB: there are CODESIZE block indices per bytecode
            a = array.array("B")
            a.fromstring(array.array(CODESIZE == 2 and "H" or "I",
                    [ctx.peek_code(block_index // CODESIZE)]).tostring())
            block = a[block_index % CODESIZE]
            ctx.skip_code(256 // CODESIZE) # skip block indices
            block_value = ctx.peek_code(block * (32 // CODESIZE)
                    + ((char_code & 255) >> (CODESIZE == 2 and 4 or 5)))
            if block_value & (1 << (char_code & ((8 * CODESIZE) - 1))):
                return self.ok
        else:
            ctx.skip_code(256 // CODESIZE) # skip block indices
        ctx.skip_code(count * (32 // CODESIZE)) # skip blocks
    def unknown(self, ctx):
        return False
# Populate the charset opcode -> handler table from the set_* methods.
_CharsetDispatcher.build_dispatch_table(OPCODES, "set_")
class _AtcodeDispatcher(_Dispatcher):
    """Dispatches AT_* position assertions (anchors such as ^, $, \\b)."""
    def at_beginning(self, ctx):
        return ctx.at_beginning()
    # AT_BEGINNING_STRING (\A) behaves like plain AT_BEGINNING here.
    at_beginning_string = at_beginning
    def at_beginning_line(self, ctx):
        # ^ in MULTILINE: start of string, or just after a line break.
        return ctx.at_beginning() or _is_linebreak(ctx.peek_char(-1))
    def at_end(self, ctx):
        # $: at the very end, or immediately before a single trailing newline.
        return (ctx.remaining_chars() == 1 and ctx.at_linebreak()) or ctx.at_end()
    def at_end_line(self, ctx):
        return ctx.at_linebreak() or ctx.at_end()
    def at_end_string(self, ctx):
        # \Z: strictly the end of the string.
        return ctx.at_end()
    def at_boundary(self, ctx):
        return ctx.at_boundary(_is_word)
    def at_non_boundary(self, ctx):
        return not ctx.at_boundary(_is_word)
    def at_loc_boundary(self, ctx):
        # Locale-aware \b variant.
        return ctx.at_boundary(_is_loc_word)
    def at_loc_non_boundary(self, ctx):
        return not ctx.at_boundary(_is_loc_word)
    def at_uni_boundary(self, ctx):
        # Unicode-aware \b variant.
        return ctx.at_boundary(_is_uni_word)
    def at_uni_non_boundary(self, ctx):
        return not ctx.at_boundary(_is_uni_word)
    def unknown(self, ctx):
        return False
# Populate the AT_* code -> handler table (no method-name prefix).
_AtcodeDispatcher.build_dispatch_table(ATCODES, "")
class _ChcodeDispatcher(_Dispatcher):
    """Dispatches CATEGORY_* codes: character-class tests (\\d, \\w, \\s, ...)
    against the character at the current position, in ASCII, locale and
    Unicode flavours."""
    def category_digit(self, ctx):
        return _is_digit(ctx.peek_char())
    def category_not_digit(self, ctx):
        return not _is_digit(ctx.peek_char())
    def category_space(self, ctx):
        return _is_space(ctx.peek_char())
    def category_not_space(self, ctx):
        return not _is_space(ctx.peek_char())
    def category_word(self, ctx):
        return _is_word(ctx.peek_char())
    def category_not_word(self, ctx):
        return not _is_word(ctx.peek_char())
    def category_linebreak(self, ctx):
        return _is_linebreak(ctx.peek_char())
    def category_not_linebreak(self, ctx):
        return not _is_linebreak(ctx.peek_char())
    def category_loc_word(self, ctx):
        return _is_loc_word(ctx.peek_char())
    def category_loc_not_word(self, ctx):
        return not _is_loc_word(ctx.peek_char())
    def category_uni_digit(self, ctx):
        # Unicode flavours delegate to str methods / tables rather than
        # the 7-bit _ascii_char_info table.
        return ctx.peek_char().isdigit()
    def category_uni_not_digit(self, ctx):
        return not ctx.peek_char().isdigit()
    def category_uni_space(self, ctx):
        return ctx.peek_char().isspace()
    def category_uni_not_space(self, ctx):
        return not ctx.peek_char().isspace()
    def category_uni_word(self, ctx):
        return _is_uni_word(ctx.peek_char())
    def category_uni_not_word(self, ctx):
        return not _is_uni_word(ctx.peek_char())
    def category_uni_linebreak(self, ctx):
        return ord(ctx.peek_char()) in _uni_linebreaks
    def category_uni_not_linebreak(self, ctx):
        return ord(ctx.peek_char()) not in _uni_linebreaks
    def unknown(self, ctx):
        return False
# Populate the CATEGORY_* code -> handler table (no method-name prefix).
_ChcodeDispatcher.build_dispatch_table(CHCODES, "")
# Per-character classification flags for ASCII codes 0-127, consulted by the
# _is_* helpers below: bit 0 (1) = digit, bit 1 (2) = space, bit 4 (16) =
# word character.  Other bits appear in the table but are not read here.
_ascii_char_info = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 6, 2,
    2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 25, 25, 25, 25, 25, 25, 25,
    25, 25, 0, 0, 0, 0, 0, 0, 0, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
    24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 0, 0,
    0, 0, 16, 0, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24,
    24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 0, 0, 0, 0, 0 ]
def _is_digit(char):
    # Bit 0 of the ASCII info table flags digits; non-ASCII never matches.
    code = ord(char)
    return code < 128 and _ascii_char_info[code] & 1
def _is_space(char):
    # Bit 1 of the ASCII info table flags whitespace; non-ASCII never matches.
    code = ord(char)
    return code < 128 and _ascii_char_info[code] & 2
def _is_word(char):
    # NB: non-ASCII chars aren't words according to _sre.c
    # Bit 4 of the ASCII info table flags word characters (alnum + '_').
    code = ord(char)
    return code < 128 and _ascii_char_info[code] & 16
def _is_loc_word(char):
return (not (ord(char) & ~255) and char.isalnum()) or char == '_'
def _is_uni_word(char):
# not valid in python 3
#return unichr(ord(char)).isalnum() or char == '_'
return chr(ord(char)).isalnum() or char == '_'
def _is_linebreak(char):
return char == "\n"
# Static list of all unicode codepoints reported by Py_UNICODE_ISLINEBREAK.
_uni_linebreaks = [10, 13, 28, 29, 30, 133, 8232, 8233]
def _log(message):
    # Debug tracing hook: flip the constant to 1 to print engine traces.
    if 0:
        print(message)
| gpl-2.0 |
rockyzhang/zhangyanhit-python-for-android-mips | python-build/python-libs/gdata/build/lib/gdata/tlslite/integration/TLSSocketServerMixIn.py | 320 | 2203 | """TLS Lite + SocketServer."""
from gdata.tlslite.TLSConnection import TLSConnection
class TLSSocketServerMixIn:
    """
    This class can be mixed in with any L{SocketServer.TCPServer} to
    add TLS support.

    To use this class, define a new class that inherits from it and
    some L{SocketServer.TCPServer} (with the mix-in first). Then
    implement the handshake() method, doing some sort of server
    handshake on the connection argument. If the handshake method
    returns True, the RequestHandler will be triggered. Below is a
    complete example of a threaded HTTPS server::

        from SocketServer import *
        from BaseHTTPServer import *
        from SimpleHTTPServer import *
        from tlslite.api import *

        s = open("./serverX509Cert.pem").read()
        x509 = X509()
        x509.parse(s)
        certChain = X509CertChain([x509])

        s = open("./serverX509Key.pem").read()
        privateKey = parsePEMKey(s, private=True)

        sessionCache = SessionCache()

        class MyHTTPServer(ThreadingMixIn, TLSSocketServerMixIn,
                           HTTPServer):
            def handshake(self, tlsConnection):
                try:
                    tlsConnection.handshakeServer(certChain=certChain,
                                                  privateKey=privateKey,
                                                  sessionCache=sessionCache)
                    tlsConnection.ignoreAbruptClose = True
                    return True
                except TLSError, error:
                    print "Handshake failure:", str(error)
                    return False

        httpd = MyHTTPServer(('localhost', 443), SimpleHTTPRequestHandler)
        httpd.serve_forever()
    """

    def finish_request(self, sock, client_address):
        # Wrap the accepted socket in a TLSConnection; only hand the
        # request off to the handler when handshake() returns exactly True.
        tlsConnection = TLSConnection(sock)
        if self.handshake(tlsConnection) == True:
            self.RequestHandlerClass(tlsConnection, client_address, self)
            tlsConnection.close()

    #Implement this method to do some form of handshaking. Return True
    #if the handshake finishes properly and the request is authorized.
    def handshake(self, tlsConnection):
        raise NotImplementedError()
| apache-2.0 |
rddim/Notepad-plus-plus | scintilla/qt/ScintillaEdit/WidgetGen.py | 5 | 8222 | #!/usr/bin/env python3
# WidgetGen.py - regenerate the ScintillaWidgetCpp.cpp and ScintillaWidgetCpp.h files
# Check that API includes all gtkscintilla2 functions
import sys
import os
import getopt
scintillaDirectory = "../.."
scintillaScriptsDirectory = os.path.join(scintillaDirectory, "scripts")
sys.path.append(scintillaScriptsDirectory)
import Face
from FileGenerator import GenerateFile
def underscoreName(s):
    """Convert a CamelCase API name to snake_case, matching gtkscintilla2."""
    # Multi-capital fragments that must be treated as single words.
    for word in ('WS', 'EOL', 'AutoC', 'KeyWords', 'BackSpace',
                 'UnIndents', 'RE', 'RGBA'):
        s = s.replace(word, word.capitalize())
    pieces = []
    for ch in s:
        if ch.isupper():
            if pieces:
                pieces.append('_')
            pieces.append(ch.lower())
        else:
            pieces.append(ch)
    return ''.join(pieces)
def normalisedName(s, options, role=None):
    """Map an interface name to the configured naming convention."""
    if not options["qtStyle"]:
        return underscoreName(s)
    # Qt style: getters drop their "Get" prefix; names are lowerCamelCase.
    if role == "get":
        s = s.replace("Get", "")
    return s[0].lower() + s[1:]
# Interface-file type names mapped to the C++ types used in generated code.
typeAliases = {
    "position": "int",
    "line": "int",
    "pointer": "int",
    "colour": "int",
    "keymod": "int",
    "string": "const char *",
    "stringresult": "const char *",
    "cells": "const char *",
}
def cppAlias(s):
    """Map an interface type name to the C++ type used in the wrapper.
    Enumerations collapse to int; unknown names pass through unchanged."""
    if s in typeAliases:
        return typeAliases[s]
    elif Face.IsEnumeration(s):
        return "int"
    else:
        return s
# Interface types the generator knows how to translate; anything else is
# skipped (see checkTypes below).
understoodTypes = ["", "void", "int", "bool", "position", "line", "pointer",
    "colour", "keymod", "string", "stringresult", "cells"]
def understoodType(t):
    # Generatable if it is a known primitive/alias or an iface enumeration.
    return t in understoodTypes or Face.IsEnumeration(t)
def checkTypes(name, v):
    """Return True when the return type and both parameter types of feature
    *v* are all translatable; features failing this are not generated."""
    understandAllTypes = True
    if not understoodType(v["ReturnType"]):
        #~ print("Do not understand", v["ReturnType"], "for", name)
        understandAllTypes = False
    if not understoodType(v["Param1Type"]):
        #~ print("Do not understand", v["Param1Type"], "for", name)
        understandAllTypes = False
    if not understoodType(v["Param2Type"]):
        #~ print("Do not understand", v["Param2Type"], "for", name)
        understandAllTypes = False
    return understandAllTypes
def arguments(v, stringResult, options):
    """Render the C++ formal parameter list for feature *v*.

    int-typed parameters are widened to sptr_t.  For stringresult features
    the second parameter is omitted: the generated body (TextReturner in
    printCPPFile) manages that buffer itself."""
    ret = ""
    p1Type = cppAlias(v["Param1Type"])
    if p1Type == "int":
        p1Type = "sptr_t"
    if p1Type:
        ret = ret + p1Type + " " + normalisedName(v["Param1Name"], options)
    p2Type = cppAlias(v["Param2Type"])
    if p2Type == "int":
        p2Type = "sptr_t"
    if p2Type and not stringResult:
        if p1Type:
            ret = ret + ", "
        ret = ret + p2Type + " " + normalisedName(v["Param2Name"], options)
    return ret
def printPyFile(f, options):
    """Render ScintillaConstants.py lines: plain values, SCN_* event codes
    and SCI_* message codes.  Deprecated features are skipped."""
    out = []
    for name in f.order:
        feature = f.features[name]
        if feature["Category"] == "Deprecated":
            continue
        kind = feature["FeatureType"]
        if kind == "val":
            out.append("%s=%s" % (name, feature["Value"]))
        elif kind == "evt":
            out.append("SCN_%s=%s" % (name.upper(), feature["Value"]))
        elif kind == "fun":
            out.append("SCI_%s=%s" % (name.upper(), feature["Value"]))
    return out
def printHFile(f, options):
    """Render one method declaration per generatable fun/get/set feature
    for ScintillaEdit.h."""
    out = []
    for name in f.order:
        v = f.features[name]
        if v["Category"] != "Deprecated":
            feat = v["FeatureType"]
            if feat in ["fun", "get", "set"]:
                if checkTypes(name, v):
                    # Getters are declared const.
                    constDeclarator = " const" if feat == "get" else ""
                    returnType = cppAlias(v["ReturnType"])
                    if returnType == "int":
                        returnType = "sptr_t"
                    # stringresult features return the text as a QByteArray.
                    stringResult = v["Param2Type"] == "stringresult"
                    if stringResult:
                        returnType = "QByteArray"
                    out.append("\t" + returnType + " " + normalisedName(name, options, feat) + "(" +
                        arguments(v, stringResult, options)+
                        ")" + constDeclarator + ";")
    return out
def methodNames(f, options):
    """Yield the normalised name of every generatable fun/get/set feature."""
    for name in f.order:
        v = f.features[name]
        if v["Category"] != "Deprecated":
            feat = v["FeatureType"]
            if feat in ["fun", "get", "set"]:
                if checkTypes(name, v):
                    yield normalisedName(name, options)
def printCPPFile(f, options):
    """Render one method definition per generatable fun/get/set feature for
    ScintillaEdit.cpp.  Each body forwards to send(); stringresult features
    go through TextReturner, which owns the result buffer."""
    out = []
    for name in f.order:
        v = f.features[name]
        if v["Category"] != "Deprecated":
            feat = v["FeatureType"]
            if feat in ["fun", "get", "set"]:
                if checkTypes(name, v):
                    constDeclarator = " const" if feat == "get" else ""
                    featureDefineName = "SCI_" + name.upper()
                    returnType = cppAlias(v["ReturnType"])
                    if returnType == "int":
                        returnType = "sptr_t"
                    stringResult = v["Param2Type"] == "stringresult"
                    if stringResult:
                        returnType = "QByteArray"
                    returnStatement = ""
                    if returnType != "void":
                        returnStatement = "return "
                    out.append(returnType + " ScintillaEdit::" + normalisedName(name, options, feat) + "(" +
                        arguments(v, stringResult, options) +
                        ")" + constDeclarator + " {")
                    returns = ""
                    if stringResult:
                        # TextReturner(SCI_..., param1) fetches the text.
                        returns += "    " + returnStatement + "TextReturner(" + featureDefineName + ", "
                        if "*" in cppAlias(v["Param1Type"]):
                            # Pointer parameters are cast to sptr_t.
                            returns += "(sptr_t)"
                        if v["Param1Name"]:
                            returns += normalisedName(v["Param1Name"], options)
                        else:
                            returns += "0"
                        returns += ");"
                    else:
                        # Plain send(SCI_..., param1, param2).
                        returns += "    " + returnStatement + "send(" + featureDefineName + ", "
                        if "*" in cppAlias(v["Param1Type"]):
                            returns += "(sptr_t)"
                        if v["Param1Name"]:
                            returns += normalisedName(v["Param1Name"], options)
                        else:
                            returns += "0"
                        returns += ", "
                        if "*" in cppAlias(v["Param2Type"]):
                            returns += "(sptr_t)"
                        if v["Param2Name"]:
                            returns += normalisedName(v["Param2Name"], options)
                        else:
                            returns += "0"
                        returns += ");"
                    out.append(returns)
                    out.append("}")
                    out.append("")
    return out
def gtkNames(path="C:/Users/Neil/Downloads/wingide-source-4.0.1-1/wingide-source-4.0.1-1/external/gtkscintilla2/gtkscintilla.c"):
    """Yield the function names exposed by gtkscintilla2.

    ``path`` locates gtkscintilla.c.  The default preserves the original
    hard-coded path (the author's local checkout); pass your own copy of
    the file when running the coverage check elsewhere.
    """
    with open(path) as f:
        for line in f.readlines():
            if "gtk_scintilla_" in line:
                # Second token is "gtk_scintilla_<name>(..."; strip the
                # 14-char prefix and anything from '(' on.
                name = line.split()[1][14:]
                if '(' in name:
                    name = name.split('(')[0]
                yield name
def usage():
    """Print the command-line help text to stdout."""
    for line in (
        "WidgetGen.py [-c|--clean][-h|--help][-u|--underscore-names]",
        "",
        "Generate full APIs for ScintillaEdit class and ScintillaConstants.py.",
        "",
        "options:",
        "",
        "-c --clean remove all generated code from files",
        "-h --help display this text",
        "-u --underscore-names use method_names consistent with GTK+ standards",
    ):
        print(line)
def readInterface(cleanGenerated):
    """Load Scintilla.iface; when cleaning, return an empty Face so the
    generators emit nothing and the templates are stripped."""
    f = Face.Face()
    if not cleanGenerated:
        f.ReadFromFile("../../include/Scintilla.iface")
    return f
def main(argv):
    """Parse options and generate (or clean) ScintillaEdit.{h,cpp} and
    ScintillaConstants.py from the interface definition."""
    # Using local path for gtkscintilla2 so don't default to checking
    checkGTK = False
    cleanGenerated = False
    qtStyleInterface = True
    # The --gtk-check option checks for full coverage of the gtkscintilla2 API but
    # depends on a particular directory so is not mentioned in --help.
    opts, args = getopt.getopt(argv, "hcgu", ["help", "clean", "gtk-check", "underscore-names"])
    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit()
        elif opt in ("-c", "--clean"):
            cleanGenerated = True
        elif opt in ("-g", "--gtk-check"):
            checkGTK = True
        elif opt in ("-u", "--underscore-names"):
            qtStyleInterface = False

    options = {"qtStyle": qtStyleInterface}
    f = readInterface(cleanGenerated)
    # (The original wrapped the calls below in ``try: ... except: raise``,
    # which is a no-op; exceptions propagate exactly as before.)
    GenerateFile("ScintillaEdit.cpp.template", "ScintillaEdit.cpp",
        "/* ", True, printCPPFile(f, options))
    GenerateFile("ScintillaEdit.h.template", "ScintillaEdit.h",
        "/* ", True, printHFile(f, options))
    GenerateFile("../ScintillaEditPy/ScintillaConstants.py.template",
        "../ScintillaEditPy/ScintillaConstants.py",
        "# ", True, printPyFile(f, options))
    if checkGTK:
        # BUG FIX: methodNames() requires the options mapping; the original
        # called methodNames(f) and raised TypeError whenever --gtk-check
        # was used.
        names = set(methodNames(f, options))
        #~ print("\n".join(names))
        namesGtk = set(gtkNames())
        for name in namesGtk:
            if name not in names:
                print(name, "not found in Qt version")
        for name in names:
            if name not in namesGtk:
                print(name, "not found in GTK+ version")
    if cleanGenerated:
        for file in ["ScintillaEdit.cpp", "ScintillaEdit.h", "../ScintillaEditPy/ScintillaConstants.py"]:
            try:
                os.remove(file)
            except OSError:
                pass
# Script entry point: generate (or clean) the wrapper sources.
if __name__ == "__main__":
    main(sys.argv[1:])
| gpl-3.0 |
mahak/spark | python/pyspark/storagelevel.py | 23 | 2785 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__all__ = ["StorageLevel"]
class StorageLevel(object):
    """
    Flags for controlling the storage of an RDD. Each StorageLevel records whether to use memory,
    whether to drop the RDD to disk if it falls out of memory, whether to keep the data in memory
    in a JAVA-specific serialized format, and whether to replicate the RDD partitions on multiple
    nodes. Also contains static constants for some commonly used storage levels, MEMORY_ONLY.
    Since the data is always serialized on the Python side, all the constants use the serialized
    formats.
    """

    def __init__(self, useDisk, useMemory, useOffHeap, deserialized, replication=1):
        self.useDisk = useDisk
        self.useMemory = useMemory
        self.useOffHeap = useOffHeap
        self.deserialized = deserialized
        self.replication = replication

    def __repr__(self):
        return "StorageLevel({0}, {1}, {2}, {3}, {4})".format(
            self.useDisk, self.useMemory, self.useOffHeap,
            self.deserialized, self.replication)

    def __str__(self):
        # Assemble the human-readable form from the enabled flags.
        parts = []
        if self.useDisk:
            parts.append("Disk ")
        if self.useMemory:
            parts.append("Memory ")
        if self.useOffHeap:
            parts.append("OffHeap ")
        parts.append("Deserialized " if self.deserialized else "Serialized ")
        parts.append("%sx Replicated" % self.replication)
        return "".join(parts)
# Static shortcut instances for the common configurations (see the class
# docstring: Python data is always serialized, hence no *_SER variants).
StorageLevel.DISK_ONLY = StorageLevel(True, False, False, False)
StorageLevel.DISK_ONLY_2 = StorageLevel(True, False, False, False, 2)
StorageLevel.DISK_ONLY_3 = StorageLevel(True, False, False, False, 3)
StorageLevel.MEMORY_ONLY = StorageLevel(False, True, False, False)
StorageLevel.MEMORY_ONLY_2 = StorageLevel(False, True, False, False, 2)
StorageLevel.MEMORY_AND_DISK = StorageLevel(True, True, False, False)
StorageLevel.MEMORY_AND_DISK_2 = StorageLevel(True, True, False, False, 2)
StorageLevel.OFF_HEAP = StorageLevel(True, True, True, False, 1)
StorageLevel.MEMORY_AND_DISK_DESER = StorageLevel(True, True, False, True)
| apache-2.0 |
almeidapaulopt/frappe | frappe/core/doctype/communication/comment.py | 8 | 5243 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, absolute_import
import frappe
from frappe import _
import json
from frappe.core.doctype.user.user import extract_mentions
from frappe.utils import get_fullname, get_link_to_form
from frappe.website.render import clear_cache
from frappe.model.db_schema import add_column
from frappe.exceptions import ImplicitCommitError
def on_trash(doc):
    """When a Comment-type Communication is deleted, drop its entry from the
    parent document's cached `_comments` JSON."""
    if doc.communication_type != "Comment":
        return
    if doc.reference_doctype == "Message":
        return

    # Non-"Comment" comment types (e.g. the Info comments created by
    # add_info_comment) may only be deleted by a System Manager.
    if (doc.comment_type or "Comment") != "Comment":
        frappe.only_for("System Manager")

    # BUG FIX: the original removed entries from the list while iterating
    # over that same list, which can skip the element following each
    # removal; build a filtered copy instead.
    _comments = [c for c in get_comments_from_parent(doc)
        if c.get("name") != doc.name]
    update_comments_in_parent(doc.reference_doctype, doc.reference_name, _comments)
def update_comment_in_doc(doc):
    """Updates `_comments` (JSON) property in parent Document.
    Creates a column `_comments` if property does not exist.

    Only user created comments Communication or Comment of type Comment are saved.

    `_comments` format

        {
            "comment": [String],
            "by": [user],
            "name": [Comment Document name]
        }"""
    if doc.communication_type not in ("Comment", "Communication"):
        return
    if doc.communication_type == 'Comment' and doc.comment_type != 'Comment':
        # other updates
        return

    def get_content(doc):
        # Truncate long comments to 100 characters for the cached copy.
        return (doc.content[:97] + '...') if len(doc.content) > 100 else doc.content

    if doc.reference_doctype and doc.reference_name and doc.content:
        _comments = get_comments_from_parent(doc)

        updated = False
        # Update in place when this comment is already cached on the parent.
        for c in _comments:
            if c.get("name")==doc.name:
                c["comment"] = get_content(doc)
                updated = True
        if not updated:
            _comments.append({
                "comment": get_content(doc),
                "by": doc.sender or doc.owner,
                "name": doc.name
            })
        update_comments_in_parent(doc.reference_doctype, doc.reference_name, _comments)
def notify_mentions(doc):
    """Email every user @-mentioned in a new comment, linking back to the
    parent document."""
    if doc.communication_type != "Comment":
        return

    if doc.reference_doctype and doc.reference_name and doc.content and doc.comment_type=="Comment":
        mentions = extract_mentions(doc.content)

        if not mentions:
            return

        sender_fullname = get_fullname(frappe.session.user)
        # Build a readable label for the parent: use its title field when it
        # differs from the record name.
        title_field = frappe.get_meta(doc.reference_doctype).get_title_field()
        title = doc.reference_name if title_field == "name" else \
            frappe.db.get_value(doc.reference_doctype, doc.reference_name, title_field)

        if title != doc.reference_name:
            parent_doc_label = "{0}: {1} (#{2})".format(_(doc.reference_doctype),
                title, doc.reference_name)
        else:
            parent_doc_label = "{0}: {1}".format(_(doc.reference_doctype),
                doc.reference_name)

        subject = _("{0} mentioned you in a comment").format(sender_fullname)

        # Resolve mentioned usernames to enabled System Users only.
        recipients = [frappe.db.get_value("User", {"enabled": 1, "username": username, "user_type": "System User"})
            for username in mentions]
        frappe.sendmail(
            recipients=recipients,
            sender=frappe.session.user,
            subject=subject,
            template="mentioned_in_comment",
            args={
                "sender_fullname": sender_fullname,
                "comment": doc,
                "link": get_link_to_form(doc.reference_doctype, doc.reference_name, label=parent_doc_label)
            },
            header=[_('New Mention'), 'orange']
        )
def get_comments_from_parent(doc):
    """Return the parsed `_comments` list from the referenced parent
    document; an empty list when the column/table does not exist yet or
    the stored JSON is invalid."""
    try:
        _comments = frappe.db.get_value(doc.reference_doctype, doc.reference_name, "_comments") or "[]"

    except Exception as e:
        # MySQL error codes (see the matching handling in
        # update_comments_in_parent).
        if e.args[0] in (1146, 1054):
            # 1146 = no table
            # 1054 = missing column
            _comments = "[]"

        else:
            raise

    try:
        return json.loads(_comments)
    except ValueError:
        return []
def update_comments_in_parent(reference_doctype, reference_name, _comments):
    """Updates `_comments` property in parent Document with given dict.

    :param _comments: Dict of comments."""
    if not reference_doctype or frappe.db.get_value("DocType", reference_doctype, "issingle"):
        return

    try:
        # use sql, so that we do not mess with the timestamp
        # NOTE(review): the table name is interpolated with % — acceptable
        # only because reference_doctype is a DocType name, not user input;
        # the data values themselves are parameterized.
        frappe.db.sql("""update `tab%s` set `_comments`=%s where name=%s""" % (reference_doctype,
            "%s", "%s"), (json.dumps(_comments), reference_name))

    except Exception as e:
        if e.args[0] == 1054 and getattr(frappe.local, 'request', None):
            # missing column and in request, add column and update after commit
            frappe.local._comments = (getattr(frappe.local, "_comments", [])
                + [(reference_doctype, reference_name, _comments)])
        else:
            raise ImplicitCommitError

    else:
        # Invalidate the website cache when the parent has a route.
        if not frappe.flags.in_patch:
            reference_doc = frappe.get_doc(reference_doctype, reference_name)
            if getattr(reference_doc, "route", None):
                clear_cache(reference_doc.route)
def add_info_comment(**kwargs):
    """Create and insert a closed Info-type comment Communication.

    Caller-supplied kwargs provide the reference fields and content; the
    type/status fields are forced here."""
    kwargs.update({
        "doctype": "Communication",
        "communication_type": "Comment",
        "comment_type": "Info",
        "status": "Closed"
    })
    return frappe.get_doc(kwargs).insert(ignore_permissions=True)
def update_comments_in_parent_after_request():
    """update _comments in parent if _comments column is missing"""
    # Deferred updates queued by update_comments_in_parent when the column
    # did not exist mid-request: add the column, retry, then commit.
    if hasattr(frappe.local, "_comments"):
        for (reference_doctype, reference_name, _comments) in frappe.local._comments:
            add_column(reference_doctype, "_comments", "Text")
            update_comments_in_parent(reference_doctype, reference_name, _comments)

        frappe.db.commit()
| mit |
wpoely86/easybuild-easyblocks | easybuild/easyblocks/c/cufflinks.py | 3 | 2181 | ##
# This file is an EasyBuild reciPY as per https://github.com/hpcugent/easybuild
#
# Copyright:: Copyright 2012-2016 Uni.Lu/LCSB, NTUA
# Authors:: Cedric Laczny <cedric.laczny@uni.lu>, Fotis Georgatos <fotis@cern.ch>, Kenneth Hoste
# License:: MIT/GPL
# $Id$
#
# This work implements a part of the HPCBIOS project and is a component of the policy:
# http://hpcbios.readthedocs.org/en/latest/HPCBIOS_2012-94.html
##
"""
EasyBuild support for building and installing Cufflinks, implemented as an easyblock
@author: Cedric Laczny (Uni.Lu)
@author: Fotis Georgatos (Uni.Lu)
@author: Kenneth Hoste (Ghent University)
"""
import fileinput
import glob
import re
import os
import sys
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.modules import get_software_root
class EB_Cufflinks(ConfigureMake):
    """
    Support for building and installing Cufflinks
    """

    def configure_step(self):
        """
        Check for dependencies
        """
        # Cufflinks cannot build without these dependency modules loaded.
        for dep in ['Boost', 'Eigen', 'SAMtools']:
            if not get_software_root(dep):
                raise EasyBuildError("Dependency module %s not loaded?", dep)

        super(EB_Cufflinks, self).configure_step()

    def patch_step(self):
        """
        First we need to rename a few things, s.a. http://wiki.ci.uchicago.edu/Beagle/BuildingSoftware -> "Cufflinks"
        """
        build_dir = os.getcwd()
        source_files = build_dir + '/src/*.cpp'
        header_files = build_dir + '/src/*.h'
        files = glob.glob(source_files)
        files = files + (glob.glob(header_files))
        # Rename 'foreach' to 'for_each' in all sources/headers to avoid a
        # clash, editing in place (fileinput redirects stdout to the file).
        for fname in files:
            for line in fileinput.input(fname, inplace=1, backup='.orig'):
                line = re.sub(r'foreach', 'for_each', line, count=0)
                sys.stdout.write(line)
        # The blanket rename also mangles the boost/foreach.hpp include in
        # common.h; restore that one line.
        for line in fileinput.input(os.path.join(build_dir, 'src', 'common.h'), inplace=1, backup='.orig'):
            line = re.sub(r'#include \<boost\/for\_each.hpp\>', '#include <boost/foreach.hpp>', line, count=0)
            sys.stdout.write(line)

        super(EB_Cufflinks, self).patch_step()
| gpl-2.0 |
whereismyjetpack/ansible | lib/ansible/module_utils/network_common.py | 60 | 6138 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2016 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from ansible.module_utils.six import iteritems
from ansible.module_utils.basic import AnsibleFallbackNotFound
def to_list(val):
    """Normalise *val* into a list.

    Sequences (list/tuple/set) are copied into a new list, ``None`` becomes
    an empty list, and any other value is wrapped in a one-element list.
    """
    if val is None:
        return list()
    if isinstance(val, (list, tuple, set)):
        return list(val)
    return [val]
class ComplexDict(object):
    """Transforms a dict to with an argument spec

    This class will take a dict and apply an Ansible argument spec to the
    values. The resulting dict will contain all of the keys in the param
    with appropriate values set.

    Example::

        argument_spec = dict(
            command=dict(key=True),
            display=dict(default='text', choices=['text', 'json']),
            validate=dict(type='bool')
        )
        transform = ComplexDict(argument_spec, module)
        value = dict(command='foo')
        result = transform(value)
        print result
        {'command': 'foo', 'display': 'text', 'validate': None}

    Supported argument spec:
        * key - specifies how to map a single value to a dict
        * read_from - read and apply the argument_spec from the module
        * required - a value is required
        * type - type of value (uses AnsibleModule type checker)
        * fallback - implements fallback function
        * choices - set of valid options
        * default - default value
    """

    def __init__(self, attrs, module):
        self._attributes = attrs
        self._module = module
        self.attr_names = frozenset(self._attributes.keys())
        self._has_key = False

        for name, attr in iteritems(self._attributes):
            if attr.get('read_from'):
                # Pull missing spec keys from the module's argument_spec
                # without overriding anything declared locally.
                spec = self._module.argument_spec.get(attr['read_from'])
                if not spec:
                    raise ValueError('argument_spec %s does not exist' % attr['read_from'])
                for key, value in iteritems(spec):
                    if key not in attr:
                        attr[key] = value

            if attr.get('key'):
                if self._has_key:
                    raise ValueError('only one key value can be specified')
                # BUG FIX: the original assigned to a local ``self_has_key``
                # (missing dot), so the duplicate-key guard above could
                # never trigger.
                self._has_key = True
                attr['required'] = True

    def _dict(self, value):
        # Map a scalar onto the spec: the 'key' attribute receives the
        # value, every other attribute gets its declared default.
        obj = {}
        for name, attr in iteritems(self._attributes):
            if attr.get('key'):
                obj[name] = value
            else:
                obj[name] = attr.get('default')
        return obj

    def __call__(self, value):
        """Validate *value* against the spec and return the normalised dict."""
        if not isinstance(value, dict):
            value = self._dict(value)

        unknown = set(value).difference(self.attr_names)
        if unknown:
            raise ValueError('invalid keys: %s' % ','.join(unknown))

        for name, attr in iteritems(self._attributes):
            if not value.get(name):
                value[name] = attr.get('default')

            if attr.get('fallback') and not value.get(name):
                # fallback is (strategy, args..., kwargs-dict); a lookup
                # miss is tolerated and leaves the value unset.
                fallback = attr.get('fallback', (None,))
                fallback_strategy = fallback[0]
                fallback_args = []
                fallback_kwargs = {}
                if fallback_strategy is not None:
                    for item in fallback[1:]:
                        if isinstance(item, dict):
                            fallback_kwargs = item
                        else:
                            fallback_args = item
                    try:
                        value[name] = fallback_strategy(*fallback_args, **fallback_kwargs)
                    except AnsibleFallbackNotFound:
                        continue

            if attr.get('required') and value.get(name) is None:
                raise ValueError('missing required attribute %s' % name)

            if 'choices' in attr:
                if value[name] not in attr['choices']:
                    raise ValueError('%s must be one of %s, got %s' % \
                        (name, ', '.join(attr['choices']), value[name]))

            if value[name] is not None:
                # Delegate type validation to AnsibleModule's checkers.
                value_type = attr.get('type', 'str')
                type_checker = self._module._CHECK_ARGUMENT_TYPES_DISPATCHER[value_type]
                type_checker(value[name])

        return value
class ComplexList(ComplexDict):
    """Extends ```ComplexDict``` to handle a list of dicts """

    def __call__(self, values):
        # Validate/normalise each element with the ComplexDict logic.
        if not isinstance(values, (list, tuple)):
            raise TypeError('value must be an ordered iterable')
        return [(super(ComplexList, self).__call__(v)) for v in values]
| gpl-3.0 |
rsvip/Django | django/contrib/sessions/backends/db.py | 49 | 2944 | import logging
from django.contrib.sessions.backends.base import CreateError, SessionBase
from django.core.exceptions import SuspiciousOperation
from django.db import IntegrityError, router, transaction
from django.utils import timezone
from django.utils.encoding import force_text
class SessionStore(SessionBase):
    """
    Implements database session store.
    """
    def __init__(self, session_key=None):
        super(SessionStore, self).__init__(session_key)

    def load(self):
        """Return the decoded session dict, creating a fresh session when
        the key is missing, expired, or fails decoding."""
        try:
            s = Session.objects.get(
                session_key=self.session_key,
                expire_date__gt=timezone.now()
            )
            return self.decode(s.session_data)
        except (Session.DoesNotExist, SuspiciousOperation) as e:
            if isinstance(e, SuspiciousOperation):
                # Tampered/undecodable session data is a security event.
                logger = logging.getLogger('django.security.%s' %
                        e.__class__.__name__)
                logger.warning(force_text(e))
            self.create()
            return {}

    def exists(self, session_key):
        return Session.objects.filter(session_key=session_key).exists()

    def create(self):
        # Keep generating keys until an unused one is saved; the DB unique
        # constraint (via must_create) arbitrates races.
        while True:
            self._session_key = self._get_new_session_key()
            try:
                # Save immediately to ensure we have a unique entry in the
                # database.
                self.save(must_create=True)
            except CreateError:
                # Key wasn't unique. Try again.
                continue
            self.modified = True
            self._session_cache = {}
            return

    def save(self, must_create=False):
        """
        Saves the current session data to the database. If 'must_create' is
        True, a database error will be raised if the saving operation doesn't
        create a *new* entry (as opposed to possibly updating an existing
        entry).
        """
        obj = Session(
            session_key=self._get_or_create_session_key(),
            session_data=self.encode(self._get_session(no_load=must_create)),
            expire_date=self.get_expiry_date()
        )
        using = router.db_for_write(Session, instance=obj)
        try:
            # Atomic so a duplicate-key INSERT does not poison the
            # surrounding transaction.
            with transaction.atomic(using=using):
                obj.save(force_insert=must_create, using=using)
        except IntegrityError:
            if must_create:
                raise CreateError
            raise

    def delete(self, session_key=None):
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        try:
            Session.objects.get(session_key=session_key).delete()
        except Session.DoesNotExist:
            pass

    @classmethod
    def clear_expired(cls):
        # Bulk-delete all expired rows; invoked by clearsessions.
        Session.objects.filter(expire_date__lt=timezone.now()).delete()
# At bottom to avoid circular import
from django.contrib.sessions.models import Session # isort:skip
| bsd-3-clause |
freeworldxbmc/pluging.video.Jurassic.World.Media | resources/lib/sources/iwatchonline_mv_tv.py | 7 | 7369 | # -*- coding: utf-8 -*-
'''
Genesis Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse
from resources.lib.libraries import cleantitle
from resources.lib.libraries import client
from resources.lib import resolvers
class source:
    """Scraper for iwatchonline: locates movie/show pages via the site's
    search form and extracts playable hoster links from them."""

    def __init__(self):
        # Primary site plus mirror/proxy hosts that are tried in order
        # (link_2 tunnels through Google Translate to dodge blocking).
        self.base_link = 'http://www.iwatchonline.ag'
        self.link_1 = 'http://www.imovie.to'
        self.link_2 = 'http://translate.googleusercontent.com/translate_c?anno=2&hl=en&sl=mt&tl=en&u=http://www.iwatchonline.ag'
        self.link_3 = 'https://iwatchonline.unblocked.pw'
        self.search_link = '/advance-search'
        self.show_link = '/tv-shows/%s'
        self.episode_link = '/episode/%s-s%02de%02d'
        self.headers = {}

    def get_movie(self, imdb, title, year):
        # Search for *title* (searchin=1 -> movies) and return the site-
        # relative path of the first result whose cleaned title matches
        # exactly and whose listing year is within one of *year*.
        try:
            query = self.search_link
            post = urllib.urlencode({'searchquery': title, 'searchin': '1'})

            result = ''
            links = [self.link_1, self.link_3]
            for base_link in links:
                result = client.source(urlparse.urljoin(base_link, query), post=post, headers=self.headers)
                if 'widget search-page' in str(result): break

            result = client.parseDOM(result, 'div', attrs = {'class': 'widget search-page'})[0]
            result = client.parseDOM(result, 'td')

            title = cleantitle.movie(title)
            years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
            result = [(client.parseDOM(i, 'a', ret='href')[-1], client.parseDOM(i, 'a')[-1]) for i in result]
            result = [i for i in result if title == cleantitle.movie(i[1])]
            result = [i[0] for i in result if any(x in i[1] for x in years)][0]

            url = client.replaceHTMLCodes(result)
            # Unwrap Google-Translate style redirects (?u=<real url>).
            try: url = urlparse.parse_qs(urlparse.urlparse(url).query)['u'][0]
            except: pass
            url = urlparse.urlparse(url).path
            url = url.encode('utf-8')
            return url
        except:
            return

    def get_show(self, imdb, tvdb, tvshowtitle, year):
        # Same flow as get_movie but with searchin=2 (TV shows) and the
        # TV-specific title cleaner.
        try:
            query = self.search_link
            post = urllib.urlencode({'searchquery': tvshowtitle, 'searchin': '2'})

            result = ''
            links = [self.link_1, self.link_3]
            for base_link in links:
                result = client.source(urlparse.urljoin(base_link, query), post=post, headers=self.headers)
                if 'widget search-page' in str(result): break

            result = client.parseDOM(result, 'div', attrs = {'class': 'widget search-page'})[0]
            result = client.parseDOM(result, 'td')

            tvshowtitle = cleantitle.tv(tvshowtitle)
            years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
            result = [(client.parseDOM(i, 'a', ret='href')[-1], client.parseDOM(i, 'a')[-1]) for i in result]
            result = [i for i in result if tvshowtitle == cleantitle.tv(i[1])]
            result = [i[0] for i in result if any(x in i[1] for x in years)][0]

            url = client.replaceHTMLCodes(result)
            try: url = urlparse.parse_qs(urlparse.urlparse(url).query)['u'][0]
            except: pass
            url = urlparse.urlparse(url).path
            url = url.encode('utf-8')
            return url
        except:
            return

    def get_episode(self, url, imdb, tvdb, title, date, season, episode):
        # Build the '/episode/<show>-sXXeYY' path from the show URL.
        if url == None: return

        url = url.rsplit('/', 1)[-1]
        url = self.episode_link % (url, int(season), int(episode))
        url = client.replaceHTMLCodes(url)
        url = url.encode('utf-8')
        return url

    def get_sources(self, url, hosthdDict, hostDict, locDict):
        # Scrape the media page's link table: keep English entries on
        # known hosters and tag each with a quality (CAM/HD/SD) + info.
        try:
            sources = []

            if url == None: return sources

            result = ''
            links = [self.link_1, self.link_2, self.link_3]
            for base_link in links:
                result = client.source(urlparse.urljoin(base_link, url), headers=self.headers)
                if 'original-title' in str(result): break

            links = client.parseDOM(result, 'tr', attrs = {'id': 'pt.+?'})

            for i in links:
                try:
                    # Second <img> in the row is the language flag icon.
                    lang = re.compile('<img src=[\'|\"|\s|\<]*(.+?)[\'|\"|\s|\>]').findall(i)[1]
                    if not 'English' in lang: raise Exception()

                    # First <img> is the hoster logo; derive the host name
                    # from the image file name.
                    host = re.compile('<img src=[\'|\"|\s|\<]*(.+?)[\'|\"|\s|\>]').findall(i)[0]
                    host = host.rsplit('.', 1)[0].rsplit('.', 1)[0].rsplit('/', 1)[-1]
                    host = host.strip().lower()
                    host = client.replaceHTMLCodes(host)
                    host = host.encode('utf-8')

                    if '>Cam<' in i or '>TS<' in i: quality = 'CAM'
                    elif '>HD<' in i and host in hosthdDict: quality = 'HD'
                    else: quality = 'SD'

                    # Skip hosters that are not in the caller's allow-lists.
                    if quality == 'HD' and not host in hosthdDict: raise Exception()
                    if quality == 'SD' and not host in hostDict: raise Exception()

                    if '>3D<' in i: info = '3D'
                    else: info = ''

                    url = re.compile('href=[\'|\"|\s|\<]*(.+?)[\'|\"|\s|\>]').findall(i)[0]
                    url = client.replaceHTMLCodes(url)
                    try: url = urlparse.parse_qs(urlparse.urlparse(url).query)['u'][0]
                    except: pass
                    if url.startswith('http'): url = urlparse.urlparse(url).path
                    if not url.startswith('http'): url = urlparse.urljoin(self.base_link, url)
                    url = url.encode('utf-8')

                    sources.append({'source': host, 'quality': quality, 'provider': 'Iwatchonline', 'url': url, 'info': info})
                except:
                    pass

            return sources
        except:
            return sources

    def resolve(self, url):
        # Fetch the hoster page, pull the embedded player frame URL and
        # hand it to the generic resolver chain.
        try:
            url = urlparse.urlparse(url).path

            result = ''
            links = [self.link_1, self.link_2, self.link_3]
            for base_link in links:
                result = client.request(urlparse.urljoin(base_link, url), headers=self.headers)
                if 'frame' in str(result): break

            url = re.compile('class=[\'|\"]*frame.+?src=[\'|\"|\s|\<]*(.+?)[\'|\"|\s|\>]').findall(result)[0]
            url = client.replaceHTMLCodes(url)
            try: url = urlparse.parse_qs(urlparse.urlparse(url).query)['u'][0]
            except: pass
            try: url = urlparse.parse_qs(urlparse.urlparse(url).query)['url'][0]
            except: pass

            url = resolvers.request(url)
            return url
        except:
            return
| gpl-3.0 |
pulinagrawal/nupic | external/linux32/lib/python2.6/site-packages/matplotlib/projections/__init__.py | 69 | 2179 | from geo import AitoffAxes, HammerAxes, LambertAxes
from polar import PolarAxes
from matplotlib import axes
class ProjectionRegistry(object):
    """
    Manages the set of projections available to the system.
    """

    def __init__(self):
        # Maps projection name -> projection (an Axes subclass).
        self._all_projection_types = {}

    def register(self, *projections):
        """
        Register a new set of projection(s).
        """
        for proj in projections:
            self._all_projection_types[proj.name] = proj

    def get_projection_class(self, name):
        """
        Get a projection class from its *name*.
        """
        return self._all_projection_types[name]

    def get_projection_names(self):
        """
        Get a sorted list of the names of all projections currently
        registered.
        """
        return sorted(self._all_projection_types.keys())
# Module-global registry, pre-populated with the built-in projections.
projection_registry = ProjectionRegistry()
projection_registry.register(
    axes.Axes,
    PolarAxes,
    AitoffAxes,
    HammerAxes,
    LambertAxes)
def register_projection(cls):
    """Register *cls* (an Axes subclass exposing a ``name`` attribute)
    in the global projection registry."""
    projection_registry.register(cls)
def get_projection_class(projection=None):
    """
    Get a projection class from its name.

    If *projection* is None, a standard rectilinear projection is
    returned.
    """
    name = 'rectilinear' if projection is None else projection
    try:
        return projection_registry.get_projection_class(name)
    except KeyError:
        raise ValueError("Unknown projection '%s'" % name)
def projection_factory(projection, figure, rect, **kwargs):
    """
    Get a new projection instance.

    *projection* is a projection name.

    *figure* is a figure to add the axes to.

    *rect* is a :class:`~matplotlib.transforms.Bbox` object specifying
    the location of the axes within the figure.

    Any other kwargs are passed along to the specific projection
    constructor being used.
    """
    axes_class = get_projection_class(projection)
    return axes_class(figure, rect, **kwargs)
def get_projection_names():
    """
    Get a list of acceptable projection names.
    """
    # Thin module-level wrapper around the global registry.
    return projection_registry.get_projection_names()
| agpl-3.0 |
projecthamster/hamster | src/hamster/widgets/__init__.py | 2 | 3157 | # - coding: utf-8 -
# Copyright (C) 2007-2009 Toms Bauģis <toms.baugis at gmail.com>
# This file is part of Project Hamster.
# Project Hamster is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Project Hamster is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Project Hamster. If not, see <http://www.gnu.org/licenses/>.
from gi.repository import Gtk as gtk
from gi.repository import Gdk as gdk
from gi.repository import Pango as pango
from hamster.lib import datetime as dt
# import our children
from hamster.widgets.activityentry import (
ActivityEntry,
CategoryEntry,
CmdLineEntry,
)
from hamster.widgets.timeinput import TimeInput
from hamster.widgets.dayline import DayLine
from hamster.widgets.tags import Tag, TagBox, TagsEntry
from hamster.widgets.reportchooserdialog import ReportChooserDialog
from hamster.widgets.facttree import FactTree
from hamster.widgets.dates import Calendar, RangePick
# handy wrappers
def add_hint(entry, hint):
    # Attach placeholder-text behaviour to a gtk.Entry: *hint* is shown
    # greyed-out/italic while the entry is empty and unfocused, and
    # get_text() is patched so the hint never leaks out as a real value.
    entry.hint = hint

    def override_get_text(self):
        #override get text so it does not return true when hint is in!
        if self.real_get_text() == self.hint:
            return ""
        else:
            return self.real_get_text()

    def _set_hint(self, widget, event):
        if self.get_text(): # do not mess with user entered text
            return

        self.modify_text(gtk.StateType.NORMAL, gdk.Color.parse("gray")[1])

        hint_font = pango.FontDescription(self.get_style().font_desc.to_string())
        hint_font.set_style(pango.Style.ITALIC)
        self.modify_font(hint_font)

        self.set_text(self.hint)

    def _set_normal(self, widget, event):
        #self.modify_text(gtk.StateType.NORMAL, self.get_style().fg[gtk.StateType.NORMAL])
        hint_font = pango.FontDescription(self.get_style().font_desc.to_string())
        hint_font.set_style(pango.Style.NORMAL)
        self.modify_font(hint_font)

        if self.real_get_text() == self.hint:
            self.set_text("")

    def _on_changed(self, widget):
        if self.real_get_text() == "" and self.is_focus() == False:
            self._set_hint(widget, None)

    import types
    instancemethod = types.MethodType

    # Bind the helpers onto this particular entry instance (Python 2
    # three-argument MethodType), keeping the original get_text reachable
    # as real_get_text.
    entry._set_hint = instancemethod(_set_hint, entry, gtk.Entry)
    entry._set_normal = instancemethod(_set_normal, entry, gtk.Entry)
    entry._on_changed = instancemethod(_on_changed, entry, gtk.Entry)
    entry.real_get_text = entry.get_text
    entry.get_text = instancemethod(override_get_text, entry, gtk.Entry)

    entry.connect('focus-in-event', entry._set_normal)
    entry.connect('focus-out-event', entry._set_hint)
    entry.connect('changed', entry._on_changed)

    # Show the hint right away (entry starts out empty and unfocused).
    entry._set_hint(entry, None)
| gpl-3.0 |
MarsCarl/ucore_lab | related_info/ostep/ostep2-segmentation.py | 54 | 6613 | #! /usr/bin/env python
import sys
from optparse import OptionParser
import random
import math
def convert(size):
    """Convert a human-readable size string into an integer.

    A trailing ``k``/``K``, ``m``/``M`` or ``g``/``G`` multiplies the
    leading number by 2**10, 2**20 or 2**30 respectively; any other
    string is parsed as a plain integer.
    """
    # Map each recognized suffix (case-folded) to its multiplier; this
    # replaces the original repetitive if/elif chain with identical
    # behavior (including raising on an empty string).
    multipliers = {'k': 1024, 'm': 1024 ** 2, 'g': 1024 ** 3}
    suffix = size[-1].lower()
    if suffix in multipliers:
        return int(size[:-1]) * multipliers[suffix]
    return int(size)
#
# main program
#
parser = OptionParser()
parser.add_option("-s", "--seed", default=0, help="the random seed",
action="store", type="int", dest="seed")
parser.add_option("-A", "--addresses", default="-1",
help="a set of comma-separated pages to access; -1 means randomly generate",
action="store", type="string", dest="addresses")
parser.add_option("-a", "--asize", default="1k",
help="address space size (e.g., 16, 64k, 32m, 1g)",
action="store", type="string", dest="asize")
parser.add_option("-p", "--physmem", default="16k",
help="physical memory size (e.g., 16, 64k, 32m, 1g)",
action="store", type="string", dest="psize")
parser.add_option("-n", "--numaddrs", default=5,
help="number of virtual addresses to generate",
action="store", type="int", dest="num")
parser.add_option("-b", "--b0", default="-1",
help="value of segment 0 base register",
action="store", type="string", dest="base0")
parser.add_option("-l", "--l0", default="-1",
help="value of segment 0 limit register",
action="store", type="string", dest="len0")
parser.add_option("-B", "--b1", default="-1",
help="value of segment 1 base register",
action="store", type="string", dest="base1")
parser.add_option("-L", "--l1", default="-1",
help="value of segment 1 limit register",
action="store", type="string", dest="len1")
parser.add_option("-c", help="compute answers for me",
action="store_true", default=False, dest="solve")
(options, args) = parser.parse_args()
print "ARG seed", options.seed
print "ARG address space size", options.asize
print "ARG phys mem size", options.psize
print ""
random.seed(options.seed)
asize = convert(options.asize)
psize = convert(options.psize)
addresses = str(options.addresses)
if psize <= 1:
print 'Error: must specify a non-zero physical memory size.'
exit(1)
if asize == 0:
print 'Error: must specify a non-zero address-space size.'
exit(1)
if psize <= asize:
print 'Error: physical memory size must be GREATER than address space size (for this simulation)'
exit(1)
#
# need to generate base, bounds for segment registers
#
len0 = convert(options.len0)
len1 = convert(options.len1)
base0 = convert(options.base0)
base1 = convert(options.base1)
if len0 == -1:
len0 = int(asize/4.0 + (asize/4.0 * random.random()))
if len1 == -1:
len1 = int(asize/4.0 + (asize/4.0 * random.random()))
# now have to find room for them
if base0 == -1:
done = 0
while done == 0:
base0 = int(psize * random.random())
if (base0 + len0) < psize:
done = 1
# internally, base1 points to the lower address, and base1+len1 the higher address
# (this differs from what the user would pass in, for example)
if base1 == -1:
done = 0
while done == 0:
base1 = int(psize * random.random())
if (base1 + len1) < psize:
if (base1 > (base0 + len0)) or ((base1 + len1) < base0):
done = 1
else:
base1 = base1 - len1
if len0 > asize/2.0 or len1 > asize/2.0:
print 'Error: length register is too large for this address space'
exit(1)
print 'Segment register information:'
print ''
print ' Segment 0 base (grows positive) : 0x%08x (decimal %d)' % (base0, base0)
print ' Segment 0 limit : %d' % (len0)
print ''
print ' Segment 1 base (grows negative) : 0x%08x (decimal %d)' % (base1+len1, base1+len1)
print ' Segment 1 limit : %d' % (len1)
print ''
nbase1 = base1 + len1
if (len0 + base0) > (base1) and (base1 > base0):
print 'Error: segments overlap in physical memory'
exit(1)
addrList = []
if addresses == '-1':
# need to generate addresses
for i in range(0, options.num):
n = int(asize * random.random())
addrList.append(n)
else:
addrList = addresses.split(',')
#
# now, need to generate virtual address trace
#
print 'Virtual Address Trace'
i = 0
for vStr in addrList:
# vaddr = int(asize * random.random())
vaddr = int(vStr)
if vaddr < 0 or vaddr >= asize:
print 'Error: virtual address %d cannot be generated in an address space of size %d' % (vaddr, asize)
exit(1)
if options.solve == False:
print ' VA %2d: 0x%08x (decimal: %4d) --> PA or segmentation violation?' % (i, vaddr, vaddr)
else:
paddr = 0
if (vaddr >= (asize / 2)):
# seg 1
paddr = nbase1 + (vaddr - asize)
if paddr < base1:
print ' VA %2d: 0x%08x (decimal: %4d) --> SEGMENTATION VIOLATION (SEG1)' % (i, vaddr, vaddr)
else:
print ' VA %2d: 0x%08x (decimal: %4d) --> VALID in SEG1: 0x%08x (decimal: %4d)' % (i, vaddr, vaddr, paddr, paddr)
else:
# seg 0
if (vaddr >= len0):
print ' VA %2d: 0x%08x (decimal: %4d) --> SEGMENTATION VIOLATION (SEG0)' % (i, vaddr, vaddr)
else:
paddr = vaddr + base0
print ' VA %2d: 0x%08x (decimal: %4d) --> VALID in SEG0: 0x%08x (decimal: %4d)' % (i, vaddr, vaddr, paddr, paddr)
i += 1
print ''
if options.solve == False:
print 'For each virtual address, either write down the physical address it translates to'
print 'OR write down that it is an out-of-bounds address (a segmentation violation). For'
print 'this problem, you should assume a simple address space with two segments: the top'
print 'bit of the virtual address can thus be used to check whether the virtual address'
print 'is in segment 0 (topbit=0) or segment 1 (topbit=1). Note that the base/limit pairs'
print 'given to you grow in different directions, depending on the segment, i.e., segment 0'
print 'grows in the positive direction, whereas segment 1 in the negative. '
print ''
| gpl-2.0 |
JeremiasE/KFormula | kspread/plugins/scripting/scripts/myorca.py | 3 | 7630 | #!/usr/bin/env kross
import urllib, Kross, KSpread
class MyConfig:
    """Tunable settings shared by the Orca client and the KSpread glue."""
    def __init__(self):
        # Location of the Orca HTTP server to talk to.
        self.url = "http://127.0.0.1:20433"
        # Cell range watched for changes on the current sheet.
        self.sheetRange = "A1:F50"
        # Speak the cell name whenever the selection changes.
        self.cellNameOnSelectionChanged = True
        # Speak the cell value whenever the selection changes.
        self.cellValueOnSelectionChanged = True #TODO
        # Speak the sheet name whenever the active sheet changes.
        self.sheetNameOnSheetChanged = True
class MyOrca:
    """Thin HTTP client for the Orca screen reader's web interface."""

    def __init__(self, config):
        self.config = config

    def _send(self, data):
        """POST *data* to the configured Orca server and return the reply."""
        connection = urllib.urlopen(self.config.url, data)
        reply = connection.read()
        connection.close()
        return reply

    def speak(self, text):
        """Ask Orca to speak *text* aloud."""
        self._send("speak:%s" % text)

    def stop(self):
        """Interrupt any speech currently in progress."""
        self._send("stop")

    def isSpeaking(self):
        """Return the server's answer to whether speech is in progress."""
        return self._send("isSpeaking")
class MyKSpread:
    """Glue between KSpread and Orca: watches the active sheet and
    selection, speaking changes through *myorca*."""

    def __init__(self, action, myorca):
        self.action = action
        self.myorca = myorca
        self.config = self.myorca.config

        # Tear the bridge down whenever the Kross action restarts or ends.
        def cleanUp(*args):
            if hasattr(__main__,"_KSpreadOrca"):
                getattr(__main__,"_KSpreadOrca").finalized()
        self.action.connect("started(Kross::Action*)", cleanUp)
        self.action.connect("finalized(Kross::Action*)", cleanUp)

        self._view = KSpread.view()
        self._setSheet( KSpread.currentSheet() )

        # Speak either the new sheet name (sheet switch) or the current
        # cell's name/value (selection move), per the config flags.
        def selectionChanged():
            print "Selection changed"
            s = KSpread.currentSheet()
            if s:
                if s.sheetName() != self.sheetName:
                    self._setSheet(s)
                    if self.config.sheetNameOnSheetChanged:
                        self.speakSheetName()
                else:
                    if self.config.cellNameOnSelectionChanged:
                        self.speakCellName()
                    if self.config.cellValueOnSelectionChanged:
                        self.speakCellValue()
        self._view.connect("selectionChanged()", selectionChanged)

    def __del__(self):
        pass

    def finalized(self):
        # Drop the cell listener, unregister the module-level handle and
        # break every reference cycle so Qt objects can be collected.
        if hasattr(self,"_listener") and self._listener:
            print "FINALIZE!!!"
            self._listener.deleteLater()
            self._listener = None
        if hasattr(__main__,"_KSpreadOrca"):
            delattr(__main__,"_KSpreadOrca")
        self.action = None
        self.myorca = None
        self.config = None
        self._view = None
        self.sheet = None
        self.__del__()

    def _setSheet(self, sheet):
        # Point the bridge at *sheet* and (re)create the change listener
        # over the configured cell range.
        self.sheet = sheet
        self.sheetName = self.sheet.sheetName()
        self._listener = KSpread.createListener(self.sheetName, self.config.sheetRange)
        if not self._listener:
            raise "Failed to create listener"

        def cellChanged(column, row):
            text = self.sheet.text(column, row)
            if text:
                #if self.myorca.isSpeaking():
                #    self.myorca.stop()
                self.myorca.speak(text)
        self._listener.connect("cellChanged(int,int)", cellChanged)

        def sheetNameChanged(*args):
            self.myorca.speak("Sheet name changed")
        self.sheet.connect("nameChanged()", sheetNameChanged)

        def sheetShowChanged(*args):
            self.myorca.speak("Show sheet %s" % self.sheetName)
        self.sheet.connect("showChanged()", sheetShowChanged)

        def sheetHideChanged(*args):
            self.myorca.speak("Hide sheet %s" % self.sheetName)
        self.sheet.connect("hideChanged()", sheetHideChanged)

    def speakCellName(self):
        # Speak the A1-style name of the first selected cell.
        sheet = KSpread.currentSheet()
        (x1,y1,x2,y2) = KSpread.view().selection()
        p1 = sheet.cellName(x1,y1)
        self.myorca.speak(p1)

    def speakCellValue(self):
        # Speak the displayed text of the first selected cell.
        sheet = KSpread.currentSheet()
        (x1,y1,x2,y2) = KSpread.view().selection()
        t = sheet.text(x1,y1)
        self.myorca.speak(t)

    def speakSheetName(self):
        self.myorca.speak( KSpread.view().sheet() ) if False else None
class MyDialog:
    """Tabbed Kross dialog with manual speak buttons ('Action' page) and
    the auto-speak checkboxes ('Option' page)."""

    def __init__(self, action, mykspread):
        self.action = action
        self.mykspread = mykspread
        self.config = mykspread.config

        forms = Kross.module("forms")
        self.dialog = forms.createDialog("Orca Screen Reader")
        self.dialog.setButtons("None")
        self.dialog.setFaceType("Tabbed") #Auto Plain List Tree Tabbed
        #self.dialog.minimumWidth = 300
        self.dialog.minimumHeight = 40

        page = self.dialog.addPage("Action","")
        self.cellNameBtn = forms.createWidget(page, "QPushButton")
        self.cellNameBtn.text = "Cell Name"
        #self.cellNameBtn.shortcut = "Ctrl+S"
        self.cellNameBtn.connect("clicked()", self.cellNameBtnClicked)
        self.cellValueBtn = forms.createWidget(page, "QPushButton")
        self.cellValueBtn.text = "Cell Value"
        self.cellValueBtn.connect("clicked()", self.cellValueBtnClicked)
        self.sheetNameBtn = forms.createWidget(page, "QPushButton")
        self.sheetNameBtn.text = "Sheet Name"
        self.sheetNameBtn.connect("clicked()", self.sheetNameBtnClicked)

        page = self.dialog.addPage("Option","")
        self.cellNameCheckbox = forms.createWidget(page, "QCheckBox")
        self.cellNameCheckbox.text = "Cell Name if selection changed"
        self.cellNameCheckbox.checked = self.config.cellNameOnSelectionChanged
        self.cellNameCheckbox.connect("stateChanged(int)", self.optionChanged)
        self.cellValueCheckbox = forms.createWidget(page, "QCheckBox")
        self.cellValueCheckbox.text = "Cell Value if selection changed"
        self.cellValueCheckbox.checked = self.config.cellValueOnSelectionChanged
        self.cellValueCheckbox.connect("stateChanged(int)", self.optionChanged)
        self.sheetNameChanged = forms.createWidget(page, "QCheckBox")
        self.sheetNameChanged.text = "Sheet Name if sheet changed"
        self.sheetNameChanged.checked = self.config.sheetNameOnSheetChanged
        self.sheetNameChanged.connect("stateChanged(int)", self.optionChanged)

        # Have the dialog destructed when the bridge object goes away.
        mykspread.__del__ = self.dialog.delayedDestruct
        self.dialog.show()

    def cellNameBtnClicked(self, *args):
        self.mykspread.speakCellName()

    def cellValueBtnClicked(self, *args):
        self.mykspread.speakCellValue()

    def sheetNameBtnClicked(self, *args):
        self.mykspread.speakSheetName()

    def optionChanged(self, *args):
        # Push checkbox state back into the shared config object.
        self.config.cellNameOnSelectionChanged = self.cellNameCheckbox.checked
        self.config.cellValueOnSelectionChanged = self.cellValueCheckbox.checked
        self.config.sheetNameOnSheetChanged = self.sheetNameChanged.checked
def start(action, myconfig, myorca):
    # Keep retrying the Orca connection, letting the user edit the URL in a
    # small dialog after each failure, until it works or the user cancels.
    while True:
        try:
            myorca.speak("KOffice Spreadsheet")
            break
        except IOError:
            forms = Kross.module("forms")
            dialog = forms.createDialog("Orca Screen Reader")
            dialog.minimumWidth = 400
            dialog.minimumHeight = 40
            dialog.setButtons("Ok|Cancel")
            page = dialog.addPage("","")
            label = forms.createWidget(page, "QLabel")
            label.text = "Failed to connect with the Orca HTTP-Server."
            widget = forms.createWidget(page, "QWidget")
            layout = forms.createLayout(widget, "QHBoxLayout")
            forms.createWidget(widget, "QLabel").text = "Url:"
            urlEdit = forms.createWidget(widget, "QLineEdit")
            urlEdit.text = myconfig.url
            if not dialog.exec_loop():
                return
            myconfig.url = urlEdit.text
    mykspread = MyKSpread(action, myorca)
    setattr(__main__,"_KSpreadOrca",mykspread)
    # NOTE(review): passes the Kross script-global 'self' rather than the
    # 'action' parameter -- looks intentional for Kross, but verify.
    MyDialog(self, mykspread)
# Script entry point: build the configuration and Orca client, then start.
# ('self' here is the Kross script object injected into the module scope.)
myconfig = MyConfig()
myorca = MyOrca(myconfig)
start(self, myconfig, myorca)
| gpl-2.0 |
pbhd/xbmc | tools/EventClients/lib/python/zeroconf.py | 181 | 4874 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2013 Team XBMC
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Simple wrapper around Avahi
"""
__author__ = "d4rk@xbmc.org"
__version__ = "0.1"
try:
import time
import dbus, gobject, avahi
from dbus import DBusException
from dbus.mainloop.glib import DBusGMainLoop
except Exception, e:
print "Zeroconf support disabled. To enable, install the following Python modules:"
print " dbus, gobject, avahi"
pass
SERVICE_FOUND = 1
SERVICE_LOST = 2
class Browser:
    """ Simple Zeroconf Browser """

    def __init__( self, service_types = {} ):
        """
        service_types - dictionary of services => handlers
        """
        # NOTE(review): mutable default argument; harmless here since it
        # is only iterated, never mutated.
        self._stop = False
        self.loop = DBusGMainLoop()
        self.bus = dbus.SystemBus( mainloop=self.loop )
        self.server = dbus.Interface( self.bus.get_object( avahi.DBUS_NAME, '/' ),
                                      'org.freedesktop.Avahi.Server')
        self.handlers = {}

        for type in service_types.keys():
            self.add_service( type, service_types[ type ] )

    def add_service( self, type, handler = None ):
        """
        Add a service that the browser should watch for
        """
        # Create an Avahi ServiceBrowser for *type* and route its
        # ItemNew/ItemRemove D-Bus signals to our callbacks.
        self.sbrowser = dbus.Interface(
            self.bus.get_object(
                avahi.DBUS_NAME,
                self.server.ServiceBrowserNew(
                    avahi.IF_UNSPEC,
                    avahi.PROTO_UNSPEC,
                    type,
                    'local',
                    dbus.UInt32(0)
                    )
                ),
            avahi.DBUS_INTERFACE_SERVICE_BROWSER)
        self.handlers[ type ] = handler
        self.sbrowser.connect_to_signal("ItemNew", self._new_item_handler)
        self.sbrowser.connect_to_signal("ItemRemove", self._remove_item_handler)

    def run(self):
        """
        Run the gobject event loop
        """
        # Don't use loop.run() because Python's GIL will block all threads
        loop = gobject.MainLoop()
        context = loop.get_context()
        while not self._stop:
            if context.pending():
                context.iteration( True )
            else:
                time.sleep(1)

    def stop(self):
        """
        Stop the gobject event loop
        """
        self._stop = True

    def _new_item_handler(self, interface, protocol, name, stype, domain, flags):
        if flags & avahi.LOOKUP_RESULT_LOCAL:
            # local service, skip
            pass
        # Resolve the service asynchronously; the resolved details arrive
        # in _service_resolved_handler.
        self.server.ResolveService(
            interface,
            protocol,
            name,
            stype,
            domain,
            avahi.PROTO_UNSPEC,
            dbus.UInt32(0),
            reply_handler = self._service_resolved_handler,
            error_handler = self._error_handler
            )
        return

    def _remove_item_handler(self, interface, protocol, name, stype, domain, flags):
        if self.handlers[ stype ]:
            # FIXME: more details needed here
            try:
                self.handlers[ stype ]( SERVICE_LOST, { 'type' : stype, 'name' : name } )
            except:
                pass

    def _service_resolved_handler( self, *args ):
        # Positional args come from Avahi's ResolveService reply; pick out
        # the fields callers care about.
        service = {}
        service['type'] = str( args[3] )
        service['name'] = str( args[2] )
        service['address'] = str( args[7] )
        service['hostname'] = str( args[5] )
        service['port'] = int( args[8] )

        # if the service type has a handler call it
        try:
            if self.handlers[ args[3] ]:
                self.handlers[ args[3] ]( SERVICE_FOUND, service )
        except:
            pass

    def _error_handler( self, *args ):
        print 'ERROR: %s ' % str( args[0] )
if __name__ == "__main__":
    # Demo: print every XBMC service that appears or disappears.
    def service_handler( found, service ):
        print "---------------------"
        print ['Found Service', 'Lost Service'][found-1]
        for key in service.keys():
            print key+" : "+str( service[key] )

    browser = Browser( {
        '_xbmc-events._udp' : service_handler,
        '_xbmc-web._tcp' : service_handler
        } )
    browser.run()
| gpl-2.0 |
ProfessionalIT/professionalit-webiste | sdk/google_appengine/lib/cherrypy/cherrypy/test/benchmark.py | 36 | 12830 | """CherryPy Benchmark Tool
Usage:
benchmark.py --null --notests --help --cpmodpy --modpython --ab=path --apache=path
--null: use a null Request object (to bench the HTTP server only)
--notests: start the server but do not run the tests; this allows
you to check the tested pages with a browser
--help: show this help message
--cpmodpy: run tests via apache on 54583 (with the builtin _cpmodpy)
--modpython: run tests via apache on 54583 (with modpython_gateway)
--ab=path: Use the ab script/executable at 'path' (see below)
--apache=path: Use the apache script/exe at 'path' (see below)
To run the benchmarks, the Apache Benchmark tool "ab" must either be on
your system path, or specified via the --ab=path option.
To run the modpython tests, the "apache" executable or script must be
on your system path, or provided via the --apache=path option. On some
platforms, "apache" may be called "apachectl" or "apache2ctl"--create
a symlink to them if needed.
"""
import getopt
import os
curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
import re
import sys
import time
import traceback
import cherrypy
from cherrypy._cpcompat import ntob
from cherrypy import _cperror, _cpmodpy
from cherrypy.lib import httputil
AB_PATH = ""
APACHE_PATH = "apache"
SCRIPT_NAME = "/cpbench/users/rdelon/apps/blog"
__all__ = ['ABSession', 'Root', 'print_report',
'run_standard_benchmarks', 'safe_threads',
'size_report', 'startup', 'thread_report',
]
size_cache = {}
class Root:
    """Root CherryPy application exposing the benchmarked handlers."""

    def index(self):
        """Landing page linking to each benchmark endpoint."""
        return """<html>
<head>
<title>CherryPy Benchmark</title>
</head>
<body>
<ul>
<li><a href="hello">Hello, world! (14 byte dynamic)</a></li>
<li><a href="static/index.html">Static file (14 bytes static)</a></li>
<li><form action="sizer">Response of length:
<input type='text' name='size' value='10' /></form>
</li>
</ul>
</body>
</html>"""
    index.exposed = True

    def hello(self):
        """Tiny fixed-size dynamic response."""
        return "Hello, world\r\n"
    hello.exposed = True

    def sizer(self, size):
        """Return (and memoize in the module-level cache) a body of
        exactly *size* 'X' characters."""
        body = size_cache.get(size)
        if body is None:
            body = "X" * int(size)
            size_cache[size] = body
        return body
    sizer.exposed = True
cherrypy.config.update({
'log.error.file': '',
'environment': 'production',
'server.socket_host': '127.0.0.1',
'server.socket_port': 54583,
'server.max_request_header_size': 0,
'server.max_request_body_size': 0,
'engine.deadlock_poll_freq': 0,
})
# Cheat mode on ;)
del cherrypy.config['tools.log_tracebacks.on']
del cherrypy.config['tools.log_headers.on']
del cherrypy.config['tools.trailing_slash.on']
appconf = {
'/static': {
'tools.staticdir.on': True,
'tools.staticdir.dir': 'static',
'tools.staticdir.root': curdir,
},
}
app = cherrypy.tree.mount(Root(), SCRIPT_NAME, appconf)
class NullRequest:
    """A null HTTP request class, returning 200 and an empty body."""

    def __init__(self, local, remote, scheme="http"):
        pass

    def close(self):
        pass

    def run(self, method, path, query_string, protocol, headers, rfile):
        # Short-circuit the normal request machinery: always answer
        # 200 OK with an empty body so only the HTTP server is measured.
        cherrypy.response.status = "200 OK"
        cherrypy.response.header_list = [("Content-Type", 'text/html'),
                                         ("Server", "Null CherryPy"),
                                         ("Date", httputil.HTTPDate()),
                                         ("Content-Length", "0"),
                                         ]
        cherrypy.response.body = [""]
        return cherrypy.response
class NullResponse:
    """Placeholder response class paired with NullRequest."""
    pass
class ABSession:
    """A session of 'ab', the Apache HTTP server benchmarking tool.

Example output from ab:

This is ApacheBench, Version 2.0.40-dev <$Revision: 1.121.2.1 $> apache-2.0
Copyright (c) 1996 Adam Twiss, Zeus Technology Ltd, http://www.zeustech.net/
Copyright (c) 1998-2002 The Apache Software Foundation, http://www.apache.org/

Benchmarking 127.0.0.1 (be patient)
Completed 100 requests
Completed 200 requests
Completed 300 requests
Completed 400 requests
Completed 500 requests
Completed 600 requests
Completed 700 requests
Completed 800 requests
Completed 900 requests


Server Software:        CherryPy/3.1beta
Server Hostname:        127.0.0.1
Server Port:            54583

Document Path:          /static/index.html
Document Length:        14 bytes

Concurrency Level:      10
Time taken for tests:   9.643867 seconds
Complete requests:      1000
Failed requests:        0
Write errors:           0
Total transferred:      189000 bytes
HTML transferred:       14000 bytes
Requests per second:    103.69 [#/sec] (mean)
Time per request:       96.439 [ms] (mean)
Time per request:       9.644 [ms] (mean, across all concurrent requests)
Transfer rate:          19.08 [Kbytes/sec] received

Connection Times (ms)
              min  mean[+/-sd] median   max
Connect:        0    0   2.9      0      10
Processing:    20   94   7.3     90     130
Waiting:        0   43  28.1     40     100
Total:         20   95   7.3    100     130

Percentage of the requests served within a certain time (ms)
  50%    100
  66%    100
  75%    100
  80%    100
  90%    100
  95%    100
  98%    100
  99%    110
 100%    130 (longest request)
Finished 1000 requests
"""

    # (attribute name, column header, regex that pulls the value out of
    # the raw ab output).
    parse_patterns = [('complete_requests', 'Completed',
                       ntob(r'^Complete requests:\s*(\d+)')),
                      ('failed_requests', 'Failed',
                       ntob(r'^Failed requests:\s*(\d+)')),
                      ('requests_per_second', 'req/sec',
                       ntob(r'^Requests per second:\s*([0-9.]+)')),
                      ('time_per_request_concurrent', 'msec/req',
                       ntob(r'^Time per request:\s*([0-9.]+).*concurrent requests\)$')),
                      ('transfer_rate', 'KB/sec',
                       ntob(r'^Transfer rate:\s*([0-9.]+)')),
                      ]

    def __init__(self, path=SCRIPT_NAME + "/hello", requests=1000, concurrency=10):
        self.path = path
        self.requests = requests
        self.concurrency = concurrency

    def args(self):
        # Build the ab command-line arguments for this session.
        port = cherrypy.server.socket_port
        assert self.concurrency > 0
        assert self.requests > 0
        # Don't use "localhost".
        # Cf http://mail.python.org/pipermail/python-win32/2008-March/007050.html
        return ("-k -n %s -c %s http://127.0.0.1:%s%s" %
                (self.requests, self.concurrency, port, self.path))

    def run(self):
        # Parse output of ab, setting attributes on self
        try:
            self.output = _cpmodpy.read_process(AB_PATH or "ab", self.args())
        except:
            print(_cperror.format_exc())
            raise

        for attr, name, pattern in self.parse_patterns:
            val = re.search(pattern, self.output, re.MULTILINE)
            if val:
                val = val.group(1)
                setattr(self, attr, val)
            else:
                # Leave None so callers can detect an unparsable run.
                setattr(self, attr, None)
# Client-concurrency levels swept by thread_report().
safe_threads = (25, 50, 100, 200, 400)
if sys.platform in ("win32",):
    # For some reason, ab crashes with > 50 threads on my Win2k laptop.
    safe_threads = (10, 20, 30, 40, 50)
def thread_report(path=SCRIPT_NAME + "/hello", concurrency=safe_threads):
    """Yield a header row, one ab-statistics row per concurrency level,
    and a final row of per-column averages."""
    sess = ABSession(path)
    attrs, names, patterns = list(zip(*sess.parse_patterns))
    avg = dict.fromkeys(attrs, 0.0)
    yield ('threads',) + names
    for c in concurrency:
        sess.concurrency = c
        sess.run()
        row = [c]
        for attr in attrs:
            val = getattr(sess, attr)
            if val is None:
                # ab output could not be parsed; dump it and drop this row.
                print(sess.output)
                row = None
                break
            val = float(val)
            avg[attr] += float(val)
            row.append(val)
        if row:
            yield row
    # Add a row of averages.
    # NOTE(review): the divisor is always len(concurrency), even when a run
    # failed to parse and its row was skipped above — confirm whether the
    # average should instead divide by the number of successful runs.
    yield ["Average"] + [str(avg[attr] / len(concurrency)) for attr in attrs]
def size_report(sizes=(10, 100, 1000, 10000, 100000, 100000000),
                concurrency=50):
    """Yield a header row, then one row of ab statistics per response size."""
    session = ABSession(concurrency=concurrency)
    attr_names, column_titles, _patterns = list(zip(*session.parse_patterns))
    yield ('bytes',) + column_titles
    for size in sizes:
        session.path = "%s/sizer?size=%s" % (SCRIPT_NAME, size)
        session.run()
        row = [size]
        for attr_name in attr_names:
            row.append(getattr(session, attr_name))
        yield row
def print_report(rows):
    """Print *rows* as a crude table: each cell right-justified to 10
    columns and followed by ' | ', with a blank line before each row."""
    for row in rows:
        print("")
        cells = [str(cell).rjust(10) + " | " for cell in row]
        sys.stdout.write("".join(cells))
        print("")
def run_standard_benchmarks():
    """Run and print the three canned reports: dynamic-handler threads,
    staticdir threads, and response-size sweep."""
    print("")
    print("Client Thread Report (1000 requests, 14 byte response body, "
          "%s server threads):" % cherrypy.server.thread_pool)
    print_report(thread_report())
    print("")
    print("Client Thread Report (1000 requests, 14 bytes via staticdir, "
          "%s server threads):" % cherrypy.server.thread_pool)
    print_report(thread_report("%s/static/index.html" % SCRIPT_NAME))
    print("")
    print("Size Report (1000 requests, 50 client threads, "
          "%s server threads):" % cherrypy.server.thread_pool)
    print_report(size_report())
# modpython and other WSGI #
def startup_modpython(req=None):
    """Start the CherryPy app server in 'serverless' mode (for modpython/WSGI)."""
    if cherrypy.engine.state == cherrypy._cpengine.STOPPED:
        if req:
            # Honor per-request Apache options: nullreq swaps in the null
            # request/response classes, ab overrides the ab binary path.
            if "nullreq" in req.get_options():
                cherrypy.engine.request_class = NullRequest
                cherrypy.engine.response_class = NullResponse
            ab_opt = req.get_options().get("ab", "")
            if ab_opt:
                global AB_PATH
                AB_PATH = ab_opt
        cherrypy.engine.start()
    if cherrypy.engine.state == cherrypy._cpengine.STARTING:
        # Block until the engine has finished starting.
        cherrypy.engine.wait()
    return 0  # apache.OK
def run_modpython(use_wsgi=False):
    """Launch Apache/mod_python hosting this benchmark app, run the
    benchmarks via the module-level run(), and stop Apache afterwards."""
    print("Starting mod_python...")
    pyopts = []
    # Pass the null and ab=path options through Apache
    if "--null" in opts:
        pyopts.append(("nullreq", ""))
    if "--ab" in opts:
        pyopts.append(("ab", opts["--ab"]))
    s = _cpmodpy.ModPythonServer
    if use_wsgi:
        # Route through the WSGI gateway instead of the native handler.
        pyopts.append(("wsgi.application", "cherrypy::tree"))
        pyopts.append(("wsgi.startup", "cherrypy.test.benchmark::startup_modpython"))
        handler = "modpython_gateway::handler"
        s = s(port=54583, opts=pyopts, apache_path=APACHE_PATH, handler=handler)
    else:
        pyopts.append(("cherrypy.setup", "cherrypy.test.benchmark::startup_modpython"))
        s = s(port=54583, opts=pyopts, apache_path=APACHE_PATH)
    try:
        s.start()
        run()
    finally:
        # Always tear down Apache, even if the benchmarks raised.
        s.stop()
if __name__ == '__main__':
    longopts = ['cpmodpy', 'modpython', 'null', 'notests',
                'help', 'ab=', 'apache=']
    try:
        switches, args = getopt.getopt(sys.argv[1:], "", longopts)
        opts = dict(switches)
    except getopt.GetoptError:
        print(__doc__)
        sys.exit(2)
    if "--help" in opts:
        print(__doc__)
        sys.exit(0)
    if "--ab" in opts:
        # Explicit path to the ab binary.
        AB_PATH = opts['--ab']
    if "--notests" in opts:
        # Return without stopping the server, so that the pages
        # can be tested from a standard web browser.
        def run():
            port = cherrypy.server.socket_port
            print("You may now open http://127.0.0.1:%s%s/" %
                  (port, SCRIPT_NAME))
            if "--null" in opts:
                print("Using null Request object")
    else:
        # Normal mode: run the canned benchmarks, then shut the engine down.
        def run():
            end = time.time() - start
            print("Started in %s seconds" % end)
            if "--null" in opts:
                print("\nUsing null Request object")
            try:
                try:
                    run_standard_benchmarks()
                except:
                    # Broad on purpose: print the traceback, then re-raise.
                    print(_cperror.format_exc())
                    raise
            finally:
                cherrypy.engine.exit()
    print("Starting CherryPy app server...")

    class NullWriter(object):
        """Suppresses the printing of socket errors."""
        def write(self, data):
            pass
    sys.stderr = NullWriter()

    start = time.time()
    if "--cpmodpy" in opts:
        run_modpython()
    elif "--modpython" in opts:
        run_modpython(use_wsgi=True)
    else:
        if "--null" in opts:
            # Swap in the do-nothing request/response classes.
            cherrypy.server.request_class = NullRequest
            cherrypy.server.response_class = NullResponse
        # run() is invoked as a callback once the engine has started.
        cherrypy.engine.start_with_callback(run)
        cherrypy.engine.block()
| lgpl-3.0 |
Alpistinho/FreeCAD | src/Mod/Inspection/Init.py | 58 | 1873 | # FreeCAD init script of the Inspection module
# (c) 2001 Juergen Riegel
#***************************************************************************
#* (c) Juergen Riegel (juergen.riegel@web.de) 2002 *
#* *
#* This file is part of the FreeCAD CAx development system. *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* FreeCAD is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Lesser General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with FreeCAD; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#* Juergen Riegel 2002 *
#***************************************************************************/
| lgpl-2.1 |
gauribhoite/personfinder | env/google_appengine/lib/django-1.5/django/contrib/gis/tests/test_measure.py | 221 | 8307 | """
Distance and Area objects to allow for sensible and convenient calculation
and conversions. Here are some tests.
"""
from django.contrib.gis.measure import Distance, Area, D, A
from django.utils import unittest
class DistanceTest(unittest.TestCase):
    "Testing the Distance object"

    def testInit(self):
        "Testing initialisation from valid units"
        d = Distance(m=100)
        self.assertEqual(d.m, 100)
        d1, d2, d3 = D(m=100), D(meter=100), D(metre=100)
        for d in (d1, d2, d3):
            self.assertEqual(d.m, 100)
        # 'nm' is the nautical mile (1852 m), not nanometres.
        d = D(nm=100)
        self.assertEqual(d.m, 185200)
        y1, y2, y3 = D(yd=100), D(yard=100), D(Yard=100)
        for d in (y1, y2, y3):
            self.assertEqual(d.yd, 100)
        # Unit keyword lookup is case-insensitive.
        mm1, mm2 = D(millimeter=1000), D(MiLLiMeTeR=1000)
        for d in (mm1, mm2):
            self.assertEqual(d.m, 1.0)
            self.assertEqual(d.mm, 1000.0)

    def testInitInvalid(self):
        "Testing initialisation from invalid units"
        self.assertRaises(AttributeError, D, banana=100)

    def testAccess(self):
        "Testing access in different units"
        d = D(m=100)
        self.assertEqual(d.km, 0.1)
        self.assertAlmostEqual(d.ft, 328.084, 3)

    def testAccessInvalid(self):
        "Testing access in invalid units"
        d = D(m=100)
        self.assertFalse(hasattr(d, 'banana'))

    def testAddition(self):
        "Test addition & subtraction"
        d1 = D(m=100)
        d2 = D(m=200)
        d3 = d1 + d2
        self.assertEqual(d3.m, 300)
        d3 += d1
        self.assertEqual(d3.m, 400)
        d4 = d1 - d2
        self.assertEqual(d4.m, -100)
        d4 -= d1
        self.assertEqual(d4.m, -200)
        # Mixing Distance with bare numbers must raise; self.fail() is only
        # reached if the expected TypeError was not raised.
        with self.assertRaises(TypeError):
            d5 = d1 + 1
            self.fail('Distance + number should raise TypeError')
        with self.assertRaises(TypeError):
            d5 = d1 - 1
            self.fail('Distance - number should raise TypeError')
        with self.assertRaises(TypeError):
            d1 += 1
            self.fail('Distance += number should raise TypeError')
        with self.assertRaises(TypeError):
            d1 -= 1
            self.fail('Distance -= number should raise TypeError')

    def testMultiplication(self):
        "Test multiplication & division"
        d1 = D(m=100)
        d3 = d1 * 2
        self.assertEqual(d3.m, 200)
        d3 = 2 * d1
        self.assertEqual(d3.m, 200)
        d3 *= 5
        self.assertEqual(d3.m, 1000)
        d4 = d1 / 2
        self.assertEqual(d4.m, 50)
        d4 /= 5
        self.assertEqual(d4.m, 10)
        # Distance / Distance yields a plain ratio.
        d5 = d1 / D(m=2)
        self.assertEqual(d5, 50)
        # Distance * Distance yields an Area.
        a5 = d1 * D(m=10)
        self.assertTrue(isinstance(a5, Area))
        self.assertEqual(a5.sq_m, 100*10)
        with self.assertRaises(TypeError):
            d1 *= D(m=1)
            self.fail('Distance *= Distance should raise TypeError')
        with self.assertRaises(TypeError):
            d1 /= D(m=1)
            self.fail('Distance /= Distance should raise TypeError')

    def testUnitConversions(self):
        "Testing default units during maths"
        # The left operand's unit wins for binary operations.
        d1 = D(m=100)
        d2 = D(km=1)
        d3 = d1 + d2
        self.assertEqual(d3._default_unit, 'm')
        d4 = d2 + d1
        self.assertEqual(d4._default_unit, 'km')
        d5 = d1 * 2
        self.assertEqual(d5._default_unit, 'm')
        d6 = d1 / 2
        self.assertEqual(d6._default_unit, 'm')

    def testComparisons(self):
        "Testing comparisons"
        d1 = D(m=100)
        d2 = D(km=1)
        d3 = D(km=0)
        self.assertTrue(d2 > d1)
        self.assertTrue(d1 == d1)
        self.assertTrue(d1 < d2)
        # A zero distance is falsy.
        self.assertFalse(d3)

    def testUnitsStr(self):
        "Testing conversion to strings"
        d1 = D(m=100)
        d2 = D(km=3.5)
        self.assertEqual(str(d1), '100.0 m')
        self.assertEqual(str(d2), '3.5 km')
        self.assertEqual(repr(d1), 'Distance(m=100.0)')
        self.assertEqual(repr(d2), 'Distance(km=3.5)')

    def testUnitAttName(self):
        "Testing the `unit_attname` class method"
        unit_tuple = [('Yard', 'yd'), ('Nautical Mile', 'nm'), ('German legal metre', 'german_m'),
                      ('Indian yard', 'indian_yd'), ('Chain (Sears)', 'chain_sears'), ('Chain', 'chain')]
        for nm, att in unit_tuple:
            self.assertEqual(att, D.unit_attname(nm))
class AreaTest(unittest.TestCase):
    "Testing the Area object"

    # NOTE: two method names below were fixed from the original
    # 'testInitInvaliA' / 'testAccessInvaliA' — artifacts of a D->A
    # search/replace when this class was derived from DistanceTest.
    # They keep the 'test' prefix, so unittest discovery is unaffected.

    def testInit(self):
        "Testing initialisation from valid units"
        a = Area(sq_m=100)
        self.assertEqual(a.sq_m, 100)
        a = A(sq_m=100)
        self.assertEqual(a.sq_m, 100)
        a = A(sq_mi=100)
        self.assertEqual(a.sq_m, 258998811.0336)

    def testInitInvalid(self):
        "Testing initialisation from invalid units"
        self.assertRaises(AttributeError, A, banana=100)

    def testAccess(self):
        "Testing access in different units"
        a = A(sq_m=100)
        self.assertEqual(a.sq_km, 0.0001)
        self.assertAlmostEqual(a.sq_ft, 1076.391, 3)

    def testAccessInvalid(self):
        "Testing access in invalid units"
        a = A(sq_m=100)
        self.assertFalse(hasattr(a, 'banana'))

    def testAddition(self):
        "Test addition & subtraction"
        a1 = A(sq_m=100)
        a2 = A(sq_m=200)
        a3 = a1 + a2
        self.assertEqual(a3.sq_m, 300)
        a3 += a1
        self.assertEqual(a3.sq_m, 400)
        a4 = a1 - a2
        self.assertEqual(a4.sq_m, -100)
        a4 -= a1
        self.assertEqual(a4.sq_m, -200)
        # Mixing Area with bare numbers must raise; self.fail() is only
        # reached if the expected TypeError was not raised.
        with self.assertRaises(TypeError):
            a5 = a1 + 1
            self.fail('Area + number should raise TypeError')
        with self.assertRaises(TypeError):
            a5 = a1 - 1
            self.fail('Area - number should raise TypeError')
        with self.assertRaises(TypeError):
            a1 += 1
            self.fail('Area += number should raise TypeError')
        with self.assertRaises(TypeError):
            a1 -= 1
            self.fail('Area -= number should raise TypeError')

    def testMultiplication(self):
        "Test multiplication & division"
        a1 = A(sq_m=100)
        a3 = a1 * 2
        self.assertEqual(a3.sq_m, 200)
        a3 = 2 * a1
        self.assertEqual(a3.sq_m, 200)
        a3 *= 5
        self.assertEqual(a3.sq_m, 1000)
        a4 = a1 / 2
        self.assertEqual(a4.sq_m, 50)
        a4 /= 5
        self.assertEqual(a4.sq_m, 10)
        # Unlike Distance, Area * Area and Area / Area are undefined.
        with self.assertRaises(TypeError):
            a5 = a1 * A(sq_m=1)
            self.fail('Area * Area should raise TypeError')
        with self.assertRaises(TypeError):
            a1 *= A(sq_m=1)
            self.fail('Area *= Area should raise TypeError')
        with self.assertRaises(TypeError):
            a5 = a1 / A(sq_m=1)
            self.fail('Area / Area should raise TypeError')
        with self.assertRaises(TypeError):
            a1 /= A(sq_m=1)
            self.fail('Area /= Area should raise TypeError')

    def testUnitConversions(self):
        "Testing default units during maths"
        # The left operand's unit wins for binary operations.
        a1 = A(sq_m=100)
        a2 = A(sq_km=1)
        a3 = a1 + a2
        self.assertEqual(a3._default_unit, 'sq_m')
        a4 = a2 + a1
        self.assertEqual(a4._default_unit, 'sq_km')
        a5 = a1 * 2
        self.assertEqual(a5._default_unit, 'sq_m')
        a6 = a1 / 2
        self.assertEqual(a6._default_unit, 'sq_m')

    def testComparisons(self):
        "Testing comparisons"
        a1 = A(sq_m=100)
        a2 = A(sq_km=1)
        a3 = A(sq_km=0)
        self.assertTrue(a2 > a1)
        self.assertTrue(a1 == a1)
        self.assertTrue(a1 < a2)
        # A zero area is falsy.
        self.assertFalse(a3)

    def testUnitsStr(self):
        "Testing conversion to strings"
        a1 = A(sq_m=100)
        a2 = A(sq_km=3.5)
        self.assertEqual(str(a1), '100.0 sq_m')
        self.assertEqual(str(a2), '3.5 sq_km')
        self.assertEqual(repr(a1), 'Area(sq_m=100.0)')
        self.assertEqual(repr(a2), 'Area(sq_km=3.5)')
def suite():
    """Build a TestSuite containing the Distance and Area test cases."""
    s = unittest.TestSuite()
    # unittest.makeSuite() is deprecated (removed in Python 3.13);
    # loadTestsFromTestCase is the long-standing equivalent.
    s.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(DistanceTest))
    s.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(AreaTest))
    return s
def run(verbosity=2):
    """Run the measure test suite with a text runner."""
    runner = unittest.TextTestRunner(verbosity=verbosity)
    runner.run(suite())


if __name__ == "__main__":
    run()
| apache-2.0 |
CasparLi/calibre | src/calibre/ebooks/tweak.py | 14 | 5485 | #!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import sys, os, shlex, subprocess, shutil, unicodedata
from calibre import prints, as_unicode, walk
from calibre.constants import iswindows, __appname__
from calibre.ptempfile import TemporaryDirectory, TemporaryFile
from calibre.libunzip import extract as zipextract
from calibre.utils.zipfile import ZipFile, ZIP_DEFLATED, ZIP_STORED
from calibre.utils.ipc.simple_worker import WorkerError
class Error(ValueError):
    """User-facing tweak failure (e.g. bad book format, missing OPF);
    raised by the exploder helpers below."""
    pass
def ask_cli_question(msg):
    """Print *msg* with a ' [y/N]: ' suffix, read a single keypress without
    waiting for Enter, and return True iff it was 'y'."""
    prints(msg, end=' [y/N]: ')
    sys.stdout.flush()
    if iswindows:
        import msvcrt
        ans = msvcrt.getch()
    else:
        import tty, termios
        # Switch the terminal to raw mode so a single keypress is readable,
        # and always restore the previous settings afterwards.
        old_settings = termios.tcgetattr(sys.stdin.fileno())
        try:
            tty.setraw(sys.stdin.fileno())
            try:
                ans = sys.stdin.read(1)
            except KeyboardInterrupt:
                ans = b''
            finally:
                termios.tcsetattr(sys.stdin.fileno(), termios.TCSADRAIN, old_settings)
    print()
    # NOTE(review): under Python 3 on POSIX, sys.stdin.read(1) returns str,
    # so the b'y' comparison would always be False — this file targets
    # Python 2 (see shebang); confirm before porting.
    return ans == b'y'
def mobi_exploder(path, tdir, question=lambda x:True):
    """Explode a MOBI/AZW* file at *path* into *tdir*, translating format
    problems into the module's user-facing Error."""
    from calibre.ebooks.mobi.tweak import explode, BadFormat
    try:
        return explode(path, tdir, question=question)
    except BadFormat as e:
        raise Error(as_unicode(e))
def zip_exploder(path, tdir, question=lambda x: True):
    """Extract a zip-based book (EPUB/HTMLZ) into *tdir* and return the
    path of the first file whose name ends in .opf; raise Error if none."""
    zipextract(path, tdir)
    opf_path = next(
        (name for name in walk(tdir) if name.lower().endswith('.opf')),
        None)
    if opf_path is None:
        raise Error('Invalid book: Could not find .opf')
    return opf_path
def zip_rebuilder(tdir, path):
    """Repackage the exploded tree under *tdir* into the zip-based book at
    *path*, writing 'mimetype' first and uncompressed."""
    skip_names = {'.DS_Store', 'mimetype', 'iTunesMetadata.plist'}
    with ZipFile(path, 'w', compression=ZIP_DEFLATED) as archive:
        # 'mimetype' must come first and be stored, not deflated.
        mimetype_path = os.path.join(tdir, 'mimetype')
        if os.path.exists(mimetype_path):
            archive.write(mimetype_path, 'mimetype', compress_type=ZIP_STORED)
        # Everything else, minus housekeeping files.
        for dirpath, _dirnames, filenames in os.walk(tdir):
            for name in filenames:
                if name in skip_names:
                    continue
                full_path = os.path.join(dirpath, name)
                relative = os.path.relpath(full_path, tdir).replace(os.sep, '/')
                arcname = unicodedata.normalize('NFC', relative)
                archive.write(full_path, arcname)
def docx_exploder(path, tdir, question=lambda x: True):
    """Extract a DOCX into *tdir*, pretty-print all of its XML, and return
    the path of document.xml; raise Error if it is missing."""
    zipextract(path, tdir)
    from calibre.ebooks.docx.dump import pretty_all_xml_in_dir
    pretty_all_xml_in_dir(tdir)
    document_xml = next(
        (candidate for candidate in walk(tdir)
         if os.path.basename(candidate) == 'document.xml'),
        None)
    if document_xml is None:
        raise Error('Invalid book: Could not find document.xml')
    return document_xml
def get_tools(fmt):
    """Return the (exploder, rebuilder) pair for the format *fmt*
    (case-insensitive), or (None, None) if the format is unsupported."""
    fmt = fmt.lower()
    if fmt in {'mobi', 'azw', 'azw3'}:
        # Imported lazily so unsupported formats never touch the MOBI code.
        from calibre.ebooks.mobi.tweak import rebuild
        return mobi_exploder, rebuild
    if fmt in {'epub', 'htmlz'}:
        return zip_exploder, zip_rebuilder
    if fmt == 'docx':
        return docx_exploder, zip_rebuilder
    return None, None
def tweak(ebook_file):
    ''' Command line interface to the Tweak Book tool: explode the book into
    a temporary directory, let the user edit it (interactively or via
    $EDITOR when it is a vim variant), then rebuild the book in place. '''
    fmt = ebook_file.rpartition('.')[-1].lower()
    exploder, rebuilder = get_tools(fmt)
    if exploder is None:
        prints('Cannot tweak %s files. Supported formats are: EPUB, HTMLZ, AZW3, MOBI' % fmt.upper()
               , file=sys.stderr)
        raise SystemExit(1)
    with TemporaryDirectory('_tweak_'+
            os.path.basename(ebook_file).rpartition('.')[0]) as tdir:
        try:
            opf = exploder(ebook_file, tdir, question=ask_cli_question)
        except WorkerError as e:
            prints('Failed to unpack', ebook_file)
            prints(e.orig_tb)
            raise SystemExit(1)
        except Error as e:
            prints(as_unicode(e), file=sys.stderr)
            raise SystemExit(1)
        if opf is None:
            # The question was answered with No
            return
        ed = os.environ.get('EDITOR', 'dummy')
        cmd = shlex.split(ed)
        # Treat any $EDITOR whose executable name ends in 'vim' specially.
        isvim = bool([x for x in cmd[0].split('/') if x.endswith('vim')])
        proceed = False
        prints('Book extracted to', tdir)
        if not isvim:
            # Non-vim flow: user edits the directory out-of-band, then
            # confirms the rebuild on the console.
            prints('Make your tweaks and once you are done,', __appname__,
                   'will rebuild', ebook_file, 'from', tdir)
            print()
            proceed = ask_cli_question('Rebuild ' + ebook_file + '?')
        else:
            # Vim flow: zip the tree, open the zip in vim, then unpack the
            # (possibly edited) zip back over tdir.
            base = os.path.basename(ebook_file)
            with TemporaryFile(base+'.zip') as zipf:
                with ZipFile(zipf, 'w') as zf:
                    zf.add_dir(tdir)
                try:
                    subprocess.check_call(cmd + [zipf])
                except:
                    # Broad on purpose: any editor failure aborts the tweak.
                    prints(ed, 'failed, aborting...')
                    raise SystemExit(1)
                with ZipFile(zipf, 'r') as zf:
                    shutil.rmtree(tdir)
                    os.mkdir(tdir)
                    zf.extractall(path=tdir)
                proceed = True
        if proceed:
            prints('Rebuilding', ebook_file, 'please wait ...')
            try:
                rebuilder(tdir, ebook_file)
            except WorkerError as e:
                prints('Failed to rebuild', ebook_file)
                prints(e.orig_tb)
                raise SystemExit(1)
            prints(ebook_file, 'successfully tweaked')
| gpl-3.0 |
rdmorganiser/rdmo | rdmo/projects/tests/test_view_project_update_import.py | 1 | 14683 | import os
import re
from pathlib import Path

import pytest

from django.urls import reverse

from rdmo.core.constants import VALUE_TYPE_FILE

from ..models import Project, Value

# (username, password) fixtures covering every role; password None means an
# anonymous (unauthenticated) client.
users = (
    ('owner', 'owner'),
    ('manager', 'manager'),
    ('author', 'author'),
    ('guest', 'guest'),
    ('user', 'user'),
    ('site', 'site'),
    ('anonymous', None),
)

# Project ids each role may view, per the test fixtures.
view_project_permission_map = {
    'owner': [1, 2, 3, 4, 5],
    'manager': [1, 3, 5],
    'author': [1, 3, 5],
    'guest': [1, 3, 5],
    'api': [1, 2, 3, 4, 5],
    'site': [1, 2, 3, 4, 5]
}

# Project ids each role may change; roles absent here may not change any.
change_project_permission_map = {
    'owner': [1, 2, 3, 4, 5],
    'manager': [1, 3, 5],
    'api': [1, 2, 3, 4, 5],
    'site': [1, 2, 3, 4, 5]
}

# All project ids present in the fixtures.
projects = [1, 2, 3, 4, 5]
@pytest.mark.parametrize('username,password', users)
@pytest.mark.parametrize('project_id', projects)
def test_project_update_import_get(db, client, username, password, project_id):
    """GET on the import view redirects permitted users back to the project;
    other authenticated users get 403, anonymous users a login redirect."""
    client.login(username=username, password=password)

    url = reverse('project_update_import', args=[project_id])
    response = client.get(url)

    if project_id in change_project_permission_map.get(username, []):
        assert response.status_code == 302
        assert response.url == '/projects/{}/'.format(project_id)
    elif password:
        assert response.status_code == 403
    else:
        assert response.status_code == 302
        assert response.url.startswith('/account/login/')
@pytest.mark.parametrize('username,password', users)
@pytest.mark.parametrize('project_id', projects)
def test_project_update_import_post_error(db, settings, client, username, password, project_id):
    """POSTing an unknown import method yields 400 for permitted users."""
    client.login(username=username, password=password)

    url = reverse('project_update_import', args=[project_id])
    response = client.post(url, {
        'method': 'wrong'
    })

    if project_id in change_project_permission_map.get(username, []):
        assert response.status_code == 400
    elif password:
        assert response.status_code == 403
    else:
        assert response.status_code == 302
        assert response.url.startswith('/account/login/')
@pytest.mark.parametrize('username,password', users)
@pytest.mark.parametrize('project_id', projects)
def test_project_update_import_post_upload_file(db, settings, client, username, password, project_id):
    """Uploading a valid project XML shows the import confirmation page."""
    client.login(username=username, password=password)

    url = reverse('project_update_import', args=[project_id])
    xml_file = os.path.join(settings.BASE_DIR, 'xml', 'project.xml')
    with open(xml_file, encoding='utf8') as f:
        response = client.post(url, {
            'method': 'upload_file',
            'uploaded_file': f
        })

    if project_id in change_project_permission_map.get(username, []):
        assert response.status_code == 200
        assert b'Import from project.xml' in response.content
    elif password:
        assert response.status_code == 403
    else:
        assert response.status_code == 302
        assert response.url.startswith('/account/login/')
@pytest.mark.parametrize('username,password', users)
@pytest.mark.parametrize('project_id', projects)
def test_project_update_import_post_upload_file_error(db, settings, client, username, password, project_id):
    """Uploading a file that is not importable yields 400 with an error page."""
    client.login(username=username, password=password)

    url = reverse('project_update_import', args=[project_id])
    xml_file = os.path.join(settings.BASE_DIR, 'xml', 'error.xml')
    with open(xml_file, encoding='utf8') as f:
        response = client.post(url, {
            'method': 'upload_file',
            'uploaded_file': f
        })

    if project_id in change_project_permission_map.get(username, []):
        assert response.status_code == 400
        assert b'Files of this type cannot be imported.' in response.content
    elif password:
        assert response.status_code == 403
    else:
        assert response.status_code == 302
        assert response.url.startswith('/account/login/')
@pytest.mark.parametrize('username,password', users)
@pytest.mark.parametrize('project_id', projects)
def test_project_update_import_post_upload_file_empty(db, client, username, password, project_id):
    """POSTing upload_file without an actual file yields 400."""
    client.login(username=username, password=password)

    url = reverse('project_update_import', args=[project_id])
    response = client.post(url, {
        'method': 'upload_file'
    })

    if project_id in change_project_permission_map.get(username, []):
        assert response.status_code == 400
        assert b'There has been an error with your import.' in response.content
    elif password:
        assert response.status_code == 403
    else:
        assert response.status_code == 302
        assert response.url.startswith('/account/login/')
@pytest.mark.parametrize('username,password', users)
@pytest.mark.parametrize('project_id', projects)
def test_project_update_import_post_import_file(db, settings, client, files, username, password, project_id):
    """Full upload-then-import round trip: confirming every offered checkbox
    must not create new projects, snapshots, or (for project 1) values, must
    leave uploaded files in MEDIA_ROOT, and must not bump project.updated."""
    client.login(username=username, password=password)

    # Capture counts before the import to assert nothing new is created.
    projects_count = Project.objects.count()
    project = Project.objects.get(pk=project_id)
    project_updated = project.updated
    project_snapshot_count = project.snapshots.count()
    project_snapshot_values_count = project.values.filter(snapshot=None).count()
    project_values_count = project.values.count()

    # upload file
    url = reverse('project_update_import', args=[project_id])
    xml_file = os.path.join(settings.BASE_DIR, 'xml', 'project.xml')
    with open(xml_file, encoding='utf8') as f:
        response = client.post(url, {
            'method': 'upload_file',
            'uploaded_file': f
        })

    if project_id in change_project_permission_map.get(username, []):
        assert response.status_code == 200

        # get keys from the response
        keys = re.findall(r'name=\"(.*?)\"', response.content.decode())

        # import file
        url = reverse('project_update_import', args=[project_id])
        data = {key: ['on'] for key in keys}
        data.update({'method': 'import_file'})
        response = client.post(url, data)

        # check if all the files are where are supposed to be
        for file_value in Value.objects.filter(value_type=VALUE_TYPE_FILE):
            assert Path(settings.MEDIA_ROOT).joinpath(file_value.file.name).exists()

        # no new project, snapshots, values were created
        project = Project.objects.get(pk=project_id)
        assert Project.objects.count() == projects_count
        assert project.snapshots.count() == project_snapshot_count
        if project_id == 1:
            assert project.values.count() == project_values_count
            assert project.values.filter(snapshot=None).count() == project_snapshot_values_count
        assert project.updated == project_updated

        if project_id in change_project_permission_map.get(username, []):
            assert response.status_code == 302
            assert response.url == '/projects/{}/'.format(project_id)
        else:
            if password:
                assert response.status_code == 403
            else:
                assert response.status_code == 302
                assert response.url.startswith('/account/login/')
    elif password:
        assert response.status_code == 403
    else:
        assert response.status_code == 302
        assert response.url.startswith('/account/login/')
@pytest.mark.parametrize('username,password', users)
@pytest.mark.parametrize('project_id', projects)
def test_project_update_import_post_import_file_empty(db, settings, client, username, password, project_id):
    """Confirming the import with no checkboxes selected is a no-op: nothing
    is created or modified and the user is redirected to the project."""
    client.login(username=username, password=password)

    # Capture counts before the import to assert nothing changes.
    projects_count = Project.objects.count()
    project = Project.objects.get(pk=project_id)
    project_updated = project.updated
    project_snapshot_count = project.snapshots.count()
    project_snapshot_values_count = project.values.filter(snapshot=None).count()
    project_values_count = project.values.count()

    # upload file
    url = reverse('project_update_import', args=[project_id])
    xml_file = os.path.join(settings.BASE_DIR, 'xml', 'project.xml')
    with open(xml_file, encoding='utf8') as f:
        response = client.post(url, {
            'method': 'upload_file',
            'uploaded_file': f
        })

    if project_id in change_project_permission_map.get(username, []):
        assert response.status_code == 200, project_id

        response = client.post(url, {
            'method': 'import_file'
        })

        # check if all the files are where are supposed to be
        for file_value in Value.objects.filter(value_type=VALUE_TYPE_FILE):
            assert Path(settings.MEDIA_ROOT).joinpath(file_value.file.name).exists()

        # no new project, snapshots, values were created
        project = Project.objects.get(pk=project_id)
        assert Project.objects.count() == projects_count
        assert project.snapshots.count() == project_snapshot_count
        assert project.values.count() == project_values_count
        assert project.values.filter(snapshot=None).count() == project_snapshot_values_count
        assert project.updated == project_updated

        assert response.status_code == 302
        assert response.url == '/projects/{}/'.format(project_id)
    elif password:
        assert response.status_code == 403
    else:
        assert response.status_code == 302
        assert response.url.startswith('/account/login/')
@pytest.mark.parametrize('username,password', users)
@pytest.mark.parametrize('project_id', projects)
@pytest.mark.parametrize('source_id', projects)
def test_project_update_import_post_import_project_step1(db, settings, client, username, password, project_id, source_id):
    """Selecting another project as import source shows the confirmation
    page only if the user may change the target AND view the source."""
    client.login(username=username, password=password)

    url = reverse('project_update_import', args=[project_id])
    response = client.post(url, {
        'method': 'import_project',
        'source': source_id
    })

    if project_id in change_project_permission_map.get(username, []):
        if source_id in view_project_permission_map.get(username, []):
            assert response.status_code == 200
        else:
            assert response.status_code == 403
    elif password:
        assert response.status_code == 403
    else:
        assert response.status_code == 302
        assert response.url.startswith('/account/login/')
@pytest.mark.parametrize('username,password', users)
@pytest.mark.parametrize('project_id', projects)
@pytest.mark.parametrize('source_id', projects)
def test_project_update_import_post_import_project_step2(db, settings, client, username, password, project_id, source_id):
    """Full project-to-project import: confirming every offered checkbox
    must leave both target and source untouched (counts, updated timestamp)
    and keep uploaded files in MEDIA_ROOT."""
    client.login(username=username, password=password)

    # Capture counts for target and source to assert nothing changes.
    projects_count = Project.objects.count()
    project = Project.objects.get(pk=project_id)
    project_updated = project.updated
    project_snapshot_count = project.snapshots.count()
    project_snapshot_values_count = project.values.filter(snapshot=None).count()
    project_values_count = project.values.count()
    source = Project.objects.get(pk=source_id)
    source_snapshot_count = source.snapshots.count()
    source_snapshot_values_count = source.values.filter(snapshot=None).count()
    source_values_count = source.values.count()

    url = reverse('project_update_import', args=[project_id])
    response = client.post(url, {
        'method': 'import_project',
        'source': source_id
    })

    if project_id in change_project_permission_map.get(username, []):
        if source_id in view_project_permission_map.get(username, []):
            assert response.status_code == 200

            # get keys from the response
            keys = re.findall(r'name=\"(.*?)\"', response.content.decode())

            # import file
            url = reverse('project_update_import', args=[project_id])
            data = {key: ['on'] for key in keys}
            data.update({
                'method': 'import_project',
                'source': source_id
            })
            response = client.post(url, data)

            # check if all the files are where are supposed to be
            for file_value in Value.objects.filter(value_type=VALUE_TYPE_FILE):
                assert Path(settings.MEDIA_ROOT).joinpath(file_value.file.name).exists()

            # no new project, snapshots, values were created
            project = Project.objects.get(pk=project_id)
            source = Project.objects.get(pk=source_id)

            # no new project was created
            assert Project.objects.count() == projects_count

            # the project has the correct count of snapshot and values
            assert project.snapshots.count() == project_snapshot_count
            if project_id == 1:
                assert project.values.count() == project_values_count
                assert project.values.filter(snapshot=None).count() == project_snapshot_values_count

            # the source project has the correct count of snapshot and values
            assert source.snapshots.count() == source_snapshot_count
            if source_id == 1:
                assert source.values.count() == source_values_count
                assert source.values.filter(snapshot=None).count() == source_snapshot_values_count

            assert project.updated == project_updated

            if project_id in change_project_permission_map.get(username, []):
                assert response.status_code == 302
                assert response.url == '/projects/{}/'.format(project_id)
            else:
                if password:
                    assert response.status_code == 403
                else:
                    assert response.status_code == 302
                    assert response.url.startswith('/account/login/')
        else:
            assert response.status_code == 403
    elif password:
        assert response.status_code == 403
    else:
        assert response.status_code == 302
        assert response.url.startswith('/account/login/')
@pytest.mark.parametrize('username,password', users)
@pytest.mark.parametrize('project_id', projects)
def test_project_update_import_post_import_project_error(db, settings, client, username, password, project_id):
    """POSTing import_project without a source project yields 404."""
    client.login(username=username, password=password)

    # upload file
    url = reverse('project_update_import', args=[project_id])
    response = client.post(url, {
        'method': 'import_project'
    })

    if project_id in change_project_permission_map.get(username, []):
        assert response.status_code == 404
    elif password:
        assert response.status_code == 403
    else:
        assert response.status_code == 302
        assert response.url.startswith('/account/login/')
| apache-2.0 |
BubuLK/sfepy | sfepy/discrete/structural/mappings.py | 5 | 2861 | """
Finite element reference mappings for structural elements.
"""
import numpy as nm
from sfepy.linalg import dot_sequences as ddot
from sfepy.discrete.common.mappings import Mapping, PhysicalQPs
import sfepy.mechanics.shell10x as shell10x
class Shell10XMapping(Mapping):
    """
    The reference mapping for the shell10x element.
    """

    def __init__(self, region, field):
        # region: the mesh region the mapping is defined on.
        # field: the approximation field providing the poly_space.
        self.region = region
        self.field = field

    def get_physical_qps(self, qp_coors):
        """
        Get physical quadrature points corresponding the given reference
        element quadrature points.

        Returns
        -------
        qps : array
            The physical quadrature points ordered element by element,
            i.e. with shape (n_el, n_qp, dim).

        Notes
        -----
        Uses self.bfu, self.coors_loc_3d, self.mtx_t and self.e_centres,
        which are set by get_mapping() — call that first.
        """
        phys_qps = PhysicalQPs()

        bf = self.bfu.squeeze()

        # Interpolate local 3D coordinates at the QPs, rotate them back to
        # the global frame and shift by the element centroids.
        qps_loc = nm.einsum('qi,cqij->cqj', bf, self.coors_loc_3d)
        qps = nm.einsum('cqi,cji->cqj', qps_loc, self.mtx_t)
        qps += self.e_centres[:, None, :]

        n_el, n_qp = qps.shape[0], qps.shape[1]
        phys_qps.num = n_el * n_qp
        # Record the (n_el, n_qp, dim) shape before flattening qps in place
        # to (n_el * n_qp, dim) — the order of these two lines matters.
        phys_qps.shape = qps.shape

        qps.shape = (phys_qps.num, qps.shape[2])
        phys_qps.values = qps

        return phys_qps

    def get_mapping(self, qp_coors, weights):
        """
        Get the mapping for given quadrature points and weights.

        Computes and caches on self the element transformation matrices,
        local bases, rotation operators, basis values/gradients and the
        mapping Jacobians; returns self.
        """
        domain = self.region.domain
        mesh = domain.mesh

        iels = self.region.get_cells()
        conn = nm.take(domain.get_conn(), iels.astype(nm.int32), axis=0)

        e_coors = mesh.coors[conn]
        mtx_t = shell10x.create_transformation_matrix(e_coors)

        e_centres = mesh.cmesh.get_centroids(2)[iels]
        # Element coordinates in the local (rotated, centred) frame.
        coors_loc = ddot((e_coors - e_centres[:, None, :]), mtx_t)

        ebs = shell10x.create_local_bases(coors_loc)
        rops = shell10x.create_rotation_ops(ebs)

        ps = self.field.poly_space

        # Reference mid-point mapping (single QP at the element centre).
        qp_coors0 = nm.array([[0.5, 0.5, 0.5]])
        # Should be thickness, but not used anywhere.
        qp_weights0 = nm.array([0.0])

        dxidx0, det0 = shell10x.get_mapping_data(ebs, rops, ps, coors_loc,
                                                 qp_coors0, qp_weights0,
                                                 special_dx3=True)

        aux = shell10x.get_mapping_data(ebs, rops, ps, coors_loc,
                                        qp_coors, weights)
        coors_loc_3d, bfu, bfgm, dxidx, det = aux

        self.coors_loc = coors_loc
        self.e_centres = e_centres
        self.mtx_t = mtx_t
        self.ebs = ebs
        self.rops = rops
        self.dxidx0 = dxidx0
        self.det0 = det0
        self.coors_loc_3d = coors_loc_3d
        self.bfu = bfu
        self.bfgm = bfgm
        self.dxidx = dxidx
        self.det = det
        self.volume = (det * weights).sum(1)

        return self
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.