repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
raingo/TGIF-Release
code/gifs-filter/email_notify.py
1
1267
#!/usr/bin/env python """ Notify program finishing with email *.py "msg" """ import smtplib import sys import socket app_name = 'GIF Filtering' hostname = socket.gethostname() import os cwd = os.getcwd() g_config = { 'HOST': 'smtp.gmail.com', 'port': 587, 'FROM': '"%s ALERT" <m9Bb7voPjTEC@gmail.com>' % app_name, 'TO': 'raingomm@gmail.com', 'SUBJECT':'New Notification Event From [%s]' % app_name, 'username': 'm9Bb7voPjTEC@gmail.com', 'pwd': 'Rq3TdwTs7M7gQfWz', } def send_mail(msg, config = None): if not config: config = g_config session = smtplib.SMTP(config['HOST'], config['port']) session.ehlo() session.starttls() session.ehlo() session.login(config['username'], config['pwd']) headers = ["from: " + config['FROM'], "subject: " + config['SUBJECT'], "to: " + config['TO'], "mime-version: 1.0", "content-type: text/html"] headers = "\r\n".join(headers) msg = msg + '\r\n\r\n from ' + hostname + ' \r\n\r\n at ' + cwd session.sendmail(config['FROM'], config['TO'], headers + "\r\n\r\n" + msg) def main(): if len(sys.argv) == 2: send_mail(sys.argv[1]) if __name__ == "__main__": main() # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
bsd-3-clause
dreamhost/teuthology
teuthology/orchestra/test/integration/test_integration.py
9
2304
from teuthology.orchestra import monkey monkey.patch_all() from cStringIO import StringIO import os from teuthology.orchestra import connection, remote, run from teuthology.orchestra.test.util import assert_raises from teuthology.exceptions import CommandCrashedError, ConnectionLostError from pytest import skip HOST = None class TestIntegration(): def setup(self): try: host = os.environ['ORCHESTRA_TEST_HOST'] except KeyError: skip('To run integration tests, set environment ' + 'variable ORCHESTRA_TEST_HOST to user@host to use.') global HOST HOST = host def test_crash(self): ssh = connection.connect(HOST) e = assert_raises( CommandCrashedError, run.run, client=ssh, args=['sh', '-c', 'kill -ABRT $$'], ) assert e.command == "sh -c 'kill -ABRT $$'" assert str(e) == "Command crashed: \"sh -c 'kill -ABRT $$'\"" def test_lost(self): ssh = connection.connect(HOST) e = assert_raises( ConnectionLostError, run.run, client=ssh, args=['sh', '-c', 'kill -ABRT $PPID'], name=HOST, ) assert e.command == "sh -c 'kill -ABRT $PPID'" assert str(e) == \ "SSH connection to {host} was lost: ".format(host=HOST) + \ "\"sh -c 'kill -ABRT $PPID'\"" def test_pipe(self): ssh = connection.connect(HOST) r = run.run( client=ssh, args=['cat'], stdin=run.PIPE, stdout=StringIO(), wait=False, ) assert r.stdout.getvalue() == '' r.stdin.write('foo\n') r.stdin.write('bar\n') r.stdin.close() r.wait() got = r.exitstatus assert got == 0 assert r.stdout.getvalue() == 'foo\nbar\n' def test_and(self): ssh = connection.connect(HOST) r = run.run( client=ssh, args=['true', run.Raw('&&'), 'echo', 'yup'], stdout=StringIO(), ) assert r.stdout.getvalue() == 'yup\n' def test_os(self): rem = remote.Remote(HOST) assert rem.os.name assert rem.os.version
mit
OptimusGitEtna/RestSymf
Python-3.4.2/Lib/test/test_xmlrpc.py
72
42366
import base64 import datetime import sys import time import unittest from unittest import mock import xmlrpc.client as xmlrpclib import xmlrpc.server import http.client import socket import os import re import io import contextlib from test import support try: import gzip except ImportError: gzip = None try: import threading except ImportError: threading = None alist = [{'astring': 'foo@bar.baz.spam', 'afloat': 7283.43, 'anint': 2**20, 'ashortlong': 2, 'anotherlist': ['.zyx.41'], 'abase64': xmlrpclib.Binary(b"my dog has fleas"), 'b64bytes': b"my dog has fleas", 'b64bytearray': bytearray(b"my dog has fleas"), 'boolean': False, 'unicode': '\u4000\u6000\u8000', 'ukey\u4000': 'regular value', 'datetime1': xmlrpclib.DateTime('20050210T11:41:23'), 'datetime2': xmlrpclib.DateTime( (2005, 2, 10, 11, 41, 23, 0, 1, -1)), 'datetime3': xmlrpclib.DateTime( datetime.datetime(2005, 2, 10, 11, 41, 23)), }] class XMLRPCTestCase(unittest.TestCase): def test_dump_load(self): dump = xmlrpclib.dumps((alist,)) load = xmlrpclib.loads(dump) self.assertEqual(alist, load[0][0]) def test_dump_bare_datetime(self): # This checks that an unwrapped datetime.date object can be handled # by the marshalling code. 
This can't be done via test_dump_load() # since with use_builtin_types set to 1 the unmarshaller would create # datetime objects for the 'datetime[123]' keys as well dt = datetime.datetime(2005, 2, 10, 11, 41, 23) self.assertEqual(dt, xmlrpclib.DateTime('20050210T11:41:23')) s = xmlrpclib.dumps((dt,)) result, m = xmlrpclib.loads(s, use_builtin_types=True) (newdt,) = result self.assertEqual(newdt, dt) self.assertIs(type(newdt), datetime.datetime) self.assertIsNone(m) result, m = xmlrpclib.loads(s, use_builtin_types=False) (newdt,) = result self.assertEqual(newdt, dt) self.assertIs(type(newdt), xmlrpclib.DateTime) self.assertIsNone(m) result, m = xmlrpclib.loads(s, use_datetime=True) (newdt,) = result self.assertEqual(newdt, dt) self.assertIs(type(newdt), datetime.datetime) self.assertIsNone(m) result, m = xmlrpclib.loads(s, use_datetime=False) (newdt,) = result self.assertEqual(newdt, dt) self.assertIs(type(newdt), xmlrpclib.DateTime) self.assertIsNone(m) def test_datetime_before_1900(self): # same as before but with a date before 1900 dt = datetime.datetime(1, 2, 10, 11, 41, 23) self.assertEqual(dt, xmlrpclib.DateTime('00010210T11:41:23')) s = xmlrpclib.dumps((dt,)) result, m = xmlrpclib.loads(s, use_builtin_types=True) (newdt,) = result self.assertEqual(newdt, dt) self.assertIs(type(newdt), datetime.datetime) self.assertIsNone(m) result, m = xmlrpclib.loads(s, use_builtin_types=False) (newdt,) = result self.assertEqual(newdt, dt) self.assertIs(type(newdt), xmlrpclib.DateTime) self.assertIsNone(m) def test_bug_1164912 (self): d = xmlrpclib.DateTime() ((new_d,), dummy) = xmlrpclib.loads(xmlrpclib.dumps((d,), methodresponse=True)) self.assertIsInstance(new_d.value, str) # Check that the output of dumps() is still an 8-bit string s = xmlrpclib.dumps((new_d,), methodresponse=True) self.assertIsInstance(s, str) def test_newstyle_class(self): class T(object): pass t = T() t.x = 100 t.y = "Hello" ((t2,), dummy) = xmlrpclib.loads(xmlrpclib.dumps((t,))) self.assertEqual(t2, 
t.__dict__) def test_dump_big_long(self): self.assertRaises(OverflowError, xmlrpclib.dumps, (2**99,)) def test_dump_bad_dict(self): self.assertRaises(TypeError, xmlrpclib.dumps, ({(1,2,3): 1},)) def test_dump_recursive_seq(self): l = [1,2,3] t = [3,4,5,l] l.append(t) self.assertRaises(TypeError, xmlrpclib.dumps, (l,)) def test_dump_recursive_dict(self): d = {'1':1, '2':1} t = {'3':3, 'd':d} d['t'] = t self.assertRaises(TypeError, xmlrpclib.dumps, (d,)) def test_dump_big_int(self): if sys.maxsize > 2**31-1: self.assertRaises(OverflowError, xmlrpclib.dumps, (int(2**34),)) xmlrpclib.dumps((xmlrpclib.MAXINT, xmlrpclib.MININT)) self.assertRaises(OverflowError, xmlrpclib.dumps, (xmlrpclib.MAXINT+1,)) self.assertRaises(OverflowError, xmlrpclib.dumps, (xmlrpclib.MININT-1,)) def dummy_write(s): pass m = xmlrpclib.Marshaller() m.dump_int(xmlrpclib.MAXINT, dummy_write) m.dump_int(xmlrpclib.MININT, dummy_write) self.assertRaises(OverflowError, m.dump_int, xmlrpclib.MAXINT+1, dummy_write) self.assertRaises(OverflowError, m.dump_int, xmlrpclib.MININT-1, dummy_write) def test_dump_double(self): xmlrpclib.dumps((float(2 ** 34),)) xmlrpclib.dumps((float(xmlrpclib.MAXINT), float(xmlrpclib.MININT))) xmlrpclib.dumps((float(xmlrpclib.MAXINT + 42), float(xmlrpclib.MININT - 42))) def dummy_write(s): pass m = xmlrpclib.Marshaller() m.dump_double(xmlrpclib.MAXINT, dummy_write) m.dump_double(xmlrpclib.MININT, dummy_write) m.dump_double(xmlrpclib.MAXINT + 42, dummy_write) m.dump_double(xmlrpclib.MININT - 42, dummy_write) def test_dump_none(self): value = alist + [None] arg1 = (alist + [None],) strg = xmlrpclib.dumps(arg1, allow_none=True) self.assertEqual(value, xmlrpclib.loads(strg)[0][0]) self.assertRaises(TypeError, xmlrpclib.dumps, (arg1,)) def test_dump_bytes(self): sample = b"my dog has fleas" self.assertEqual(sample, xmlrpclib.Binary(sample)) for type_ in bytes, bytearray, xmlrpclib.Binary: value = type_(sample) s = xmlrpclib.dumps((value,)) result, m = xmlrpclib.loads(s, 
use_builtin_types=True) (newvalue,) = result self.assertEqual(newvalue, sample) self.assertIs(type(newvalue), bytes) self.assertIsNone(m) result, m = xmlrpclib.loads(s, use_builtin_types=False) (newvalue,) = result self.assertEqual(newvalue, sample) self.assertIs(type(newvalue), xmlrpclib.Binary) self.assertIsNone(m) def test_get_host_info(self): # see bug #3613, this raised a TypeError transp = xmlrpc.client.Transport() self.assertEqual(transp.get_host_info("user@host.tld"), ('host.tld', [('Authorization', 'Basic dXNlcg==')], {})) def test_ssl_presence(self): try: import ssl except ImportError: has_ssl = False else: has_ssl = True try: xmlrpc.client.ServerProxy('https://localhost:9999').bad_function() except NotImplementedError: self.assertFalse(has_ssl, "xmlrpc client's error with SSL support") except OSError: self.assertTrue(has_ssl) class HelperTestCase(unittest.TestCase): def test_escape(self): self.assertEqual(xmlrpclib.escape("a&b"), "a&amp;b") self.assertEqual(xmlrpclib.escape("a<b"), "a&lt;b") self.assertEqual(xmlrpclib.escape("a>b"), "a&gt;b") class FaultTestCase(unittest.TestCase): def test_repr(self): f = xmlrpclib.Fault(42, 'Test Fault') self.assertEqual(repr(f), "<Fault 42: 'Test Fault'>") self.assertEqual(repr(f), str(f)) def test_dump_fault(self): f = xmlrpclib.Fault(42, 'Test Fault') s = xmlrpclib.dumps((f,)) (newf,), m = xmlrpclib.loads(s) self.assertEqual(newf, {'faultCode': 42, 'faultString': 'Test Fault'}) self.assertEqual(m, None) s = xmlrpclib.Marshaller().dumps(f) self.assertRaises(xmlrpclib.Fault, xmlrpclib.loads, s) def test_dotted_attribute(self): # this will raise AttributeError because code don't want us to use # private methods self.assertRaises(AttributeError, xmlrpc.server.resolve_dotted_attribute, str, '__add') self.assertTrue(xmlrpc.server.resolve_dotted_attribute(str, 'title')) class DateTimeTestCase(unittest.TestCase): def test_default(self): with mock.patch('time.localtime') as localtime_mock: time_struct = time.struct_time( 
[2013, 7, 15, 0, 24, 49, 0, 196, 0]) localtime_mock.return_value = time_struct localtime = time.localtime() t = xmlrpclib.DateTime() self.assertEqual(str(t), time.strftime("%Y%m%dT%H:%M:%S", localtime)) def test_time(self): d = 1181399930.036952 t = xmlrpclib.DateTime(d) self.assertEqual(str(t), time.strftime("%Y%m%dT%H:%M:%S", time.localtime(d))) def test_time_tuple(self): d = (2007,6,9,10,38,50,5,160,0) t = xmlrpclib.DateTime(d) self.assertEqual(str(t), '20070609T10:38:50') def test_time_struct(self): d = time.localtime(1181399930.036952) t = xmlrpclib.DateTime(d) self.assertEqual(str(t), time.strftime("%Y%m%dT%H:%M:%S", d)) def test_datetime_datetime(self): d = datetime.datetime(2007,1,2,3,4,5) t = xmlrpclib.DateTime(d) self.assertEqual(str(t), '20070102T03:04:05') def test_repr(self): d = datetime.datetime(2007,1,2,3,4,5) t = xmlrpclib.DateTime(d) val ="<DateTime '20070102T03:04:05' at %x>" % id(t) self.assertEqual(repr(t), val) def test_decode(self): d = ' 20070908T07:11:13 ' t1 = xmlrpclib.DateTime() t1.decode(d) tref = xmlrpclib.DateTime(datetime.datetime(2007,9,8,7,11,13)) self.assertEqual(t1, tref) t2 = xmlrpclib._datetime(d) self.assertEqual(t2, tref) def test_comparison(self): now = datetime.datetime.now() dtime = xmlrpclib.DateTime(now.timetuple()) # datetime vs. DateTime self.assertTrue(dtime == now) self.assertTrue(now == dtime) then = now + datetime.timedelta(seconds=4) self.assertTrue(then >= dtime) self.assertTrue(dtime < then) # str vs. 
DateTime dstr = now.strftime("%Y%m%dT%H:%M:%S") self.assertTrue(dtime == dstr) self.assertTrue(dstr == dtime) dtime_then = xmlrpclib.DateTime(then.timetuple()) self.assertTrue(dtime_then >= dstr) self.assertTrue(dstr < dtime_then) # some other types dbytes = dstr.encode('ascii') dtuple = now.timetuple() with self.assertRaises(TypeError): dtime == 1970 with self.assertRaises(TypeError): dtime != dbytes with self.assertRaises(TypeError): dtime == bytearray(dbytes) with self.assertRaises(TypeError): dtime != dtuple with self.assertRaises(TypeError): dtime < float(1970) with self.assertRaises(TypeError): dtime > dbytes with self.assertRaises(TypeError): dtime <= bytearray(dbytes) with self.assertRaises(TypeError): dtime >= dtuple class BinaryTestCase(unittest.TestCase): # XXX What should str(Binary(b"\xff")) return? I'm chosing "\xff" # for now (i.e. interpreting the binary data as Latin-1-encoded # text). But this feels very unsatisfactory. Perhaps we should # only define repr(), and return r"Binary(b'\xff')" instead? def test_default(self): t = xmlrpclib.Binary() self.assertEqual(str(t), '') def test_string(self): d = b'\x01\x02\x03abc123\xff\xfe' t = xmlrpclib.Binary(d) self.assertEqual(str(t), str(d, "latin-1")) def test_decode(self): d = b'\x01\x02\x03abc123\xff\xfe' de = base64.encodebytes(d) t1 = xmlrpclib.Binary() t1.decode(de) self.assertEqual(str(t1), str(d, "latin-1")) t2 = xmlrpclib._binary(de) self.assertEqual(str(t2), str(d, "latin-1")) ADDR = PORT = URL = None # The evt is set twice. First when the server is ready to serve. # Second when the server has been shutdown. The user must clear # the event after it has been set the first time to catch the second set. 
def http_server(evt, numrequests, requestHandler=None): class TestInstanceClass: def div(self, x, y): return x // y def _methodHelp(self, name): if name == 'div': return 'This is the div function' class Fixture: @staticmethod def getData(): return '42' def my_function(): '''This is my function''' return True class MyXMLRPCServer(xmlrpc.server.SimpleXMLRPCServer): def get_request(self): # Ensure the socket is always non-blocking. On Linux, socket # attributes are not inherited like they are on *BSD and Windows. s, port = self.socket.accept() s.setblocking(True) return s, port if not requestHandler: requestHandler = xmlrpc.server.SimpleXMLRPCRequestHandler serv = MyXMLRPCServer(("localhost", 0), requestHandler, logRequests=False, bind_and_activate=False) try: serv.server_bind() global ADDR, PORT, URL ADDR, PORT = serv.socket.getsockname() #connect to IP address directly. This avoids socket.create_connection() #trying to connect to "localhost" using all address families, which #causes slowdown e.g. on vista which supports AF_INET6. The server listens #on AF_INET only. URL = "http://%s:%d"%(ADDR, PORT) serv.server_activate() serv.register_introspection_functions() serv.register_multicall_functions() serv.register_function(pow) serv.register_function(lambda x,y: x+y, 'add') serv.register_function(my_function) testInstance = TestInstanceClass() serv.register_instance(testInstance, allow_dotted_names=True) evt.set() # handle up to 'numrequests' requests while numrequests > 0: serv.handle_request() numrequests -= 1 except socket.timeout: pass finally: serv.socket.close() PORT = None evt.set() def http_multi_server(evt, numrequests, requestHandler=None): class TestInstanceClass: def div(self, x, y): return x // y def _methodHelp(self, name): if name == 'div': return 'This is the div function' def my_function(): '''This is my function''' return True class MyXMLRPCServer(xmlrpc.server.MultiPathXMLRPCServer): def get_request(self): # Ensure the socket is always non-blocking. 
On Linux, socket # attributes are not inherited like they are on *BSD and Windows. s, port = self.socket.accept() s.setblocking(True) return s, port if not requestHandler: requestHandler = xmlrpc.server.SimpleXMLRPCRequestHandler class MyRequestHandler(requestHandler): rpc_paths = [] class BrokenDispatcher: def _marshaled_dispatch(self, data, dispatch_method=None, path=None): raise RuntimeError("broken dispatcher") serv = MyXMLRPCServer(("localhost", 0), MyRequestHandler, logRequests=False, bind_and_activate=False) serv.socket.settimeout(3) serv.server_bind() try: global ADDR, PORT, URL ADDR, PORT = serv.socket.getsockname() #connect to IP address directly. This avoids socket.create_connection() #trying to connect to "localhost" using all address families, which #causes slowdown e.g. on vista which supports AF_INET6. The server listens #on AF_INET only. URL = "http://%s:%d"%(ADDR, PORT) serv.server_activate() paths = ["/foo", "/foo/bar"] for path in paths: d = serv.add_dispatcher(path, xmlrpc.server.SimpleXMLRPCDispatcher()) d.register_introspection_functions() d.register_multicall_functions() serv.get_dispatcher(paths[0]).register_function(pow) serv.get_dispatcher(paths[1]).register_function(lambda x,y: x+y, 'add') serv.add_dispatcher("/is/broken", BrokenDispatcher()) evt.set() # handle up to 'numrequests' requests while numrequests > 0: serv.handle_request() numrequests -= 1 except socket.timeout: pass finally: serv.socket.close() PORT = None evt.set() # This function prevents errors like: # <ProtocolError for localhost:57527/RPC2: 500 Internal Server Error> def is_unavailable_exception(e): '''Returns True if the given ProtocolError is the product of a server-side exception caused by the 'temporarily unavailable' response sometimes given by operations on non-blocking sockets.''' # sometimes we get a -1 error code and/or empty headers try: if e.errcode == -1 or e.headers is None: return True exc_mess = e.headers.get('X-exception') except AttributeError: # Ignore 
OSErrors here. exc_mess = str(e) if exc_mess and 'temporarily unavailable' in exc_mess.lower(): return True def make_request_and_skipIf(condition, reason): # If we skip the test, we have to make a request because the # the server created in setUp blocks expecting one to come in. if not condition: return lambda func: func def decorator(func): def make_request_and_skip(self): try: xmlrpclib.ServerProxy(URL).my_function() except (xmlrpclib.ProtocolError, OSError) as e: if not is_unavailable_exception(e): raise raise unittest.SkipTest(reason) return make_request_and_skip return decorator @unittest.skipUnless(threading, 'Threading required for this test.') class BaseServerTestCase(unittest.TestCase): requestHandler = None request_count = 1 threadFunc = staticmethod(http_server) def setUp(self): # enable traceback reporting xmlrpc.server.SimpleXMLRPCServer._send_traceback_header = True self.evt = threading.Event() # start server thread to handle requests serv_args = (self.evt, self.request_count, self.requestHandler) threading.Thread(target=self.threadFunc, args=serv_args).start() # wait for the server to be ready self.evt.wait() self.evt.clear() def tearDown(self): # wait on the server thread to terminate self.evt.wait() # disable traceback reporting xmlrpc.server.SimpleXMLRPCServer._send_traceback_header = False class SimpleServerTestCase(BaseServerTestCase): def test_simple1(self): try: p = xmlrpclib.ServerProxy(URL) self.assertEqual(p.pow(6,8), 6**8) except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output self.fail("%s\n%s" % (e, getattr(e, "headers", ""))) def test_nonascii(self): start_string = 'P\N{LATIN SMALL LETTER Y WITH CIRCUMFLEX}t' end_string = 'h\N{LATIN SMALL LETTER O WITH HORN}n' try: p = xmlrpclib.ServerProxy(URL) self.assertEqual(p.add(start_string, end_string), start_string + end_string) except 
(xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output self.fail("%s\n%s" % (e, getattr(e, "headers", ""))) # [ch] The test 404 is causing lots of false alarms. def XXXtest_404(self): # send POST with http.client, it should return 404 header and # 'Not Found' message. conn = httplib.client.HTTPConnection(ADDR, PORT) conn.request('POST', '/this-is-not-valid') response = conn.getresponse() conn.close() self.assertEqual(response.status, 404) self.assertEqual(response.reason, 'Not Found') def test_introspection1(self): expected_methods = set(['pow', 'div', 'my_function', 'add', 'system.listMethods', 'system.methodHelp', 'system.methodSignature', 'system.multicall', 'Fixture']) try: p = xmlrpclib.ServerProxy(URL) meth = p.system.listMethods() self.assertEqual(set(meth), expected_methods) except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output self.fail("%s\n%s" % (e, getattr(e, "headers", ""))) def test_introspection2(self): try: # test _methodHelp() p = xmlrpclib.ServerProxy(URL) divhelp = p.system.methodHelp('div') self.assertEqual(divhelp, 'This is the div function') except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output self.fail("%s\n%s" % (e, getattr(e, "headers", ""))) @make_request_and_skipIf(sys.flags.optimize >= 2, "Docstrings are omitted with -O2 and above") def test_introspection3(self): try: # test native doc p = xmlrpclib.ServerProxy(URL) myfunction = p.system.methodHelp('my_function') self.assertEqual(myfunction, 'This is my function') except (xmlrpclib.ProtocolError, OSError) as e: # ignore 
failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output self.fail("%s\n%s" % (e, getattr(e, "headers", ""))) def test_introspection4(self): # the SimpleXMLRPCServer doesn't support signatures, but # at least check that we can try making the call try: p = xmlrpclib.ServerProxy(URL) divsig = p.system.methodSignature('div') self.assertEqual(divsig, 'signatures not supported') except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output self.fail("%s\n%s" % (e, getattr(e, "headers", ""))) def test_multicall(self): try: p = xmlrpclib.ServerProxy(URL) multicall = xmlrpclib.MultiCall(p) multicall.add(2,3) multicall.pow(6,8) multicall.div(127,42) add_result, pow_result, div_result = multicall() self.assertEqual(add_result, 2+3) self.assertEqual(pow_result, 6**8) self.assertEqual(div_result, 127//42) except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output self.fail("%s\n%s" % (e, getattr(e, "headers", ""))) def test_non_existing_multicall(self): try: p = xmlrpclib.ServerProxy(URL) multicall = xmlrpclib.MultiCall(p) multicall.this_is_not_exists() result = multicall() # result.results contains; # [{'faultCode': 1, 'faultString': '<class \'exceptions.Exception\'>:' # 'method "this_is_not_exists" is not supported'>}] self.assertEqual(result.results[0]['faultCode'], 1) self.assertEqual(result.results[0]['faultString'], '<class \'Exception\'>:method "this_is_not_exists" ' 'is not supported') except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional 
information in test output self.fail("%s\n%s" % (e, getattr(e, "headers", ""))) def test_dotted_attribute(self): # Raises an AttributeError because private methods are not allowed. self.assertRaises(AttributeError, xmlrpc.server.resolve_dotted_attribute, str, '__add') self.assertTrue(xmlrpc.server.resolve_dotted_attribute(str, 'title')) # Get the test to run faster by sending a request with test_simple1. # This avoids waiting for the socket timeout. self.test_simple1() def test_allow_dotted_names_true(self): # XXX also need allow_dotted_names_false test. server = xmlrpclib.ServerProxy("http://%s:%d/RPC2" % (ADDR, PORT)) data = server.Fixture.getData() self.assertEqual(data, '42') def test_unicode_host(self): server = xmlrpclib.ServerProxy("http://%s:%d/RPC2" % (ADDR, PORT)) self.assertEqual(server.add("a", "\xe9"), "a\xe9") def test_partial_post(self): # Check that a partial POST doesn't make the server loop: issue #14001. conn = http.client.HTTPConnection(ADDR, PORT) conn.request('POST', '/RPC2 HTTP/1.0\r\nContent-Length: 100\r\n\r\nbye') conn.close() class MultiPathServerTestCase(BaseServerTestCase): threadFunc = staticmethod(http_multi_server) request_count = 2 def test_path1(self): p = xmlrpclib.ServerProxy(URL+"/foo") self.assertEqual(p.pow(6,8), 6**8) self.assertRaises(xmlrpclib.Fault, p.add, 6, 8) def test_path2(self): p = xmlrpclib.ServerProxy(URL+"/foo/bar") self.assertEqual(p.add(6,8), 6+8) self.assertRaises(xmlrpclib.Fault, p.pow, 6, 8) def test_path3(self): p = xmlrpclib.ServerProxy(URL+"/is/broken") self.assertRaises(xmlrpclib.Fault, p.add, 6, 8) #A test case that verifies that a server using the HTTP/1.1 keep-alive mechanism #does indeed serve subsequent requests on the same connection class BaseKeepaliveServerTestCase(BaseServerTestCase): #a request handler that supports keep-alive and logs requests into a #class variable class RequestHandler(xmlrpc.server.SimpleXMLRPCRequestHandler): parentClass = xmlrpc.server.SimpleXMLRPCRequestHandler 
protocol_version = 'HTTP/1.1' myRequests = [] def handle(self): self.myRequests.append([]) self.reqidx = len(self.myRequests)-1 return self.parentClass.handle(self) def handle_one_request(self): result = self.parentClass.handle_one_request(self) self.myRequests[self.reqidx].append(self.raw_requestline) return result requestHandler = RequestHandler def setUp(self): #clear request log self.RequestHandler.myRequests = [] return BaseServerTestCase.setUp(self) #A test case that verifies that a server using the HTTP/1.1 keep-alive mechanism #does indeed serve subsequent requests on the same connection class KeepaliveServerTestCase1(BaseKeepaliveServerTestCase): def test_two(self): p = xmlrpclib.ServerProxy(URL) #do three requests. self.assertEqual(p.pow(6,8), 6**8) self.assertEqual(p.pow(6,8), 6**8) self.assertEqual(p.pow(6,8), 6**8) p("close")() #they should have all been handled by a single request handler self.assertEqual(len(self.RequestHandler.myRequests), 1) #check that we did at least two (the third may be pending append #due to thread scheduling) self.assertGreaterEqual(len(self.RequestHandler.myRequests[-1]), 2) #test special attribute access on the serverproxy, through the __call__ #function. class KeepaliveServerTestCase2(BaseKeepaliveServerTestCase): #ask for two keepalive requests to be handled. request_count=2 def test_close(self): p = xmlrpclib.ServerProxy(URL) #do some requests with close. 
self.assertEqual(p.pow(6,8), 6**8) self.assertEqual(p.pow(6,8), 6**8) self.assertEqual(p.pow(6,8), 6**8) p("close")() #this should trigger a new keep-alive request self.assertEqual(p.pow(6,8), 6**8) self.assertEqual(p.pow(6,8), 6**8) self.assertEqual(p.pow(6,8), 6**8) p("close")() #they should have all been two request handlers, each having logged at least #two complete requests self.assertEqual(len(self.RequestHandler.myRequests), 2) self.assertGreaterEqual(len(self.RequestHandler.myRequests[-1]), 2) self.assertGreaterEqual(len(self.RequestHandler.myRequests[-2]), 2) def test_transport(self): p = xmlrpclib.ServerProxy(URL) #do some requests with close. self.assertEqual(p.pow(6,8), 6**8) p("transport").close() #same as above, really. self.assertEqual(p.pow(6,8), 6**8) p("close")() self.assertEqual(len(self.RequestHandler.myRequests), 2) #A test case that verifies that gzip encoding works in both directions #(for a request and the response) @unittest.skipIf(gzip is None, 'requires gzip') class GzipServerTestCase(BaseServerTestCase): #a request handler that supports keep-alive and logs requests into a #class variable class RequestHandler(xmlrpc.server.SimpleXMLRPCRequestHandler): parentClass = xmlrpc.server.SimpleXMLRPCRequestHandler protocol_version = 'HTTP/1.1' def do_POST(self): #store content of last request in class self.__class__.content_length = int(self.headers["content-length"]) return self.parentClass.do_POST(self) requestHandler = RequestHandler class Transport(xmlrpclib.Transport): #custom transport, stores the response length for our perusal fake_gzip = False def parse_response(self, response): self.response_length=int(response.getheader("content-length", 0)) return xmlrpclib.Transport.parse_response(self, response) def send_content(self, connection, body): if self.fake_gzip: #add a lone gzip header to induce decode error remotely connection.putheader("Content-Encoding", "gzip") return xmlrpclib.Transport.send_content(self, connection, body) def 
setUp(self): BaseServerTestCase.setUp(self) def test_gzip_request(self): t = self.Transport() t.encode_threshold = None p = xmlrpclib.ServerProxy(URL, transport=t) self.assertEqual(p.pow(6,8), 6**8) a = self.RequestHandler.content_length t.encode_threshold = 0 #turn on request encoding self.assertEqual(p.pow(6,8), 6**8) b = self.RequestHandler.content_length self.assertTrue(a>b) p("close")() def test_bad_gzip_request(self): t = self.Transport() t.encode_threshold = None t.fake_gzip = True p = xmlrpclib.ServerProxy(URL, transport=t) cm = self.assertRaisesRegex(xmlrpclib.ProtocolError, re.compile(r"\b400\b")) with cm: p.pow(6, 8) p("close")() def test_gsip_response(self): t = self.Transport() p = xmlrpclib.ServerProxy(URL, transport=t) old = self.requestHandler.encode_threshold self.requestHandler.encode_threshold = None #no encoding self.assertEqual(p.pow(6,8), 6**8) a = t.response_length self.requestHandler.encode_threshold = 0 #always encode self.assertEqual(p.pow(6,8), 6**8) p("close")() b = t.response_length self.requestHandler.encode_threshold = old self.assertTrue(a>b) #Test special attributes of the ServerProxy object class ServerProxyTestCase(unittest.TestCase): def setUp(self): unittest.TestCase.setUp(self) if threading: self.url = URL else: # Without threading, http_server() and http_multi_server() will not # be executed and URL is still equal to None. 
'http://' is a just # enough to choose the scheme (HTTP) self.url = 'http://' def test_close(self): p = xmlrpclib.ServerProxy(self.url) self.assertEqual(p('close')(), None) def test_transport(self): t = xmlrpclib.Transport() p = xmlrpclib.ServerProxy(self.url, transport=t) self.assertEqual(p('transport'), t) # This is a contrived way to make a failure occur on the server side # in order to test the _send_traceback_header flag on the server class FailingMessageClass(http.client.HTTPMessage): def get(self, key, failobj=None): key = key.lower() if key == 'content-length': return 'I am broken' return super().get(key, failobj) @unittest.skipUnless(threading, 'Threading required for this test.') class FailingServerTestCase(unittest.TestCase): def setUp(self): self.evt = threading.Event() # start server thread to handle requests serv_args = (self.evt, 1) threading.Thread(target=http_server, args=serv_args).start() # wait for the server to be ready self.evt.wait() self.evt.clear() def tearDown(self): # wait on the server thread to terminate self.evt.wait() # reset flag xmlrpc.server.SimpleXMLRPCServer._send_traceback_header = False # reset message class default_class = http.client.HTTPMessage xmlrpc.server.SimpleXMLRPCRequestHandler.MessageClass = default_class def test_basic(self): # check that flag is false by default flagval = xmlrpc.server.SimpleXMLRPCServer._send_traceback_header self.assertEqual(flagval, False) # enable traceback reporting xmlrpc.server.SimpleXMLRPCServer._send_traceback_header = True # test a call that shouldn't fail just as a smoke test try: p = xmlrpclib.ServerProxy(URL) self.assertEqual(p.pow(6,8), 6**8) except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e): # protocol error; provide additional information in test output self.fail("%s\n%s" % (e, getattr(e, "headers", ""))) def test_fail_no_info(self): # use the broken message class 
xmlrpc.server.SimpleXMLRPCRequestHandler.MessageClass = FailingMessageClass try: p = xmlrpclib.ServerProxy(URL) p.pow(6,8) except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e) and hasattr(e, "headers"): # The two server-side error headers shouldn't be sent back in this case self.assertTrue(e.headers.get("X-exception") is None) self.assertTrue(e.headers.get("X-traceback") is None) else: self.fail('ProtocolError not raised') def test_fail_with_info(self): # use the broken message class xmlrpc.server.SimpleXMLRPCRequestHandler.MessageClass = FailingMessageClass # Check that errors in the server send back exception/traceback # info when flag is set xmlrpc.server.SimpleXMLRPCServer._send_traceback_header = True try: p = xmlrpclib.ServerProxy(URL) p.pow(6,8) except (xmlrpclib.ProtocolError, OSError) as e: # ignore failures due to non-blocking socket 'unavailable' errors if not is_unavailable_exception(e) and hasattr(e, "headers"): # We should get error info in the response expected_err = "invalid literal for int() with base 10: 'I am broken'" self.assertEqual(e.headers.get("X-exception"), expected_err) self.assertTrue(e.headers.get("X-traceback") is not None) else: self.fail('ProtocolError not raised') @contextlib.contextmanager def captured_stdout(encoding='utf-8'): """A variation on support.captured_stdout() which gives a text stream having a `buffer` attribute. 
""" import io orig_stdout = sys.stdout sys.stdout = io.TextIOWrapper(io.BytesIO(), encoding=encoding) try: yield sys.stdout finally: sys.stdout = orig_stdout class CGIHandlerTestCase(unittest.TestCase): def setUp(self): self.cgi = xmlrpc.server.CGIXMLRPCRequestHandler() def tearDown(self): self.cgi = None def test_cgi_get(self): with support.EnvironmentVarGuard() as env: env['REQUEST_METHOD'] = 'GET' # if the method is GET and no request_text is given, it runs handle_get # get sysout output with captured_stdout(encoding=self.cgi.encoding) as data_out: self.cgi.handle_request() # parse Status header data_out.seek(0) handle = data_out.read() status = handle.split()[1] message = ' '.join(handle.split()[2:4]) self.assertEqual(status, '400') self.assertEqual(message, 'Bad Request') def test_cgi_xmlrpc_response(self): data = """<?xml version='1.0'?> <methodCall> <methodName>test_method</methodName> <params> <param> <value><string>foo</string></value> </param> <param> <value><string>bar</string></value> </param> </params> </methodCall> """ with support.EnvironmentVarGuard() as env, \ captured_stdout(encoding=self.cgi.encoding) as data_out, \ support.captured_stdin() as data_in: data_in.write(data) data_in.seek(0) env['CONTENT_LENGTH'] = str(len(data)) self.cgi.handle_request() data_out.seek(0) # will respond exception, if so, our goal is achieved ;) handle = data_out.read() # start with 44th char so as not to get http header, we just # need only xml self.assertRaises(xmlrpclib.Fault, xmlrpclib.loads, handle[44:]) # Also test the content-length returned by handle_request # Using the same test method inorder to avoid all the datapassing # boilerplate code. 
# Test for bug: http://bugs.python.org/issue5040 content = handle[handle.find("<?xml"):] self.assertEqual( int(re.search('Content-Length: (\d+)', handle).group(1)), len(content)) class UseBuiltinTypesTestCase(unittest.TestCase): def test_use_builtin_types(self): # SimpleXMLRPCDispatcher.__init__ accepts use_builtin_types, which # makes all dispatch of binary data as bytes instances, and all # dispatch of datetime argument as datetime.datetime instances. self.log = [] expected_bytes = b"my dog has fleas" expected_date = datetime.datetime(2008, 5, 26, 18, 25, 12) marshaled = xmlrpclib.dumps((expected_bytes, expected_date), 'foobar') def foobar(*args): self.log.extend(args) handler = xmlrpc.server.SimpleXMLRPCDispatcher( allow_none=True, encoding=None, use_builtin_types=True) handler.register_function(foobar) handler._marshaled_dispatch(marshaled) self.assertEqual(len(self.log), 2) mybytes, mydate = self.log self.assertEqual(self.log, [expected_bytes, expected_date]) self.assertIs(type(mydate), datetime.datetime) self.assertIs(type(mybytes), bytes) def test_cgihandler_has_use_builtin_types_flag(self): handler = xmlrpc.server.CGIXMLRPCRequestHandler(use_builtin_types=True) self.assertTrue(handler.use_builtin_types) def test_xmlrpcserver_has_use_builtin_types_flag(self): server = xmlrpc.server.SimpleXMLRPCServer(("localhost", 0), use_builtin_types=True) server.server_close() self.assertTrue(server.use_builtin_types) @support.reap_threads def test_main(): support.run_unittest(XMLRPCTestCase, HelperTestCase, DateTimeTestCase, BinaryTestCase, FaultTestCase, UseBuiltinTypesTestCase, SimpleServerTestCase, KeepaliveServerTestCase1, KeepaliveServerTestCase2, GzipServerTestCase, MultiPathServerTestCase, ServerProxyTestCase, FailingServerTestCase, CGIHandlerTestCase) if __name__ == "__main__": test_main()
mit
kantlove/flask-simple-page
Lib/site-packages/pkg_resources/_vendor/packaging/__about__.py
441
1073
# Copyright 2014 Donald Stufft # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import absolute_import, division, print_function __all__ = [ "__title__", "__summary__", "__uri__", "__version__", "__author__", "__email__", "__license__", "__copyright__", ] __title__ = "packaging" __summary__ = "Core utilities for Python packages" __uri__ = "https://github.com/pypa/packaging" __version__ = "15.3" __author__ = "Donald Stufft" __email__ = "donald@stufft.io" __license__ = "Apache License, Version 2.0" __copyright__ = "Copyright 2014 %s" % __author__
mit
joyider/op_mon
op_mon/public/views.py
1
2824
# -*- coding: utf-8 -*- """Public section, including homepage and signup.""" from flask import Blueprint, flash, redirect, render_template, request, current_app, session, g, app from flask_login import login_required, login_user, logout_user, current_user from flask_themes2 import render_theme_template, get_theme, get_themes_list, url_for from op_mon.extensions import login_manager from op_mon.public.forms import LoginForm from op_mon.user.forms import RegisterForm from op_mon.user.models import User from op_mon.utils import flash_errors blueprint = Blueprint('public', __name__, static_folder='../static') @login_manager.user_loader def load_user(user_id): """Load user by ID.""" return User.get_by_id(int(user_id)) @blueprint.route('/') def home(): """Home page.""" # Handle logging in if current_user.is_authenticated: return render_theme_template(session.get('theme', current_app.config['DEFAULT_THEME']), 'users/dashboard.html') else: return redirect(url_for('public.login'), code=302) @blueprint.route('/logout/') @login_required def logout(): """Logout.""" logout_user() flash('You are logged out.', 'info') return redirect(url_for('public.home')) @blueprint.route('/register/', methods=['GET', 'POST']) def register(): """Register new user.""" form = RegisterForm(request.form, csrf_enabled=False) if form.validate_on_submit(): User.create(username=form.username.data, email=form.email.data, password=form.password.data, active=True) flash('Thank you for registering. 
You can now log in.', 'success') return redirect(url_for('public.home')) else: flash_errors(form) return render_template('public/register.html', form=form) @blueprint.route('/login/', methods=['GET', 'POST']) def login(): """Login Page.""" form = LoginForm(request.form) # Handle logging in if request.method == 'POST': if form.validate_on_submit(): login_user(form.user) flash('You are logged in.', 'success') redirect_url = request.args.get('next') or url_for('user.dashboard') return redirect(redirect_url) else: flash_errors(form) return render_theme_template(session.get('theme', current_app.config['DEFAULT_THEME']), 'public/login.html', form=form) # Below here is only code for development and theme testing @blueprint.route('/themes/') def themes(): """Theme Page.""" themes = get_themes_list() return render_theme_template(current_app.name, 'public/themes.html', themes=themes) @blueprint.route('/themes/<ident>') def settheme(ident): if ident not in current_app.theme_manager.themes: abort(404) session['theme'] = ident return redirect(url_for('public.themes'))
bsd-3-clause
dongguangming/python-phonenumbers
python/phonenumbers/tzdata/data0.py
4
65055
"""Per-prefix data, mapping each prefix to a name. Auto-generated file, do not edit by hand. """ from ..util import u # Copyright (C) 2011-2015 The Libphonenumber Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. data = { '1850897':('America/Chicago',), '7845':('Europe/Moscow',), '7841':('Europe/Moscow',), '7840':('Europe/Bucharest', 'Europe/Moscow'), '130869':('America/Chicago',), '130866':('America/Denver',), '130862':('America/Denver',), '130863':('America/Denver',), '130886':('America/Chicago',), '12705':('America/Chicago',), '130883':('America/Chicago',), '1812485':('America/Chicago',), '1812486':('America/New_York',), '1812481':('America/New_York',), '130888':('America/Denver',), '1812482':('America/New_York',), '962':('Asia/Amman',), '185079':('America/Chicago',), '185078':('America/Chicago',), '185074':('America/Chicago',), '298':('Atlantic/Faeroe',), '299':('America/Godthab', 'America/Scoresbysund', 'America/Thule', 'Atlantic/Reykjavik'), '297':('America/Aruba',), '6295':('Asia/Jayapura',), '290':('Atlantic/St_Helena',), '291':('Africa/Asmera',), '961':('Asia/Beirut',), '52641':('America/Mazatlan',), '52642':('America/Mazatlan',), '52643':('America/Mazatlan',), '52644':('America/Mazatlan',), '52645':('America/Mazatlan',), '52646':('America/Tijuana',), '52647':('America/Mazatlan',), '52648':('America/Mazatlan',), '52649':('America/Mazatlan', 'America/Mexico_City'), '62513':('Asia/Jakarta',), '62512':('Asia/Makassar',), '62511':('Asia/Makassar',), 
'162034':('America/Chicago',), '162035':('America/Chicago',), '162036':('America/Chicago',), '162032':('America/Chicago',), '162033':('America/Chicago',), '162039':('America/Chicago',), '1812466':('America/New_York',), '1812464':('America/Chicago',), '1812462':('America/New_York',), '1605348':('America/Denver',), '1605341':('America/Denver',), '1605342':('America/Denver',), '1605343':('America/Denver',), '1605345':('America/Chicago',), '1605347':('America/Denver',), '62408':('Asia/Makassar',), '441942':('Europe/London',), '441943':('Europe/London',), '441944':('Europe/London',), '441945':('Europe/London',), '441946':('Europe/London',), '441947':('Europe/London',), '441948':('Europe/London',), '441949':('Europe/London',), '98':('Asia/Tehran',), '91':('Asia/Calcutta',), '90':('Europe/Bucharest',), '93':('Asia/Kabul',), '92':('Asia/Karachi',), '95':('Asia/Rangoon',), '94':('Asia/Colombo',), '1623':('America/Denver',), '1208934':('America/Denver',), '1626':('America/Los_Angeles',), '1208936':('America/Denver',), '1208938':('America/Denver',), '1208939':('America/Denver',), '441386':('Europe/London',), '441387':('Europe/London',), '441384':('Europe/London',), '1812939':('America/New_York',), '441383':('Europe/London',), '441380':('Europe/London',), '1605837':('America/Denver',), '1812934':('America/New_York',), '1812937':('America/Chicago',), '1812936':('America/New_York',), '1812933':('America/New_York',), '1812932':('America/New_York',), '1208887':('America/Denver',), '558':('America/Sao_Paulo',), '555':('America/Sao_Paulo',), '554':('America/Sao_Paulo',), '557':('America/Sao_Paulo',), '551':('America/Sao_Paulo',), '553':('America/Sao_Paulo',), '552':('America/Sao_Paulo',), '1758':('America/St_Lucia',), '1757':('America/New_York',), '1754':('America/New_York',), '1208440':('America/Denver',), '1208442':('America/Denver',), '1208443':('America/Los_Angeles',), '1208448':('America/Los_Angeles',), '178589':('America/Denver',), '178588':('America/Chicago',), 
'178587':('America/Chicago',), '178586':('America/Chicago',), '178584':('America/Chicago',), '178583':('America/Chicago',), '178582':('America/Chicago',), '170147':('America/Chicago',), '1284':('America/Tortola',), '170144':('America/Chicago',), '1281':('America/Chicago',), '170140':('America/Chicago',), '1289':('America/Toronto',), '170149':('America/Chicago',), '170148':('America/Denver',), '689':('Pacific/Gambier', 'Pacific/Marquesas', 'Pacific/Tahiti'), '688':('Pacific/Funafuti',), '1573':('America/Chicago',), '1571':('America/New_York',), '1570':('America/New_York',), '681':('Pacific/Wallis',), '680':('Pacific/Palau',), '1575':('America/Denver',), '682':('Pacific/Rarotonga',), '622':('Asia/Jakarta',), '1850593':('America/Chicago',), '1850592':('America/Chicago',), '1850595':('America/Chicago',), '1850597':('America/New_York',), '180759':('America/Toronto',), '1850685':('America/Chicago',), '180754':('America/Winnipeg',), '180757':('America/Toronto',), '1850681':('America/New_York',), '1850683':('America/Chicago',), '1850682':('America/Chicago',), '5994':('America/Kralendijk',), '5997':('America/Kralendijk',), '1850664':('America/Chicago',), '1850663':('America/New_York',), '5993':('America/Kralendijk',), '5999':('America/Curacao',), '1850668':('America/New_York',), '379':('Europe/Vatican',), '378':('Europe/San_Marino',), '130878':('America/Chicago',), '371':('Europe/Bucharest',), '130872':('America/Chicago',), '373':('Europe/Bucharest',), '372':('Europe/Bucharest',), '130877':('America/Denver',), '130876':('America/Denver',), '130875':('America/Chicago',), '130874':('America/Chicago',), '44139':('Europe/London',), '7843':('Europe/Moscow',), '127024':('America/Chicago',), '127025':('America/Chicago',), '127026':('America/Chicago',), '16067':('America/New_York',), '16062':('America/New_York',), '127028':('America/Chicago',), '127029':('America/Chicago',), '963':('Asia/Damascus',), '44130':('Europe/London',), '245':('Atlantic/Reykjavik',), 
'244':('Africa/Luanda',), '247':('Atlantic/St_Helena',), '246':('Indian/Chagos',), '241':('Africa/Libreville',), '240':('Africa/Malabo',), '243':('Africa/Kinshasa', 'Africa/Lubumbashi'), '242':('Africa/Brazzaville',), '249':('Africa/Nairobi',), '248':('Indian/Mahe',), '1250412':('America/Vancouver',), '1250417':('America/Edmonton',), '646':('Pacific/Auckland',), '174':('America/New_York',), '1812474':('America/Chicago',), '1812475':('America/Chicago',), '1812476':('America/Chicago',), '1501':('America/Chicago',), '1812471':('America/Chicago',), '1812473':('America/Chicago',), '1506':('America/Halifax',), '1812478':('America/New_York',), '1812479':('America/Chicago',), '1504':('America/Chicago',), '1505':('America/Denver',), '1850763':('America/Chicago',), '170174':('America/Chicago',), '170928':('America/Halifax',), '170175':('America/Chicago',), '1850942':('America/New_York',), '170923':('America/St_Johns',), '170922':('America/St_Johns',), '170925':('America/St_Johns',), '170927':('America/St_Johns',), '1850941':('America/Chicago',), '1972':('America/Chicago',), '1607':('America/New_York',), '13067':('America/Winnipeg',), '13066':('America/Winnipeg',), '13065':('America/Winnipeg',), '13064':('America/Winnipeg',), '13063':('America/Winnipeg',), '13062':('America/Winnipeg',), '1618':('America/Chicago',), '1619':('America/Los_Angeles',), '1616':('America/New_York',), '1617':('America/New_York',), '1614':('America/New_York',), '1615':('America/Chicago',), '1612':('America/Chicago',), '1613':('America/Toronto',), '13069':('America/Winnipeg',), '1605374':('America/Denver',), '1605371':('America/Chicago',), '1605373':('America/Chicago',), '1812923':('America/New_York',), '1812926':('America/New_York',), '1812925':('America/Chicago',), '142345':('America/New_York',), '142346':('America/New_York',), '142347':('America/New_York',), '142342':('America/New_York',), '160536':('America/Chicago',), '157485':('America/New_York',), '157484':('America/New_York',), 
'157487':('America/New_York',), '157486':('America/New_York',), '142348':('America/New_York',), '142349':('America/New_York',), '157483':('America/New_York',), '157482':('America/New_York',), '1769':('America/Chicago',), '1361':('America/Chicago',), '1763':('America/Chicago',), '1760':('America/Los_Angeles',), '1767':('America/Dominica',), '1765':('America/New_York',), '1208473':('America/Denver',), '1208475':('America/Denver',), '1208476':('America/Los_Angeles',), '1208478':('America/Denver',), '1208875':('America/Los_Angeles',), '690':('Pacific/Fakaofo',), '691':('Pacific/Kosrae', 'Pacific/Ponape', 'Pacific/Truk'), '1208870':('America/Denver',), '1540':('America/New_York',), '1208879':('America/Denver',), '1208878':('America/Denver',), '1208765':('America/Los_Angeles',), '1208766':('America/Denver',), '1850878':('America/New_York',), '1850877':('America/New_York',), '1850874':('America/Chicago',), '1850875':('America/New_York',), '1850872':('America/Chicago',), '1850697':('America/New_York',), '1850871':('America/Chicago',), '1850674':('America/Chicago',), '1850675':('America/Chicago',), '1850670':('America/New_York',), '1850671':('America/New_York',), '1850678':('America/Chicago',), '62519':('Asia/Jakarta',), '62518':('Asia/Makassar',), '1850587':('America/Chicago',), '1850584':('America/New_York',), '1850585':('America/Chicago',), '1850580':('America/New_York',), '1850581':('America/Chicago',), '1850588':('America/Chicago',), '380':('Europe/Bucharest',), '381':('Europe/Belgrade',), '382':('Europe/Podgorica',), '385':('Europe/Zagreb',), '386':('Europe/Ljubljana',), '387':('Europe/Sarajevo',), '389':('Europe/Skopje',), '127033':('America/Chicago',), '1308874':('America/Denver',), '127034':('America/Chicago',), '127039':('America/Chicago',), '127038':('America/Chicago',), '34926':('Europe/Madrid',), '258':('Africa/Maputo',), '252':('Africa/Mogadishu',), '253':('Africa/Djibouti',), '250':('Africa/Kigali',), '251':('Africa/Addis_Ababa',), '256':('Africa/Kampala',), 
'257':('Africa/Bujumbura',), '254':('Africa/Nairobi',), '255':('Africa/Dar_es_Salaam',), '120898':('America/Los_Angeles',), '120899':('America/Denver',), '168':('America/Chicago',), '7848':('Europe/Moscow',), '120890':('America/Denver',), '7844':('Europe/Moscow',), '7847':('Europe/Moscow',), '7846':('Europe/Moscow',), '120894':('America/Denver',), '120895':('America/Denver',), '120896':('America/Los_Angeles',), '7842':('Europe/Moscow',), '1605589':('America/Chicago',), '1605582':('America/Chicago',), '1605584':('America/Denver',), '1605745':('America/Denver',), '1605747':('America/Chicago',), '62401':('Asia/Makassar',), '62403':('Asia/Jayapura', 'Asia/Makassar'), '62402':('Asia/Makassar',), '62405':('Asia/Makassar',), '62404':('Asia/Makassar',), '5296':('America/Mexico_City',), '5297':('America/Mexico_City',), '5295':('America/Mexico_City',), '5292':('America/Mexico_City',), '5293':('America/Mexico_City',), '5291':('America/Mexico_City',), '52669':('America/Mazatlan',), '1814':('America/New_York',), '1815':('America/Chicago',), '1816':('America/Chicago',), '1817':('America/Chicago',), '1810':('America/New_York',), '1813':('America/New_York',), '1818':('America/Los_Angeles',), '1819':('America/Toronto',), '675':('Pacific/Port_Moresby',), '676':('Pacific/Tongatapu',), '1270825':('America/Chicago',), '1270824':('America/Chicago',), '1270827':('America/Chicago',), '1270826':('America/Chicago',), '1270821':('America/Chicago',), '44135':('Europe/London',), '44134':('Europe/London',), '44137':('Europe/London',), '44136':('Europe/London',), '44131':('Europe/London',), '1270828':('America/New_York',), '44133':('Europe/London',), '44132':('Europe/London',), '1620378':('America/Chicago',), '672':('Pacific/Norfolk',), '1620375':('America/Chicago',), '1620376':('America/Denver',), '1609':('America/New_York',), '1608':('America/Chicago',), '1979':('America/Chicago',), '1978':('America/New_York',), '1601':('America/Chicago',), '1603':('America/New_York',), 
'1602':('America/Denver',), '1973':('America/New_York',), '1604':('America/Vancouver',), '1971':('America/Los_Angeles',), '1970':('America/Denver',), '1208682':('America/Los_Angeles',), '1208683':('America/Los_Angeles',), '1208686':('America/Los_Angeles',), '1208687':('America/Los_Angeles',), '1208684':('America/Denver',), '1775':('America/Los_Angeles',), '1774':('America/New_York',), '1701223':('America/Chicago',), '1770':('America/New_York',), '1773':('America/Chicago',), '1772':('America/New_York',), '1701224':('America/Chicago',), '1779':('America/Chicago',), '1778':('America/Vancouver',), '1701225':('America/Denver',), '1812867':('America/Chicago',), '1812866':('America/New_York',), '1812865':('America/New_York',), '1701227':('America/Denver',), '58':('America/Caracas',), '2998':('America/Godthab',), '7346':('Asia/Yekaterinburg',), '170123':('America/Chicago',), '170125':('America/Chicago',), '170124':('America/Chicago',), '170127':('America/Chicago',), '170126':('America/Chicago',), '55':('America/Manaus', 'America/Noronha', 'America/Rio_Branco', 'America/Sao_Paulo'), '170128':('America/Chicago',), '57':('America/Bogota',), '2993':('America/Godthab',), '51':('America/Lima',), '2996':('America/Godthab',), '52':('America/Hermosillo', 'America/Mazatlan', 'America/Mexico_City', 'America/New_York', 'America/Santa_Isabel', 'America/Tijuana'), '54':('America/Buenos_Aires',), '1709891':('America/St_Johns',), '1709896':('America/Halifax',), '1709895':('America/St_Johns',), '56':('America/Santiago', 'Pacific/Easter'), '7341':('Europe/Moscow',), '181264':('America/Chicago',), '1867668':('America/Vancouver',), '53':('America/Havana',), '125030':('America/Vancouver',), '1701452':('America/Chicago',), '1701456':('America/Denver',), '1701454':('America/Chicago',), '180776':('America/Toronto',), '180773':('America/Winnipeg',), '180772':('America/Winnipeg',), '1902':('America/Halifax',), '125034':('America/Edmonton',), '421':('Europe/Bratislava',), '420':('Europe/Prague',), 
'423':('Europe/Vaduz',), '1901':('America/Chicago',), '1850488':('America/New_York',), '1850484':('America/Chicago',), '1850482':('America/Chicago',), '1850481':('America/Chicago',), '1850640':('America/Chicago',), '1850643':('America/New_York',), '1850644':('America/New_York',), '52384':('America/Mexico_City',), '1701857':('America/Chicago',), '1701854':('America/Denver',), '1701852':('America/Chicago',), '7347':('Asia/Yekaterinburg',), '853':('Asia/Shanghai',), '1701858':('America/Chicago',), '185024':('America/Chicago',), '185026':('America/Chicago',), '185027':('America/Chicago',), '185020':('America/Chicago',), '185023':('America/Chicago',), '185028':('America/Chicago',), '185029':('America/New_York',), '7349':('Asia/Yekaterinburg',), '229':('Africa/Porto-Novo',), '228':('Africa/Lome',), '227':('Africa/Niamey',), '226':('Africa/Ouagadougou',), '225':('Africa/Abidjan',), '224':('Africa/Conakry',), '223':('Africa/Bamako',), '222':('Africa/Nouakchott',), '221':('Africa/Dakar',), '220':('Africa/Banjul',), '120886':('America/Denver',), '120885':('America/Denver',), '120884':('America/Denver',), '155':('America/Los_Angeles',), '2432':('Africa/Lubumbashi',), '2431':('Africa/Kinshasa',), '2436':('Africa/Kinshasa',), '120889':('America/Denver',), '5937':('America/Guayaquil',), '5936':('America/Guayaquil',), '441388':('Europe/London',), '1806':('America/Chicago',), '1805':('America/Los_Angeles',), '1804':('America/New_York',), '1803':('America/New_York',), '1802':('America/New_York',), '1801':('America/Denver',), '1605484':('America/Denver',), '1809':('America/Halifax',), '1808':('Pacific/Honolulu',), '6238':('Asia/Makassar',), '6235':('Asia/Jakarta',), '6234':('Asia/Jakarta',), '6237':('Asia/Makassar',), '6236':('Asia/Makassar',), '6231':('Asia/Jakarta',), '6233':('Asia/Jakarta',), '6232':('Asia/Jakarta',), '441488':('Europe/London',), '125075':('America/Vancouver',), '125074':('America/Vancouver',), '125077':('America/Vancouver',), '125076':('America/Vancouver',), 
'125071':('America/Vancouver',), '125070':('America/Vancouver',), '125072':('America/Vancouver',), '125078':('America/Edmonton',), '7302':('Asia/Yakutsk',), '1949':('America/Los_Angeles',), '13089':('America/Chicago',), '13085':('America/Chicago',), '1940':('America/Chicago',), '1941':('America/New_York',), '170178':('America/Chicago',), '1605399':('America/Denver',), '1605397':('America/Chicago',), '1605394':('America/Denver',), '1605393':('America/Denver',), '1208678':('America/Denver',), '1208677':('America/Denver',), '1208676':('America/Los_Angeles',), '1208672':('America/Denver',), '1208935':('America/Los_Angeles',), '1702':('America/Los_Angeles',), '1703':('America/New_York',), '1704':('America/New_York',), '1705':('America/Toronto',), '1706':('America/New_York',), '1707':('America/Los_Angeles',), '1708':('America/Chicago',), '5269':('America/Mazatlan',), '1812874':('America/Chicago',), '1812875':('America/New_York',), '1812876':('America/New_York',), '1812877':('America/New_York',), '130828':('America/Denver',), '1208788':('America/Denver',), '1208783':('America/Los_Angeles',), '1208782':('America/Denver',), '1208787':('America/Denver',), '1208785':('America/Denver',), '1208784':('America/Los_Angeles',), '524':('America/Mexico_City',), '525':('America/Mexico_City',), '527':('America/Mexico_City',), '522':('America/Mexico_City',), '441382':('Europe/London',), '528':('America/Mexico_City',), '1208255':('America/Los_Angeles',), '1208251':('America/Denver',), '1208253':('America/Denver',), '441381':('Europe/London',), '35122':('Europe/Lisbon',), '35123':('Europe/Lisbon',), '35121':('Europe/Lisbon',), '35126':('Europe/Lisbon',), '35127':('Europe/Lisbon',), '35124':('Europe/Lisbon',), '35125':('Europe/Lisbon',), '35128':('Europe/Lisbon',), '441389':('Europe/London',), '1850656':('America/New_York',), '1850654':('America/Chicago',), '1850653':('America/New_York',), '1850650':('America/Chicago',), '1850651':('America/Chicago',), '441489':('Europe/London',), 
'1701845':('America/Chicago',), '1701842':('America/Denver',), '1701843':('America/Chicago',), '441481':('Atlantic/Reykjavik',), '160521':('America/Chicago',), '441483':('Europe/London',), '160523':('America/Chicago',), '441485':('Europe/London',), '160525':('America/Chicago',), '160526':('America/Chicago',), '1850250':('America/Chicago',), '1850251':('America/New_York',), '1850256':('America/Chicago',), '749':('Europe/Moscow',), '1850258':('America/Chicago',), '1850259':('America/Chicago',), '130688':('America/Winnipeg',), '130689':('America/Edmonton',), '130682':('America/Edmonton',), '130683':('America/Winnipeg',), '130684':('America/Winnipeg',), '130686':('America/Winnipeg',), '130687':('America/Winnipeg',), '185033':('America/Chicago',), '4777':('Europe/Oslo',), '185031':('America/Chicago',), '4775':('Europe/Oslo',), '185037':('America/Chicago',), '185036':('America/Chicago',), '4770':('Europe/Oslo',), '4771':('Europe/Oslo',), '185038':('America/New_York',), '4778':('Europe/Oslo',), '4779':('Europe/Paris',), '6895':('Pacific/Tahiti',), '6894':('Pacific/Tahiti',), '995':('Asia/Tbilisi',), '994':('Asia/Baku',), '6896':('Pacific/Tahiti',), '238':('Atlantic/Cape_Verde',), '239':('Africa/Sao_Tome',), '234':('Africa/Lagos',), '235':('Africa/Ndjamena',), '236':('Africa/Bangui',), '237':('Africa/Douala',), '230':('Indian/Mauritius',), '231':('Atlantic/Reykjavik',), '232':('Africa/Freetown',), '233':('Africa/Accra',), '1':('America/Anguilla', 'America/Antigua', 'America/Barbados', 'America/Cayman', 'America/Chicago', 'America/Denver', 'America/Dominica', 'America/Edmonton', 'America/Grand_Turk', 'America/Grenada', 'America/Halifax', 'America/Jamaica', 'America/Juneau', 'America/Los_Angeles', 'America/Lower_Princes', 'America/Montserrat', 'America/Nassau', 'America/New_York', 'America/Port_of_Spain', 'America/Puerto_Rico', 'America/St_Johns', 'America/St_Kitts', 'America/St_Lucia', 'America/St_Thomas', 'America/St_Vincent', 'America/Toronto', 'America/Tortola', 
'America/Vancouver', 'America/Winnipeg', 'Atlantic/Bermuda', 'Pacific/Guam', 'Pacific/Honolulu', 'Pacific/Pago_Pago', 'Pacific/Saipan'), '146':('America/Chicago',), '145':('America/Toronto',), '69135':('Pacific/Truk',), '1812428':('America/Chicago',), '1812423':('America/Chicago',), '1812422':('America/Chicago',), '1812421':('America/Chicago',), '1812427':('America/New_York',), '1812426':('America/Chicago',), '1812425':('America/Chicago',), '1812424':('America/Chicago',), '1832':('America/Chicago',), '1830':('America/Chicago',), '1831':('America/Los_Angeles',), '6248':('Asia/Makassar',), '52686':('America/Tijuana',), '52687':('America/Mazatlan',), '6241':('Asia/Makassar',), '6242':('Asia/Makassar',), '6243':('Asia/Makassar',), '6245':('Asia/Makassar',), '6246':('Asia/Makassar',), '6247':('Asia/Makassar',), '4415395':('Europe/London',), '4415394':('Europe/London',), '4415396':('Europe/London',), '1954':('America/New_York',), '125042':('America/Edmonton',), '1956':('America/Chicago',), '125044':('America/Vancouver',), '125045':('America/Vancouver',), '125046':('America/Vancouver',), '1952':('America/Chicago',), '125049':('America/Vancouver',), '4415242':('Europe/London',), '1605388':('America/Denver',), '190687':('America/Chicago',), '190684':('America/New_York',), '1605384':('America/Chicago',), '190688':('America/New_York',), '1605387':('America/Chicago',), '1480':('America/Denver',), '1484':('America/New_York',), '1574772':('America/Chicago',), '1574773':('America/New_York',), '1713':('America/Chicago',), '1712':('America/Chicago',), '1717':('America/New_York',), '1716':('America/New_York',), '1715':('America/Chicago',), '1714':('America/Los_Angeles',), '1719':('America/Denver',), '1718':('America/New_York',), '1812849':('America/New_York',), '613':('Australia/Sydney',), '1812847':('America/New_York',), '1812842':('America/Chicago',), '1208798':('America/Los_Angeles',), '1208799':('America/Los_Angeles',), '1208297':('America/Denver',), 
'1208794':('America/Denver',), '1208888':('America/Denver',), '1208884':('America/Denver',), '1208885':('America/Los_Angeles',), '1208886':('America/Denver',), '1605835':('America/Chicago',), '1208880':('America/Denver',), '1208881':('America/Denver',), '1208882':('America/Los_Angeles',), '1208883':('America/Los_Angeles',), '1701298':('America/Chicago',), '1701297':('America/Chicago',), '1701290':('America/Denver',), '1701293':('America/Chicago',), '1226':('America/Toronto',), '1225':('America/Chicago',), '1224':('America/Chicago',), '627':('Asia/Jakarta',), '626':('Asia/Jakarta',), '1229':('America/New_York',), '1228':('America/Chicago',), '1850391':('America/New_York',), '1850398':('America/Chicago',), '170146':('America/Chicago',), '262269':('Indian/Mayotte',), '160533':('America/Chicago',), '160532':('America/Chicago',), '1850623':('America/Chicago',), '1850622':('America/Chicago',), '262262':('Indian/Reunion',), '1850627':('America/New_York',), '1850626':('America/Chicago',), '1308425':('America/Chicago',), '1701873':('America/Chicago',), '1701872':('America/Denver',), '1850537':('America/Chicago',), '1850535':('America/Chicago',), '1850539':('America/New_York',), '181288':('America/New_York',), '181289':('America/Chicago',), '181282':('America/New_York',), '181283':('America/Chicago',), '852':('Asia/Hong_Kong',), '181266':('America/New_York',), '16202':('America/Chicago',), '181265':('America/New_York',), '16204':('America/Chicago',), '16205':('America/Chicago',), '16206':('America/Chicago',), '16207':('America/Chicago',), '16208':('America/Chicago',), '16209':('America/Chicago',), '181268':('America/New_York',), '685':('Pacific/Apia',), '687':('Pacific/Noumea',), '686':('Pacific/Enderbury', 'Pacific/Kiritimati', 'Pacific/Tarawa'), '154138':('America/Los_Angeles',), '154139':('America/Los_Angeles',), '5935':('America/Guayaquil',), '5934':('America/Guayaquil',), '5933':('America/Guayaquil',), '5932':('America/Guayaquil',), '154130':('America/Los_Angeles',), 
'154131':('America/Los_Angeles',), '154132':('America/Los_Angeles',), '154133':('America/Los_Angeles',), '154134':('America/Los_Angeles',), '154135':('America/Los_Angeles',), '154136':('America/Los_Angeles',), '154137':('America/Denver',), '3498':('Europe/Madrid',), '3496':('Europe/Madrid',), '3497':('Europe/Madrid',), '3494':('Europe/Madrid',), '3495':('Europe/Madrid',), '3493':('Europe/Madrid',), '3491':('Europe/Madrid',), '1807229':('America/Toronto',), '1812438':('America/New_York',), '1812432':('America/New_York',), '1812435':('America/Chicago',), '1812437':('America/Chicago',), '1807222':('America/Winnipeg',), '1829':('America/Halifax',), '1828':('America/New_York',), '142387':('America/New_York',), '6253':('Asia/Jakarta',), '6256':('Asia/Jakarta',), '6255':('Asia/Makassar',), '6254':('Asia/Makassar',), '1920':('America/Chicago',), '1925':('America/Los_Angeles',), '1308423':('America/Denver',), '1928':('America/Denver',), '14237':('America/New_York',), '14235':('America/New_York',), '14233':('America/New_York',), '14232':('America/New_York',), '785':('Europe/Moscow',), '1812988':('America/New_York',), '52995':('America/Mexico_City',), '1812985':('America/Chicago',), '142388':('America/New_York',), '142389':('America/New_York',), '786':('Europe/Moscow',), '787':('Europe/Moscow',), '781':('Europe/Moscow',), '782':('Europe/Moscow',), '783':('Europe/Moscow',), '1727':('America/New_York',), '1724':('America/New_York',), '142383':('America/Chicago',), '142384':('America/New_York',), '142385':('America/New_York',), '1720':('America/Denver',), '1721':('America/Lower_Princes',), '1812858':('America/Chicago',), '1812853':('America/Chicago',), '1812855':('America/New_York',), '60':('Asia/Kuching',), '61':('Antarctica/Macquarie', 'Australia/Adelaide', 'Australia/Eucla', 'Australia/Lord_Howe', 'Australia/Perth', 'Australia/Sydney', 'Indian/Christmas', 'Indian/Cocos'), '62':('Asia/Jakarta', 'Asia/Jayapura', 'Asia/Makassar'), '152':('America/Denver',), 
'64':('Pacific/Auckland', 'Pacific/Chatham'), '65':('Asia/Singapore',), '66':('Asia/Bangkok',), '1580':('America/Chicago',), '1581':('America/Toronto',), '1586':('America/New_York',), '1587':('America/Edmonton',), '1585':('America/New_York',), '508':('America/Miquelon',), '509':('America/Port-au-Prince',), '506':('America/Costa_Rica',), '1850689':('America/Chicago',), '504':('America/Tegucigalpa',), '505':('America/Chicago',), '502':('America/Guatemala',), '503':('America/El_Salvador',), '500':('Atlantic/Stanley',), '501':('America/Belize',), '1212':('America/New_York',), '1213':('America/Los_Angeles',), '1210':('America/Chicago',), '1216':('America/New_York',), '1217':('America/Chicago',), '1214':('America/Chicago',), '1215':('America/New_York',), '1218':('America/Chicago',), '4417684':('Europe/London',), '1850687':('America/Chicago',), '1850833':('America/Chicago',), '1850830':('America/Chicago',), '180728':('America/Toronto',), '1850837':('America/Chicago',), '1850835':('America/Chicago',), '1850838':('America/New_York',), '180727':('America/Winnipeg',), '1208437':('America/Los_Angeles',), '1208436':('America/Denver',), '4417683':('Europe/London',), '1208433':('America/Denver',), '142382':('America/New_York',), '1402376':('America/Denver',), '1402374':('America/Chicago',), '1402375':('America/Chicago',), '1402372':('America/Chicago',), '1402373':('America/Chicago',), '1402370':('America/Chicago',), '1402371':('America/Chicago',), '1402379':('America/Chicago',), '160542':('America/Chicago',), '160543':('America/Chicago',), '142386':('America/New_York',), '160544':('America/Chicago',), '160545':('America/Denver',), '160549':('America/Chicago',), '35818':('Europe/Mariehamn',), '35819':('Europe/Helsinki',), '35813':('Europe/Helsinki',), '35816':('Europe/Helsinki',), '35817':('Europe/Helsinki',), '35814':('Europe/Helsinki',), '35815':('Europe/Helsinki',), '1850526':('America/Chicago',), '1850527':('America/Chicago',), '1850522':('America/Chicago',), 
'1850523':('America/New_York',), '683':('Pacific/Niue',), '181291':('America/New_York',), '181295':('America/New_York',), '181294':('America/New_York',), '216':('Africa/Tunis',), '181274':('America/Chicago',), '181276':('America/Chicago',), '212':('Atlantic/Canary',), '213':('Europe/Paris',), '181273':('America/New_York',), '211':('Africa/Nairobi',), '181279':('America/New_York',), '218':('Europe/Bucharest',), '3488':('Europe/Madrid',), '3481':('Europe/Madrid',), '3483':('Europe/Madrid',), '3485':('Europe/Madrid',), '3484':('Europe/Madrid',), '3487':('Europe/Madrid',), '63':('Asia/Manila',), '62918':('Asia/Jayapura',), '62913':('Asia/Jayapura',), '62911':('Asia/Jayapura',), '62910':('Asia/Jakarta', 'Asia/Jayapura'), '62917':('Asia/Jayapura',), '62916':('Asia/Jayapura',), '62915':('Asia/Jakarta', 'Asia/Jayapura'), '62914':('Asia/Jayapura',), '1541884':('America/Los_Angeles',), '1541885':('America/Los_Angeles',), '1541882':('America/Los_Angeles',), '1541883':('America/Los_Angeles',), '1541881':('America/Denver',), '1423652':('America/New_York',), '1541889':('America/Denver',), '1858':('America/Los_Angeles',), '1859':('America/New_York',), '4418':('Europe/London',), '4412':('Europe/London',), '1856':('America/New_York',), '1857':('America/New_York',), '17093':('America/St_Johns',), '17096':('America/St_Johns',), '17097':('America/St_Johns',), '17094':('America/St_Johns',), '17095':('America/St_Johns',), '17099':('America/Halifax',), '157475':('America/New_York',), '1931':('America/Chicago',), '125029':('America/Vancouver',), '1937':('America/New_York',), '1936':('America/Chicago',), '370':('Europe/Bucharest',), '125022':('America/Vancouver',), '1939':('America/Puerto_Rico',), '125021':('America/Vancouver',), '125025':('America/Vancouver',), '375':('Europe/Minsk',), '374':('Asia/Yerevan',), '377':('Europe/Monaco',), '376':('Europe/Andorra',), '1605928':('America/Chicago',), '142397':('America/New_York',), '142396':('America/New_York',), '142395':('America/New_York',), 
'142394':('America/Chicago',), '142392':('America/New_York',), '142391':('America/New_York',), '142398':('America/New_York',), '5239':('America/Mexico_City',), '1731':('America/Chicago',), '1308327':('America/Denver',), '1308324':('America/Chicago',), '1732':('America/New_York',), '5234':('America/Mexico_City',), '1734':('America/New_York',), '5237':('America/Mexico_City',), '44170':('Europe/London',), '44173':('Europe/London',), '44172':('Europe/London',), '44175':('Europe/London',), '44174':('Europe/London',), '44177':('Europe/London',), '44179':('Europe/London',), '44178':('Europe/London',), '507':('America/Panama',), '1605487':('America/Chicago',), '181262':('America/New_York',), '1605859':('America/Denver',), '1605852':('America/Chicago',), '1605853':('America/Chicago',), '1605854':('America/Chicago',), '1605856':('America/Chicago',), '181263':('America/New_York',), '1209':('America/Los_Angeles',), '1205':('America/Chicago',), '1204':('America/Winnipeg',), '1207':('America/New_York',), '1206':('America/Los_Angeles',), '1201':('America/New_York',), '1203':('America/New_York',), '1202':('America/New_York',), '1443':('America/New_York',), '1441':('Atlantic/Bermuda',), '1440':('America/New_York',), '441698':('Europe/London',), '441694':('Europe/London',), '441695':('Europe/London',), '441697':('Europe/London',), '441690':('Europe/London',), '441691':('Europe/London',), '441692':('Europe/London',), '160553':('America/Chicago',), '160552':('America/Chicago',), '160554':('America/Chicago',), '160557':('America/Denver',), '160559':('America/Chicago',), '154145':('America/Los_Angeles',), '121922':('America/Chicago',), '121926':('America/Chicago',), '4413873':('Europe/London',), '121925':('America/New_York',), '127083':('America/Chicago',), '127084':('America/Chicago',), '127085':('America/Chicago',), '127088':('America/Chicago',), '127089':('America/Chicago',), '16066':('America/New_York',), '127027':('America/Chicago',), '181240':('America/Chicago',), 
'181244':('America/New_York',), '181245':('America/Chicago',), '181249':('America/Chicago',), '16068':('America/New_York',), '1574893':('America/New_York',), '16069':('America/New_York',), '1574892':('America/New_York',), '1574896':('America/Chicago',), '3582':('Europe/Helsinki',), '3583':('Europe/Helsinki',), '3585':('Europe/Helsinki',), '3586':('Europe/Helsinki',), '3588':('Europe/Helsinki',), '3589':('Europe/Helsinki',), '1515':('America/Chicago',), '7301':('Asia/Irkutsk',), '1514':('America/Toronto',), '1849':('America/Halifax',), '1843':('America/New_York',), '1847':('America/Chicago',), '1845':('America/New_York',), '1208438':('America/Denver',), '12085':('America/Denver',), '12083':('America/Denver',), '1512':('America/Chicago',), '1423447':('America/Chicago',), '1423442':('America/New_York',), '1620382':('America/Chicago',), '1620384':('America/Denver',), '125039':('America/Vancouver',), '125038':('America/Vancouver',), '7345':('Asia/Yekaterinburg',), '7342':('Asia/Yekaterinburg',), '7343':('Asia/Yekaterinburg',), '1908':('America/New_York',), '1909':('America/Los_Angeles',), '125031':('America/Vancouver',), '1907':('America/Juneau',), '125033':('America/Vancouver',), '1905':('America/Toronto',), '125035':('America/Vancouver',), '1903':('America/Chicago',), '125037':('America/Vancouver',), '125036':('America/Vancouver',), '120824':('America/Los_Angeles',), '120827':('America/Denver',), '120826':('America/Los_Angeles',), '850':('Asia/Pyongyang',), '120823':('America/Denver',), '120822':('America/Denver',), '120828':('America/Denver',), '1270988':('America/Chicago',), '1270982':('America/New_York',), '190656':('America/Chicago',), '190652':('America/New_York',), '190658':('America/New_York',), '44163':('Europe/London',), '44160':('Europe/London',), '44161':('Europe/London',), '44166':('Europe/London',), '44167':('Europe/London',), '44164':('Europe/London',), '44165':('Europe/London',), '44168':('Europe/London',), '160547':('America/Chicago',), 
'740':('Europe/Bucharest',), '747':('Europe/Moscow',), '748':('Europe/Moscow',), '1208746':('America/Los_Angeles',), '1208745':('America/Denver',), '1208743':('America/Los_Angeles',), '6919':('Pacific/Ponape',), '1308233':('America/Chicago',), '1308234':('America/Chicago',), '1308235':('America/Denver',), '1308236':('America/Chicago',), '1308237':('America/Chicago',), '6898':('Pacific/Tahiti',), '855':('Asia/Phnom_Penh',), '1270369':('America/New_York',), '1270365':('America/Chicago',), '1270362':('America/Chicago',), '1270360':('America/New_York',), '52311':('America/Mazatlan',), '618':('Australia/Perth', 'Australia/Adelaide'), '5235':('America/Mexico_City',), '612':('Australia/Sydney',), '1208290':('America/Los_Angeles',), '1208292':('America/Los_Angeles',), '617':('Australia/Sydney',), '1276':('America/New_York',), '1473':('America/Grenada',), '1478':('America/New_York',), '1479':('America/Chicago',), '1304':('America/New_York',), '1305':('America/New_York',), '1307':('America/Denver',), '1301':('America/New_York',), '1302':('America/New_York',), '1303':('America/Denver',), '1807226':('America/Winnipeg',), '1807223':('America/Winnipeg',), '1309':('America/Chicago',), '160568':('America/Denver',), '160569':('America/Chicago',), '160567':('America/Denver',), '160561':('America/Chicago',), '160562':('America/Chicago',), '27':('Africa/Johannesburg',), '20':('Africa/Cairo',), '1850727':('America/New_York',), '1850722':('America/Chicago',), '1850729':('America/Chicago',), '1850508':('America/New_York',), '1850509':('America/New_York',), '5263':('America/Mazatlan',), '1850505':('America/Chicago',), '1850502':('America/Chicago',), '5262':('America/Mazatlan',), '127090':('America/Chicago',), '127093':('America/Chicago',), '127092':('America/Chicago',), '19064':('America/New_York',), '127096':('America/Chicago',), '19069':('America/Chicago',), '160577':('America/Chicago',), '181259':('America/New_York',), '181253':('America/New_York',), '181252':('America/New_York',), 
'154141':('America/Los_Angeles',), '154140':('America/Los_Angeles',), '154143':('America/Los_Angeles',), '121928':('America/Chicago',), '1250489':('America/Edmonton',), '154144':('America/Los_Angeles',), '154146':('America/Los_Angeles',), '154149':('America/Los_Angeles',), '154148':('America/Los_Angeles',), '1250487':('America/Vancouver',), '121927':('America/New_York',), '1250480':('America/Vancouver',), '1250483':('America/Vancouver',), '160586':('America/Denver',), '160587':('America/Chicago',), '160584':('America/Chicago',), '160582':('America/Denver',), '7':('Asia/Almaty', 'Asia/Aqtobe', 'Asia/Irkutsk', 'Asia/Kamchatka', 'Asia/Krasnoyarsk', 'Asia/Magadan', 'Asia/Novosibirsk', 'Asia/Omsk', 'Asia/Sakhalin', 'Asia/Vladivostok', 'Asia/Yakutsk', 'Asia/Yekaterinburg', 'Europe/Bucharest', 'Europe/Moscow', 'Europe/Samara'), '1250248':('America/Vancouver',), '1250247':('America/Vancouver',), '1250246':('America/Vancouver',), '1250245':('America/Vancouver',), '1250242':('America/Edmonton',), '160588':('America/Chicago',), '160589':('America/Denver',), '1605722':('America/Denver',), '1605723':('America/Denver',), '1605720':('America/Denver',), '1605721':('America/Denver',), '1605724':('America/Chicago',), '1605725':('America/Chicago',), '52312':('America/Mexico_City',), '52313':('America/Mexico_City',), '140238':('America/Chicago',), '140239':('America/Chicago',), '52316':('America/Mexico_City',), '52317':('America/Mexico_City',), '52314':('America/Mexico_City',), '52315':('America/Mexico_City',), '140232':('America/Chicago',), '140233':('America/Chicago',), '52319':('America/Mazatlan',), '140236':('America/Chicago',), '140234':('America/Chicago',), '140235':('America/Chicago',), '1876':('America/Jamaica',), '1870':('America/Chicago',), '977':('Asia/Katmandu',), '976':('Asia/Choibalsan', 'Asia/Hovd', 'Asia/Ulaanbaatar'), '975':('Asia/Thimphu',), '974':('Asia/Qatar',), '973':('Asia/Bahrain',), '972':('Asia/Jerusalem',), '971':('Asia/Dubai',), '970':('Europe/Bucharest',), 
'1812477':('America/Chicago',), '14022':('America/Chicago',), '14026':('America/Chicago',), '14027':('America/Chicago',), '14024':('America/Chicago',), '14025':('America/Chicago',), '14028':('America/Chicago',), '14029':('America/Chicago',), '1919':('America/New_York',), '1918':('America/Chicago',), '1910':('America/New_York',), '1913':('America/Chicago',), '1912':('America/New_York',), '1915':('America/Denver',), '1914':('America/New_York',), '1917':('America/New_York',), '1916':('America/Los_Angeles',), '880':('Asia/Dhaka',), '886':('Asia/Taipei',), '34864':('Europe/Madrid',), '34865':('Europe/Madrid',), '190663':('America/New_York',), '190664':('America/New_York',), '190666':('America/Chicago',), '34868':('Europe/Madrid',), '34869':('Europe/Madrid', 'Atlantic/Canary'), '44159':('Europe/London',), '44158':('Europe/London',), '44157':('Europe/London',), '44156':('Europe/London',), '44155':('Europe/London',), '44154':('Europe/London',), '44151':('Europe/London',), '44150':('Europe/London',), '1208755':('America/Los_Angeles',), '1208756':('America/Denver',), '1850995':('America/Chicago',), '1850994':('America/Chicago',), '1850997':('America/New_York',), '68991':('Pacific/Marquesas',), '68990':('Pacific/Tahiti',), '68993':('Pacific/Tahiti',), '68992':('Pacific/Marquesas',), '68995':('Pacific/Tahiti',), '68994':('Pacific/Tahiti',), '68997':('Pacific/Gambier',), '68996':('Pacific/Tahiti',), '68998':('Pacific/Tahiti',), '4419467':('Europe/London',), '1208622':('America/Denver',), '1208623':('America/Los_Angeles',), '1208624':('America/Denver',), '1208628':('America/Los_Angeles',), '1208629':('America/Denver',), '1269':('America/New_York',), '1268':('America/Antigua',), '1262':('America/Chicago',), '1260':('America/New_York',), '1267':('America/New_York',), '1264':('America/Anguilla',), '181272':('America/New_York',), '1317':('America/New_York',), '1316':('America/Chicago',), '1315':('America/New_York',), '1314':('America/Chicago',), '1313':('America/New_York',), 
'1312':('America/Chicago',), '1310':('America/Los_Angeles',), '1319':('America/Chicago',), '1318':('America/Chicago',), '1867667':('America/Vancouver',), '160578':('America/Denver',), '160573':('America/Chicago',), '1270763':('America/New_York',), '160571':('America/Denver',), '1270761':('America/Chicago',), '1867669':('America/Edmonton',), '160576':('America/Chicago',), '160575':('America/Chicago',), '1270765':('America/New_York',), '5591':('America/Manaus',), '5593':('America/Manaus',), '5592':('America/Manaus',), '5595':('America/Manaus',), '5594':('America/Manaus',), '5597':('America/Manaus',), '5596':('America/Sao_Paulo',), '5599':('America/Sao_Paulo',), '5598':('America/Sao_Paulo',), '7728':('Asia/Almaty',), '7729':('Asia/Aqtobe',), '4417687':('Europe/London',), '7721':('Asia/Almaty',), '7722':('Asia/Almaty',), '7723':('Asia/Almaty',), '7724':('Asia/Almaty',), '7725':('Asia/Almaty',), '7726':('Asia/Almaty',), '7727':('Asia/Almaty',), '185081':('America/Chicago',), '185086':('America/Chicago',), '185085':('America/Chicago',), '185088':('America/Chicago',), '1785852':('America/Denver',), '1785856':('America/Chicago',), '12505':('America/Vancouver',), '12509':('America/Vancouver',), '12508':('America/Vancouver',), '170143':('America/Chicago',), '442':('Europe/London',), '127046':('America/New_York',), '127047':('America/Chicago',), '127044':('America/Chicago',), '127042':('America/New_York',), '127043':('America/Chicago',), '127041':('America/Chicago',), '127048':('America/Chicago',), '441763':('Europe/London',), '1812546':('America/New_York',), '1812547':('America/Chicago',), '1812386':('America/Chicago',), '1812384':('America/New_York',), '1812385':('America/Chicago',), '160594':('America/Chicago',), '1270786':('America/Chicago',), '160596':('America/Denver',), '1270780':('America/Chicago',), '1270781':('America/Chicago',), '1270782':('America/Chicago',), '1270783':('America/Chicago',), '160599':('America/Chicago',), '1270789':('America/New_York',), 
'52983':('America/Mexico_City', 'America/New_York'), '52982':('America/Mexico_City',), '52981':('America/Mexico_City',), '52987':('America/New_York',), '52986':('America/Mexico_City',), '52329':('America/Mazatlan', 'America/Mexico_City'), '52328':('America/Mexico_City',), '52327':('America/Mazatlan',), '52326':('America/Mexico_City',), '52325':('America/Mazatlan',), '52324':('America/Mazatlan',), '52323':('America/Mazatlan',), '52322':('America/Mexico_City',), '52321':('America/Mexico_City',), '1860':('America/New_York',), '1863':('America/New_York',), '1862':('America/New_York',), '1865':('America/New_York',), '1864':('America/New_York',), '1869':('America/St_Kitts',), '1868':('America/Port_of_Spain',), '964':('Asia/Baghdad',), '965':('Asia/Kuwait',), '966':('Asia/Riyadh',), '967':('Asia/Aden',), '960':('Indian/Maldives',), '1605666':('America/Denver',), '1605665':('America/Chicago',), '6298':('Asia/Jayapura',), '6297':('Asia/Jayapura',), '1605669':('America/Chicago',), '1605668':('America/Chicago',), '968':('Asia/Muscat',), '6292':('Asia/Jayapura',), '6290':('Asia/Jayapura',), '34860':('Europe/Madrid',), '62517':('Asia/Makassar',), '36':('Europe/Budapest',), '52613':('America/Mazatlan',), '52612':('America/Mazatlan',), '52616':('America/Tijuana', 'America/Mazatlan'), '52615':('America/Mazatlan',), '52614':('America/Mazatlan',), '62516':('Asia/Jakarta',), '52618':('America/Mexico_City',), '170182':('America/Denver',), '643':('Pacific/Auckland',), '62962':('Asia/Jayapura',), '62963':('Asia/Jakarta',), '1534':('America/Chicago',), '62966':('Asia/Jayapura',), '62967':('Asia/Jayapura',), '62969':('Asia/Jayapura',), '644':('Pacific/Auckland',), '1530':('America/Los_Angeles',), '190678':('America/New_York',), '190677':('America/Chicago',), '190675':('America/Chicago',), '52389':('America/Mazatlan',), '52388':('America/Mexico_City',), '44149':('Europe/London',), '44140':('Europe/London',), '44141':('Europe/London',), '44142':('Europe/London',), 
'44143':('Europe/London',), '44144':('Europe/London',), '44145':('Europe/London',), '44146':('Europe/London',), '44147':('Europe/London',), '1208769':('America/Los_Angeles',), '1780':('America/Edmonton',), '1781':('America/New_York',), '1208762':('America/Los_Angeles',), '1784':('America/St_Vincent',), '1786':('America/New_York',), '1787':('America/Puerto_Rico',), '1610':('America/New_York',), '52383':('America/Mexico_City',), '52382':('America/Mexico_City',), '34928':('Atlantic/Canary',), '34925':('Europe/Madrid',), '34924':('Europe/Madrid',), '34927':('Europe/Madrid',), '1850936':('America/Chicago',), '34921':('Europe/Madrid',), '34920':('Europe/Madrid',), '34923':('Europe/Madrid',), '34922':('Atlantic/Canary',), '1678':('America/New_York',), '1670':('Pacific/Saipan',), '1671':('Pacific/Guam',), '678':('Pacific/Efate',), '679':('Pacific/Fiji',), '674':('Pacific/Nauru',), '351295':('Atlantic/Azores',), '351296':('Atlantic/Azores',), '677':('Pacific/Guadalcanal',), '670':('Asia/Dili',), '351291':('Atlantic/Madeira',), '351292':('Atlantic/Azores',), '673':('Asia/Brunei',), '1701228':('America/Chicago',), '29998':('America/Godthab',), '29999':('America/Scoresbysund',), '1701221':('America/Chicago',), '1701222':('America/Chicago',), '29991':('America/Godthab',), '29996':('America/Godthab',), '29997':('America/Thule',), '29994':('America/Godthab',), '29995':('America/Godthab',), '1270234':('America/New_York',), '1270236':('America/Chicago',), '1270237':('America/Chicago',), '1270230':('America/Chicago',), '17859':('America/Chicago',), '17856':('America/Chicago',), '17857':('America/Chicago',), '17854':('America/Chicago',), '17855':('America/Chicago',), '17852':('America/Chicago',), '17853':('America/Chicago',), '1418':('America/Toronto',), '1419':('America/New_York',), '1410':('America/New_York',), '1412':('America/New_York',), '1413':('America/New_York',), '1414':('America/Chicago',), '1415':('America/Los_Angeles',), '1416':('America/Toronto',), 
'1417':('America/Chicago',), '1323':('America/Los_Angeles',), '1320':('America/Chicago',), '1321':('America/New_York',), '1325':('America/Chicago',), '856':('Asia/Vientiane',), '5568':('America/Rio_Branco',), '5569':('America/Manaus',), '5564':('America/Sao_Paulo',), '5565':('America/Manaus',), '5566':('America/Manaus',), '5567':('America/Manaus',), '5561':('America/Sao_Paulo',), '5562':('America/Sao_Paulo',), '5563':('America/Sao_Paulo',), '1256':('America/Chicago',), '1254':('America/Chicago',), '1208814':('America/Denver',), '1252':('America/New_York',), '1253':('America/Los_Angeles',), '1251':('America/Chicago',), '1208818':('America/Los_Angeles',), '185091':('America/Chicago',), '185092':('America/New_York',), '185095':('America/Chicago',), '185097':('America/New_York',), '185096':('America/Chicago',), '185098':('America/Chicago',), '130824':('America/Denver',), '130825':('America/Denver',), '130826':('America/Denver',), '7427':('Asia/Magadan',), '7426':('Asia/Vladivostok',), '7424':('Asia/Vladivostok',), '7423':('Asia/Vladivostok',), '7421':('Asia/Vladivostok',), '441480':('Europe/London',), '441482':('Europe/London',), '181231':('America/New_York',), '86':('Asia/Shanghai',), '181233':('America/New_York',), '181232':('America/New_York',), '181235':('America/New_York',), '181234':('America/New_York',), '181237':('America/New_York',), '181236':('America/New_York',), '441487':('Europe/London',), '62526':('Asia/Makassar',), '62527':('Asia/Makassar',), '1250261':('America/Edmonton',), '1250260':('America/Vancouver',), '1250263':('America/Edmonton',), '1250262':('America/Edmonton',), '1250265':('America/Vancouver',), '154185':('America/Los_Angeles',), '154184':('America/Los_Angeles',), '154187':('America/Los_Angeles',), '154186':('America/Los_Angeles',), '154181':('America/Los_Angeles',), '154180':('America/Los_Angeles',), '154183':('America/Los_Angeles',), '154182':('America/Los_Angeles',), '154189':('America/Los_Angeles',), '142343':('America/New_York',), 
'52998':('America/Mexico_City', 'America/New_York'), '52999':('America/Mexico_City',), '52994':('America/Mexico_City',), '1423658':('America/Chicago',), '52996':('America/Mexico_City',), '52997':('America/Mexico_City',), '52991':('America/Mexico_City',), '52992':('America/Mexico_City',), '52993':('America/Mexico_City',), '1605279':('America/Denver',), '1605274':('America/Chicago',), '1605275':('America/Chicago',), '69137':('Pacific/Kosrae', 'Pacific/Truk'), '1605271':('America/Chicago',), '69132':('Pacific/Ponape',), '69133':('Pacific/Truk',), '441484':('Europe/London',), '1812753':('America/Chicago',), '1812752':('America/New_York',), '1541888':('America/Los_Angeles',), '4776':('Europe/Oslo',), '120872':('America/Denver',), '120873':('America/Denver',), '120870':('America/Denver',), '120871':('America/Denver',), '120877':('America/Los_Angeles',), '475':('Europe/Oslo',), '4772':('Europe/Oslo',), '4773':('Europe/Oslo',), '4411':('Europe/London',), '18673':('America/Vancouver',), '18677':('America/Edmonton',), '18674':('America/Vancouver',), '18678':('America/Edmonton',), '44193':('Europe/London',), '44192':('Europe/London',), '44191':('Europe/London',), '44190':('Europe/London',), '44197':('Europe/London',), '44196':('Europe/London',), '44195':('Europe/London',), '44199':('Europe/London',), '44198':('Europe/London',), '1664':('America/Montserrat',), '1662':('America/Chicago',), '1661':('America/Los_Angeles',), '1660':('America/Chicago',), '591':('America/La_Paz',), '590':('America/Guadeloupe', 'America/Halifax', 'America/Marigot'), '593':('America/Guayaquil', 'Pacific/Galapagos'), '592':('America/Guyana',), '595':('America/Asuncion',), '594':('America/Cayenne',), '597':('America/Paramaribo',), '596':('America/Martinique',), '599':('America/Curacao', 'America/Kralendijk'), '598':('America/Montevideo',), '18079':('America/Winnipeg',), '18078':('America/Toronto',), '18073':('America/Toronto',), '18076':('America/Toronto',), '160632':('America/New_York',), 
'160633':('America/New_York',), '160634':('America/New_York',), '160635':('America/New_York',), '160636':('America/New_York',), '160637':('America/New_York',), '160638':('America/Chicago',), '1409':('America/Chicago',), '1408':('America/Los_Angeles',), '1403':('America/Edmonton',), '1401':('America/New_York',), '1407':('America/New_York',), '1406':('America/Denver',), '1405':('America/Chicago',), '1404':('America/New_York',), '692':('Pacific/Majuro',), '1334':('America/Chicago',), '1337':('America/Chicago',), '1336':('America/New_York',), '1330':('America/New_York',), '39':('Europe/Rome',), '33':('Europe/Paris',), '32':('Europe/Brussels',), '31':('Europe/Amsterdam',), '30':('Europe/Athens',), '125028':('America/Vancouver',), '34':('Atlantic/Canary', 'Europe/Madrid'), '170183':('America/Chicago',), '1240':('America/New_York',), '186764':('America/Winnipeg',), '1242':('America/Nassau',), '186763':('America/Vancouver',), '647':('Pacific/Auckland',), '1246':('America/Barbados',), '649':('Pacific/Auckland',), '1248':('America/New_York',), '170188':('America/Chicago',), '186769':('America/Edmonton',), '1850227':('America/Chicago',), '1850226':('America/Chicago',), '1850224':('America/New_York',), '1850223':('America/New_York',), '1850222':('America/New_York',), '1850229':('America/Chicago',), '441528':('Europe/London',), '441529':('Europe/London',), '441526':('Europe/London',), '441527':('Europe/London',), '441524':('Europe/London',), '441525':('Europe/London',), '441522':('Europe/London',), '441520':('Europe/London',), '7413':('Asia/Vladivostok',), '7411':('Asia/Yakutsk',), '7416':('Asia/Yakutsk',), '7415':('Asia/Magadan',), '127062':('America/Chicago',), '127063':('America/Chicago',), '127064':('America/Chicago',), '127065':('America/Chicago',), '127066':('America/Chicago',), '127067':('America/Chicago',), '127068':('America/Chicago',), '1850421':('America/New_York',), '1850423':('America/Chicago',), '1850422':('America/New_York',), '1850425':('America/New_York',), 
'1850424':('America/Chicago',), '1850429':('America/Chicago',), '130836':('America/Chicago',), '130835':('America/Denver',), '130834':('America/Chicago',), '130838':('America/Chicago',), '12198':('America/Chicago',), '12199':('America/Chicago',), '12193':('America/Chicago',), '12194':('America/Chicago',), '12195':('America/Chicago',), '12196':('America/Chicago',), '12197':('America/Chicago',), '185046':('America/Chicago',), '185047':('America/Chicago',), '185044':('America/Chicago',), '185045':('America/Chicago',), '185040':('America/New_York',), '185041':('America/Chicago',), '185049':('America/Chicago',), '441760':('Europe/London',), '170988':('America/St_Johns',), '170983':('America/St_Johns',), '1605791':('America/Denver',), '1605796':('America/Chicago',), '1605649':('America/Chicago',), '1605644':('America/Denver',), '1605647':('America/Chicago',), '1605642':('America/Denver',), '1270769':('America/New_York',), '1270762':('America/Chicago',), '19063':('America/New_York',), '135':('America/New_York',), '52671':('America/Mexico_City',), '52673':('America/Mazatlan',), '52672':('America/Mazatlan',), '52675':('America/Mexico_City',), '52674':('America/Mexico_City',), '52677':('America/Mexico_City',), '52676':('America/Mexico_City',), '120869':('America/Los_Angeles',), '120861':('America/Los_Angeles',), '120860':('America/Denver',), '120863':('America/Denver',), '120865':('America/Denver',), '120864':('America/Denver',), '120866':('America/Los_Angeles',), '1308398':('America/Chicago',), '1308394':('America/Denver',), '15744':('America/New_York',), '15745':('America/New_York',), '15746':('America/New_York',), '15742':('America/New_York',), '15743':('America/New_York',), '15749':('America/New_York',), '186799':('America/Vancouver',), '186798':('America/Edmonton',), '82':('Asia/Seoul',), '186792':('America/Edmonton',), '81':('Asia/Tokyo',), '186797':('America/Toronto',), '84':('Asia/Saigon',), '1812967':('America/New_York',), '1812963':('America/Chicago',), 
'1650':('America/Los_Angeles',), '1651':('America/Chicago',), '1208922':('America/Denver',), '1850891':('America/New_York',), '1850892':('America/Chicago',), '1850893':('America/New_York',), '1850894':('America/New_York',), '1385':('America/Denver',), '1386':('America/New_York',), '1434':('America/New_York',), '1435':('America/Denver',), '1432':('America/Chicago',), '1636':('America/Chicago',), '1850696':('America/Chicago',), '1438':('America/Toronto',), '1850309':('America/New_York',), '5233':('America/Mexico_City',), '1340':('America/St_Thomas',), '1850301':('America/Chicago',), '1345':('America/Cayman',), '1347':('America/New_York',), '15419':('America/Los_Angeles',), '15415':('America/Los_Angeles',), '15416':('America/Los_Angeles',), '15417':('America/Los_Angeles',), '15412':('America/Los_Angeles',), '7711':('Asia/Aqtobe',), '7710':('Asia/Almaty',), '7713':('Asia/Aqtobe',), '7712':('Asia/Aqtobe',), '7715':('Asia/Almaty',), '7714':('Asia/Almaty',), '7717':('Asia/Almaty',), '7716':('Asia/Almaty',), '7718':('Asia/Almaty',), '1502':('America/New_York',), '1503':('America/Los_Angeles',), '1850769':('America/Chicago',), '1208835':('America/Los_Angeles',), '1507':('America/Chicago',), '1208837':('America/Denver',), '170179':('America/Chicago',), '170177':('America/Chicago',), '1508':('America/New_York',), '1509':('America/Los_Angeles',), '170172':('America/Chicago',), '170173':('America/Chicago',), '1850765':('America/New_York',), '1850547':('America/Chicago',), '1850545':('America/New_York',), '441539':('Europe/London',), '441538':('Europe/London',), '441535':('Europe/London',), '441534':('Atlantic/Reykjavik',), '441536':('Europe/London',), '441531':('Europe/London',), '441530':('Europe/London',), '1270766':('America/New_York',), '127079':('America/Chicago',), '127077':('America/Chicago',), '127075':('America/Chicago',), '127074':('America/Chicago',), '127073':('America/New_York',), '127072':('America/Chicago',), '1270767':('America/Chicago',), 
'127070':('America/Chicago',), '1850438':('America/Chicago',), '1850439':('America/Chicago',), '1850436':('America/Chicago',), '1850437':('America/Chicago',), '1850434':('America/Chicago',), '1850435':('America/Chicago',), '1850432':('America/Chicago',), '1850433':('America/Chicago',), '1850431':('America/New_York',), '441629':('Europe/London',), '441628':('Europe/London',), '441621':('Europe/London',), '441620':('Europe/London',), '441623':('Europe/London',), '441622':('Europe/London',), '441625':('Europe/London',), '441624':('Atlantic/Reykjavik',), '441626':('Europe/London',), '130843':('America/Denver',), '130846':('America/Chicago',), '130845':('America/Chicago',), '1904':('America/New_York',), '563':('America/Santiago',), '12506':('America/Vancouver',), '185057':('America/New_York',), '185056':('America/New_York',), '185051':('America/New_York',), '7395':('Asia/Irkutsk',), '7394':('Asia/Krasnoyarsk',), '996':('Asia/Bishkek',), '7391':('Asia/Krasnoyarsk',), '7390':('Asia/Krasnoyarsk',), '993':('Asia/Ashgabat',), '992':('Asia/Dushanbe',), '998':('Asia/Tashkent',), '52668':('America/Mazatlan',), '123':('America/New_York',), '52662':('America/Mazatlan',), '17013':('America/Chicago',), '52661':('America/Tijuana',), '17016':('America/Chicago',), '52667':('America/Mazatlan',), '52664':('America/Tijuana',), '52665':('America/Tijuana',), '190624':('America/New_York',), '190625':('America/New_York',), '190626':('America/Chicago',), '190622':('America/New_York',), '190623':('America/New_York',), '190629':('America/New_York',), '34828':('Atlantic/Canary',), '4416973':('Europe/London',), '4416974':('Europe/London',), '4416977':('Europe/London',), '34821':('Europe/Madrid',), '34822':('Atlantic/Canary',), '34823':('Europe/Madrid',), '34824':('Europe/Madrid',), '34825':('Europe/Madrid',), '34826':('Europe/Madrid',), '34827':('Europe/Madrid',), '16064':('America/New_York',), '1270862':('America/New_York',), '1270864':('America/Chicago',), '1270866':('America/Chicago',), 
'16065':('America/New_York',), '1647':('America/Toronto',), '1646':('America/New_York',), '1641':('America/Chicago',), '1649':('America/Grand_Turk',), '1701764':('America/Denver',), '1701766':('America/Chicago',), '1605925':('America/Chicago',), '1425':('America/Los_Angeles',), '1424':('America/Los_Angeles',), '157472':('America/New_York',), '1605923':('America/Denver',), '157478':('America/New_York',), '733':('Asia/Almaty',), '735':('Asia/Yekaterinburg',), '738':('Asia/Omsk',), '18122':('America/New_York',), '1850939':('America/Chicago',), '1519':('America/Toronto',), '1518':('America/New_York',), '1850933':('America/New_York',), '1850932':('America/Chicago',), '1517':('America/New_York',), '1516':('America/New_York',), '1850937':('America/Chicago',), '1510':('America/Los_Angeles',), '1513':('America/New_York',), '1850934':('America/Chicago',), '1270699':('America/New_York',), '1270692':('America/New_York',), '1270691':('America/Chicago',), '1308872':('America/Chicago',), '1850944':('America/Chicago',), '160597':('America/Chicago',), '643305':('Pacific/Chatham',), '127035':('America/New_York',), '4774':('Europe/Oslo',), '160598':('America/Chicago',), '357':('Asia/Nicosia',), '356':('Europe/Malta',), '355':('Europe/Tirane',), '354':('Atlantic/Reykjavik',), '353':('Europe/Dublin',), '352':('Europe/Luxembourg',), '351':('Europe/Lisbon', 'Atlantic/Azores', 'Atlantic/Madeira'), '350':('Europe/Gibraltar',), '1951':('America/Los_Angeles',), '359':('Europe/Sofia',), '358':('Europe/Helsinki', 'Europe/Mariehamn'), '125047':('America/Vancouver',), '185060':('America/Chicago',), '441761':('Europe/London',), '185063':('America/Chicago',), '441764':('Europe/London',), '441765':('Europe/London',), '441766':('Europe/London',), '441767':('Europe/London',), '441768':('Europe/London',), '441769':('Europe/London',), '263':('Africa/Harare',), '262':('Indian/Mayotte', 'Indian/Reunion'), '261':('Indian/Antananarivo',), '260':('Africa/Lusaka',), '267':('Africa/Gaborone',), 
'266':('Africa/Maseru',), '265':('Africa/Blantyre',), '264':('Africa/Lagos',), '52381':('America/Mexico_City',), '269':('Indian/Comoro',), '268':('Africa/Mbabane',), '52385':('America/Mexico_City',), '1562':('America/Los_Angeles',), '52387':('America/Mexico_City',), '52386':('America/Mexico_City',), '1563':('America/Chicago',), '170154':('America/Chicago',), '62522':('Asia/Jakarta',), '52656':('America/Mazatlan',), '52653':('America/Mazatlan',), '52652':('America/Mazatlan',), '52651':('America/Mazatlan',), '62525':('Asia/Jakarta',), '62528':('Asia/Jakarta',), '52659':('America/Mazatlan',), '52658':('America/Tijuana',), '120842':('America/Denver',), '120841':('America/Denver',), '120846':('America/Denver',), '120849':('America/Denver',), '120848':('America/Denver',), '1605223':('America/Denver',), '1605221':('America/Chicago',), '1605226':('America/Chicago',), '1605225':('America/Chicago',), '1605224':('America/Chicago',), '190686':('America/Chicago',), '1605229':('America/Chicago',), '1605359':('America/Chicago',), '1605353':('America/Chicago',), '1605352':('America/Chicago',), '1605357':('America/Chicago',), '1605356':('America/Chicago',), '1605355':('America/Denver',), '1270879':('America/Chicago',), '1270877':('America/New_York',), '1630':('America/Chicago',), '1631':('America/New_York',), '1208921':('America/Denver',), '1985':('America/Chicago',), '1208926':('America/Los_Angeles',), '1980':('America/New_York',), '160529':('America/Chicago',), '1989':('America/New_York',), '52985':('America/Mexico_City',), '562':('America/Santiago',), '52984':('America/Mexico_City', 'America/New_York'), '564':('America/Santiago',), '565':('America/Santiago',), '566':('America/Santiago',), '567':('America/Santiago',), '1701575':('America/Denver',), '1701577':('America/Chicago',), '1701572':('America/Chicago',), '52988':('America/Mexico_City',), '142369':('America/New_York',), '142362':('America/New_York',), '142363':('America/New_York',), '142361':('America/New_York',), 
'142366':('America/New_York',), '142364':('America/New_York',), '1208457':('America/Los_Angeles',), '1208455':('America/Denver',), '1208454':('America/Denver',), '1208453':('America/Denver',), '1208452':('America/Denver',), '1208459':('America/Denver',), '17019':('America/Chicago',), '48':('Europe/Warsaw',), '49':('Europe/Berlin',), '46':('Europe/Stockholm',), '47':('Europe/Oslo', 'Europe/Paris'), '44':('Europe/London',), '45':('Europe/Copenhagen',), '43':('Europe/Vienna',), '40':('Europe/Bucharest',), '41':('Europe/Zurich',), '170158':('America/Denver',), '154142':('America/Los_Angeles',), '186758':('America/Edmonton',), '1561':('America/New_York',), '170152':('America/Denver',), '170153':('America/Chicago',), '186753':('America/Vancouver',), '170156':('America/Denver',), '1360':('America/Los_Angeles',), '1850219':('America/New_York',), '1850216':('America/New_York',), '1850215':('America/Chicago',), '1850210':('America/New_York',), '1541476':('America/Los_Angeles',), '1541475':('America/Los_Angeles',), '1541474':('America/Los_Angeles',), '1541473':('America/Denver',), '1541472':('America/Los_Angeles',), '1541471':('America/Los_Angeles',), '1541479':('America/Los_Angeles',), '472':('Europe/Oslo',), '473':('Europe/Oslo',), '476':('Europe/Oslo',), '1850327':('America/Chicago',), '180748':('America/Winnipeg',), '1850325':('America/New_York',), '180746':('America/Winnipeg',), '180747':('America/Toronto',), '1850329':('America/New_York',), }
apache-2.0
Edraak/edraak-platform
common/djangoapps/xblock_django/models.py
22
2342
""" Models. """ from config_models.models import ConfigurationModel from django.db import models from django.utils.translation import ugettext_lazy as _ class XBlockConfiguration(ConfigurationModel): """ XBlock configuration used by both LMS and Studio, and not specific to a particular template. """ KEY_FIELDS = ('name',) # xblock name is unique class Meta(ConfigurationModel.Meta): app_label = 'xblock_django' # boolean field 'enabled' inherited from parent ConfigurationModel name = models.CharField(max_length=255, null=False, db_index=True) deprecated = models.BooleanField( default=False, verbose_name=_('show deprecation messaging in Studio') ) def __unicode__(self): return ( "XBlockConfiguration(name={}, enabled={}, deprecated={})" ).format(self.name, self.enabled, self.deprecated) class XBlockStudioConfigurationFlag(ConfigurationModel): """ Enables site-wide Studio configuration for XBlocks. """ class Meta(object): app_label = "xblock_django" # boolean field 'enabled' inherited from parent ConfigurationModel def __unicode__(self): return "XBlockStudioConfigurationFlag(enabled={})".format(self.enabled) class XBlockStudioConfiguration(ConfigurationModel): """ Studio editing configuration for a specific XBlock/template combination. 
""" KEY_FIELDS = ('name', 'template') # xblock name/template combination is unique FULL_SUPPORT = 'fs' PROVISIONAL_SUPPORT = 'ps' UNSUPPORTED = 'us' SUPPORT_CHOICES = ( (FULL_SUPPORT, _('Fully Supported')), (PROVISIONAL_SUPPORT, _('Provisionally Supported')), (UNSUPPORTED, _('Unsupported')) ) # boolean field 'enabled' inherited from parent ConfigurationModel name = models.CharField(max_length=255, null=False, db_index=True) template = models.CharField(max_length=255, blank=True, default='') support_level = models.CharField(max_length=2, choices=SUPPORT_CHOICES, default=UNSUPPORTED) class Meta(object): app_label = "xblock_django" def __unicode__(self): return ( "XBlockStudioConfiguration(name={}, template={}, enabled={}, support_level={})" ).format(self.name, self.template, self.enabled, self.support_level)
agpl-3.0
gaopeiliang/aiodocker
aiodocker/networks.py
2
1657
import json


class DockerNetworks:
    """Collection-level accessor for the Docker ``/networks`` endpoints."""

    def __init__(self, docker):
        # low-level client used to issue HTTP queries against the daemon
        self.docker = docker

    async def list(self):
        """Return the daemon's raw list of networks (list of dicts)."""
        data = await self.docker._query_json("networks")
        return data

    async def create(self, config):
        """Create a network from *config* and return a :class:`DockerNetwork`."""
        # The daemon expects a JSON body; sort keys for a stable payload.
        config = json.dumps(config, sort_keys=True).encode("utf-8")
        data = await self.docker._query_json(
            "networks/create", method="POST", data=config
        )
        return DockerNetwork(self.docker, data["Id"])

    async def get(self, net_specs):
        """Look up one network by id or name and wrap it."""
        data = await self.docker._query_json(
            "networks/{net_specs}".format(net_specs=net_specs),
            method="GET",
        )
        return DockerNetwork(self.docker, data["Id"])


class DockerNetwork:
    """Handle on a single network, identified by ``self.id``."""

    def __init__(self, docker, id_):
        self.docker = docker
        self.id = id_

    async def show(self):
        """Return the network's inspection data."""
        data = await self.docker._query_json("networks/{self.id}".format(self=self))
        return data

    async def delete(self):
        """Remove the network from the daemon."""
        response = await self.docker._query(
            "networks/{self.id}".format(self=self), method="DELETE"
        )
        # No body expected on DELETE; just release the connection.
        await response.release()
        return

    async def connect(self, config):
        """Attach a container (described by *config*) to this network."""
        config = json.dumps(config, sort_keys=True).encode("utf-8")
        await self.docker._query_json(
            "networks/{self.id}/connect".format(self=self), method="POST", data=config
        )

    async def disconnect(self, config):
        """Detach a container (described by *config*) from this network."""
        config = json.dumps(config, sort_keys=True).encode("utf-8")
        await self.docker._query_json(
            "networks/{self.id}/disconnect".format(self=self),
            method="POST",
            data=config,
        )
apache-2.0
simon-pepin/scikit-learn
sklearn/utils/graph.py
289
6239
""" Graph utilities and algorithms Graphs are represented with their adjacency matrices, preferably using sparse matrices. """ # Authors: Aric Hagberg <hagberg@lanl.gov> # Gael Varoquaux <gael.varoquaux@normalesup.org> # Jake Vanderplas <vanderplas@astro.washington.edu> # License: BSD 3 clause import numpy as np from scipy import sparse from .validation import check_array from .graph_shortest_path import graph_shortest_path ############################################################################### # Path and connected component analysis. # Code adapted from networkx def single_source_shortest_path_length(graph, source, cutoff=None): """Return the shortest path length from source to all reachable nodes. Returns a dictionary of shortest path lengths keyed by target. Parameters ---------- graph: sparse matrix or 2D array (preferably LIL matrix) Adjacency matrix of the graph source : node label Starting node for path cutoff : integer, optional Depth to stop the search - only paths of length <= cutoff are returned. Examples -------- >>> from sklearn.utils.graph import single_source_shortest_path_length >>> import numpy as np >>> graph = np.array([[ 0, 1, 0, 0], ... [ 1, 0, 1, 0], ... [ 0, 1, 0, 1], ... 
[ 0, 0, 1, 0]]) >>> single_source_shortest_path_length(graph, 0) {0: 0, 1: 1, 2: 2, 3: 3} >>> single_source_shortest_path_length(np.ones((6, 6)), 2) {0: 1, 1: 1, 2: 0, 3: 1, 4: 1, 5: 1} """ if sparse.isspmatrix(graph): graph = graph.tolil() else: graph = sparse.lil_matrix(graph) seen = {} # level (number of hops) when seen in BFS level = 0 # the current level next_level = [source] # dict of nodes to check at next level while next_level: this_level = next_level # advance to next level next_level = set() # and start a new list (fringe) for v in this_level: if v not in seen: seen[v] = level # set the level of vertex v next_level.update(graph.rows[v]) if cutoff is not None and cutoff <= level: break level += 1 return seen # return all path lengths as dictionary if hasattr(sparse, 'connected_components'): connected_components = sparse.connected_components else: from .sparsetools import connected_components ############################################################################### # Graph laplacian def graph_laplacian(csgraph, normed=False, return_diag=False): """ Return the Laplacian matrix of a directed graph. For non-symmetric graphs the out-degree is used in the computation. Parameters ---------- csgraph : array_like or sparse matrix, 2 dimensions compressed-sparse graph, with shape (N, N). normed : bool, optional If True, then compute normalized Laplacian. return_diag : bool, optional If True, then return diagonal as well as laplacian. Returns ------- lap : ndarray The N x N laplacian matrix of graph. diag : ndarray The length-N diagonal of the laplacian matrix. diag is returned only if return_diag is True. Notes ----- The Laplacian matrix of a graph is sometimes referred to as the "Kirchoff matrix" or the "admittance matrix", and is useful in many parts of spectral graph theory. In particular, the eigen-decomposition of the laplacian matrix can give insight into many properties of the graph. 
For non-symmetric directed graphs, the laplacian is computed using the out-degree of each node. """ if csgraph.ndim != 2 or csgraph.shape[0] != csgraph.shape[1]: raise ValueError('csgraph must be a square matrix or array') if normed and (np.issubdtype(csgraph.dtype, np.int) or np.issubdtype(csgraph.dtype, np.uint)): csgraph = check_array(csgraph, dtype=np.float64, accept_sparse=True) if sparse.isspmatrix(csgraph): return _laplacian_sparse(csgraph, normed=normed, return_diag=return_diag) else: return _laplacian_dense(csgraph, normed=normed, return_diag=return_diag) def _laplacian_sparse(graph, normed=False, return_diag=False): n_nodes = graph.shape[0] if not graph.format == 'coo': lap = (-graph).tocoo() else: lap = -graph.copy() diag_mask = (lap.row == lap.col) if not diag_mask.sum() == n_nodes: # The sparsity pattern of the matrix has holes on the diagonal, # we need to fix that diag_idx = lap.row[diag_mask] diagonal_holes = list(set(range(n_nodes)).difference(diag_idx)) new_data = np.concatenate([lap.data, np.ones(len(diagonal_holes))]) new_row = np.concatenate([lap.row, diagonal_holes]) new_col = np.concatenate([lap.col, diagonal_holes]) lap = sparse.coo_matrix((new_data, (new_row, new_col)), shape=lap.shape) diag_mask = (lap.row == lap.col) lap.data[diag_mask] = 0 w = -np.asarray(lap.sum(axis=1)).squeeze() if normed: w = np.sqrt(w) w_zeros = (w == 0) w[w_zeros] = 1 lap.data /= w[lap.row] lap.data /= w[lap.col] lap.data[diag_mask] = (1 - w_zeros[lap.row[diag_mask]]).astype( lap.data.dtype) else: lap.data[diag_mask] = w[lap.row[diag_mask]] if return_diag: return lap, w return lap def _laplacian_dense(graph, normed=False, return_diag=False): n_nodes = graph.shape[0] lap = -np.asarray(graph) # minus sign leads to a copy # set diagonal to zero lap.flat[::n_nodes + 1] = 0 w = -lap.sum(axis=0) if normed: w = np.sqrt(w) w_zeros = (w == 0) w[w_zeros] = 1 lap /= w lap /= w[:, np.newaxis] lap.flat[::n_nodes + 1] = (1 - w_zeros).astype(lap.dtype) else: lap.flat[::n_nodes + 
1] = w.astype(lap.dtype) if return_diag: return lap, w return lap
bsd-3-clause
lucasmauro/statstuff
tests/test_statistics.py
1
2792
from unittest import TestCase

from statstuff import statistics as stats


class Tests(TestCase):
    """Unit tests for the descriptive-statistics helpers in statstuff."""

    def test_average(self):
        # empty input yields None rather than raising
        self.assertEqual(None, stats.average([]))
        self.assertEqual(1.5, stats.average([1, 2]))
        self.assertEqual(25.35, stats.average([10.1, 20.8, 30.4, 40.1]))

    def test_correlation(self):
        # empty or mismatched-length series yield None
        self.assertEqual(None, stats.correlation([], []))
        self.assertEqual(None, stats.correlation([1, 2], []))
        self.assertEqual(None, stats.correlation([], [3, 4]))
        self.assertEqual(None, stats.correlation([1, 2], [3]))
        self.assertEqual(0.9808710485773766, stats.correlation(
            [2, 3, 4, 5, 5, 6, 7, 8], [4, 7, 9, 10, 11, 11, 13, 15]
        ))
        self.assertEqual(0.1691457810991552, stats.correlation(
            [60, 58, 73, 51, 54, 75, 48, 72, 75, 83, 62, 52],
            [80, 62, 70, 83, 62, 92, 79, 88, 54, 82, 64, 69]
        ))

    def test_median(self):
        self.assertEqual(None, stats.median([]))
        # even-length input: mean of the two middle values, order-independent
        self.assertEqual(3.01, stats.median([1.0494, 2.68, 3.34, 4.33]))
        self.assertEqual(3.01, stats.median([3.34, 2.68, 1.0494, 4.33]))
        # odd-length input: middle value, order-independent
        self.assertEqual(3.35, stats.median([1.9, 2.105, 3.35, 4.401, 5.55]))
        self.assertEqual(3.35, stats.median([3.35, 5.55, 1.9, 4.401, 2.105]))

    def test_range(self):
        self.assertEqual(None, stats.range([]))
        self.assertEqual(0, stats.range([20]))
        self.assertEqual(0, stats.range([1, 1]))
        self.assertEqual(1, stats.range([1, 2]))
        self.assertEqual(2, stats.range([2, 1, 3]))
        self.assertEqual(78891, stats.range([19874, 65084, 98765]))

    def test_range_limits(self):
        self.assertEqual((None, None), stats.range_limits([]))
        self.assertEqual((0, 0), stats.range_limits([0]))
        self.assertEqual((0, 2), stats.range_limits([0, 2]))
        self.assertEqual((-17, 78), stats.range_limits([78, 45, -17, 39]))

    def test_standard_deviation(self):
        self.assertEqual(None, stats.stdev([]))
        self.assertEqual(0, stats.stdev([45]))
        self.assertEqual(0, stats.stdev([1, 1]))
        self.assertEqual(0.5, stats.stdev([1, 2]))
        self.assertEqual(15.5, stats.stdev([1963, 1994]))

    def test_sum(self):
        self.assertEqual(0, stats.sum([]))
        self.assertEqual(1, stats.sum([1]))
        self.assertEqual(2, stats.sum([1, 1]))
        self.assertEqual(230880, stats.sum([9480, 6548, 65018, 149834]))

    def test_variance(self):
        self.assertEqual(None, stats.variance([]))
        self.assertEqual(0, stats.variance([50]))
        self.assertEqual(0, stats.variance([1, 1, 1, 1, 1]))
        self.assertEqual(0.25, stats.variance([1, 2]))
        self.assertEqual(138.6875, stats.variance([1963, 1983, 1989, 1994]))
mit
VladimirPal/marshmallow
marshmallow/fields.py
3
47187
# -*- coding: utf-8 -*- """Field classes for various types of data.""" from __future__ import absolute_import, unicode_literals import datetime as dt import uuid import warnings import decimal from operator import attrgetter from marshmallow import validate, utils, class_registry from marshmallow.base import FieldABC, SchemaABC from marshmallow.utils import missing as missing_ from marshmallow.compat import text_type, basestring from marshmallow.exceptions import ValidationError __all__ = [ 'Field', 'Raw', 'Nested', 'List', 'String', 'UUID', 'Number', 'Integer', 'Decimal', 'Boolean', 'FormattedString', 'Float', 'Arbitrary', 'DateTime', 'LocalDateTime', 'Time', 'Date', 'TimeDelta', 'Fixed', 'Price', 'Url', 'URL', 'Email', 'Method', 'Function', 'Select', 'QuerySelect', 'QuerySelectList', 'Enum', 'Str', 'Bool', 'Int', 'Constant', ] class Field(FieldABC): """Basic field from which other fields should extend. It applies no formatting by default, and should only be used in cases where data does not need to be formatted before being serialized or deserialized. On error, the name of the field will be returned. :param default: If set, this value will be used during serialization if the input value is missing. If not set, the field will be excluded from the serialized output if the input value is missing. May be a value or a callable. :param str attribute: The name of the attribute to get the value from. If `None`, assumes the attribute has the same name as the field. :param str load_from: Additional key to look for when deserializing. Will only be checked if the field's name is not found on the input dictionary. If checked, it will return this parameter on error. :param str error: Error message stored upon validation failure. :param callable validate: Validator or collection of validators that are called during deserialization. Validator takes a field's input value as its only parameter and returns a boolean. If it returns `False`, an :exc:`ValidationError` is raised. 
:param required: Raise an :exc:`ValidationError` if the field value is not supplied during deserialization. If not a `bool` (e.g. a `str`), the provided value will be used as the message of the :exc:`ValidationError` instead of the default message. :param allow_none: Set to `True` if `None` should be considered a valid value during validation/deserialization. If not a `bool` (e.g. a `str`), the provided value will be used as the message of the :exc:`ValidationError` instead of the default message. :param bool load_only: If `True` skip this field during serialization, otherwise its value will be present in the serialized data. :param bool dump_only: If `True` skip this field during deserialization, otherwise its value will be present in the deserialized object. In the context of an HTTP API, this effectively marks the field as "read-only". :param missing: Default deserialization value for the field if the field is not found in the input data. May be a value or a callable. :param metadata: Extra arguments to be stored as metadata. .. versionchanged:: 1.0.0 Deprecated `error` parameter. Raise a :exc:`marshmallow.ValidationError` instead. .. versionchanged:: 2.0.0 Added `allow_none` parameter, which makes validation/deserialization of `None` consistent across fields. .. versionchanged:: 2.0.0 Added `load_only` and `dump_only` parameters, which allow field skipping during the (de)serialization process. .. versionchanged:: 2.0.0 Added `missing` parameter, which indicates the value for a field if the field is not found during deserialization. .. versionchanged:: 2.0.0 ``default`` value is only used if explicitly set. Otherwise, missing values inputs are excluded from serialized output. """ # Some fields, such as Method fields and Function fields, are not expected # to exists as attributes on the objects to serialize. 
Set this to False # for those fields _CHECK_ATTRIBUTE = True _creation_index = 0 # Used for sorting def __init__(self, default=missing_, attribute=None, load_from=None, error=None, validate=None, required=False, allow_none=False, load_only=False, dump_only=False, missing=missing_, **metadata): self.default = default self.attribute = attribute self.load_from = load_from # this flag is used by Unmarshaller if error: warnings.warn('The error parameter is deprecated. Raise a ' 'marshmallow.ValidationError in your validators ' 'instead.', category=DeprecationWarning) self.error = error self.validate = validate if utils.is_iterable_but_not_string(validate): if not utils.is_generator(validate): self.validators = validate else: self.validators = list(validate) elif callable(validate): self.validators = [validate] elif validate is None: self.validators = [] else: raise ValueError("The 'validate' parameter must be a callable " "or a collection of callables.") self.required = required self.allow_none = allow_none self.load_only = load_only self.dump_only = dump_only self.missing = missing self.metadata = metadata self._creation_index = Field._creation_index Field._creation_index += 1 self.parent = FieldABC.parent def __repr__(self): return ('<fields.{ClassName}(default={self.default!r}, ' 'attribute={self.attribute!r}, error={self.error!r}, ' 'validate={self.validate}, required={self.required}, ' 'load_only={self.load_only}, dump_only={self.dump_only}, ' 'missing={self.missing}, allow_none={self.allow_none})>' .format(ClassName=self.__class__.__name__, self=self)) def get_value(self, attr, obj, accessor=None): """Return the value for a given key from an object.""" # NOTE: Use getattr instead of direct attribute access here so that # subclasses aren't required to define `attribute` member attribute = getattr(self, 'attribute', None) accessor_func = accessor or utils.get_value check_key = attr if attribute is None else attribute return accessor_func(check_key, obj) def 
_validate(self, value): """Perform validation on ``value``. Raise a :exc:`ValidationError` if validation does not succeed. """ errors = [] for validator in self.validators: func_name = utils.get_callable_name(validator) msg = 'Validator {0}({1}) is False'.format( func_name, value ) try: if validator(value) is False: raise ValidationError(getattr(self, 'error', None) or msg) except ValidationError as err: if isinstance(err.messages, dict): errors.append(err.messages) else: errors.extend(err.messages) if errors: raise ValidationError(errors) def _validate_missing(self, value): """Validate missing values. Raise a :exc:`ValidationError` if `value` should be considered missing. """ if value is missing_: if hasattr(self, 'required') and self.required: default_message = 'Missing data for required field.' message = (default_message if isinstance(self.required, bool) else self.required) raise ValidationError(message) if value is None: if hasattr(self, 'allow_none') and self.allow_none is not True: default_message = 'Field may not be null.' message = (default_message if isinstance(self.allow_none, bool) else self.allow_none) raise ValidationError(message) def serialize(self, attr, obj, accessor=None): """Pulls the value for the given key from the object, applies the field's formatting and returns the result. :param str attr: The attibute or key to get from the object. :param str obj: The object to pull the key from. :param callable accessor: Function used to pull values from ``obj``. :raise ValidationError: In case of formatting problem """ if self._CHECK_ATTRIBUTE: value = self.get_value(attr, obj, accessor=accessor) if value is missing_: if hasattr(self, 'default'): if callable(self.default): return self.default() else: return self.default else: value = None return self._serialize(value, attr, obj) def deserialize(self, value): """Deserialize ``value``. :raise ValidationError: If an invalid value is passed or if a required value is missing. 
""" # Validate required fields, deserialize, then validate # deserialized value self._validate_missing(value) if getattr(self, 'allow_none', False) is True and value is None: return None output = self._deserialize(value) self._validate(output) return output # Methods for concrete classes to override. def _add_to_schema(self, field_name, schema): """Update field with values from its parent schema. Called by :meth:`__set_field_attrs <marshmallow.Schema.__set_field_attrs>`. :param str field_name: Field name set in schema :param Schema schema: Parent schema """ self.parent = self.parent or schema self.name = self.name or field_name def _serialize(self, value, attr, obj): """Serializes ``value`` to a basic Python datatype. Noop by default. Concrete :class:`Field` classes should implement this method. Example: :: class TitleCase(Field): def _serialize(self, value, attr, obj): if not value: return '' return unicode(value).title() :param value: The value to be serialized. :param str attr: The attribute or key on the object to be serialized. :param object obj: The object the value was pulled from. :raise ValidationError: In case of formatting or validation failure. """ return value def _deserialize(self, value): """Deserialize value. Concrete :class:`Field` classes should implement this method. :raise ValidationError: In case of formatting or validation failure. """ return value @property def context(self): """The context dictionary for the parent :class:`Schema`.""" return self.parent.context class Raw(Field): """Field that applies no formatting or validation.""" pass class Nested(Field): """Allows you to nest a :class:`Schema <marshmallow.Schema>` inside a field. 
Examples: :: user = fields.Nested(UserSchema) user2 = fields.Nested('UserSchema') # Equivalent to above collaborators = fields.Nested(UserSchema, many=True, only='id') parent = fields.Nested('self') :param Schema nested: The Schema class or class name (string) to nest, or ``"self"`` to nest the :class:`Schema` within itself. :param default: Default value to if attribute is missing or None :param tuple exclude: A list or tuple of fields to exclude. :param only: A tuple or string of the field(s) to marshal. If `None`, all fields will be marshalled. If a field name (string) is given, only a single value will be returned as output instead of a dictionary. This parameter takes precedence over ``exclude``. :param bool many: Whether the field is a collection of objects. :param kwargs: The same keyword arguments that :class:`Field` receives. """ def __init__(self, nested, default=missing_, exclude=tuple(), only=None, many=False, **kwargs): self.nested = nested self.only = only self.exclude = exclude self.many = many self.__schema = None # Cached Schema instance self.__updated_fields = False super(Nested, self).__init__(default=default, **kwargs) @property def schema(self): """The nested Schema object. .. 
versionchanged:: 1.0.0 Renamed from `serializer` to `schema` """ # Ensure that only parameter is a tuple if isinstance(self.only, basestring): only = (self.only, ) else: only = self.only if not self.__schema: if isinstance(self.nested, SchemaABC): self.__schema = self.nested elif isinstance(self.nested, type) and \ issubclass(self.nested, SchemaABC): self.__schema = self.nested(many=self.many, only=only, exclude=self.exclude) elif isinstance(self.nested, basestring): if self.nested == 'self': parent_class = self.parent.__class__ self.__schema = parent_class(many=self.many, only=only, exclude=self.exclude) else: schema_class = class_registry.get_class(self.nested) self.__schema = schema_class(many=self.many, only=only, exclude=self.exclude) else: raise ValueError('Nested fields must be passed a ' 'Schema, not {0}.'.format(self.nested.__class__)) self.__schema.ordered = getattr(self.parent, 'ordered', False) # Inherit context from parent self.__schema.context.update(getattr(self.parent, 'context', {})) return self.__schema def _serialize(self, nested_obj, attr, obj): # Load up the schema first. This allows a RegistryError to be raised # if an invalid schema name was passed schema = self.schema if nested_obj is None: return None if not self.__updated_fields: schema._update_fields(obj=nested_obj, many=self.many) self.__updated_fields = True ret = schema.dump(nested_obj, many=self.many, update_fields=not self.__updated_fields).data if isinstance(self.only, basestring): # self.only is a field name if self.many: return utils.pluck(ret, key=self.only) else: return ret[self.only] return ret def _deserialize(self, value): data, errors = self.schema.load(value) if errors: raise ValidationError(errors) return data class List(Field): """A list field, composed with another `Field` class or instance. Example: :: numbers = fields.List(fields.Float) :param Field cls_or_instance: A field class or instance. :param bool default: Default value for serialization. 
:param kwargs: The same keyword arguments that :class:`Field` receives. .. versionchanged:: 2.0.0 The ``allow_none`` parameter now applies to deserialization and has the same semantics as the other fields. """ def __init__(self, cls_or_instance, **kwargs): super(List, self).__init__(**kwargs) if isinstance(cls_or_instance, type): if not issubclass(cls_or_instance, FieldABC): raise ValueError('The type of the list elements ' 'must be a subclass of ' 'marshmallow.base.FieldABC') self.container = cls_or_instance() else: if not isinstance(cls_or_instance, FieldABC): raise ValueError('The instances of the list ' 'elements must be of type ' 'marshmallow.base.FieldABC') self.container = cls_or_instance def get_value(self, attr, obj, accessor=None): """Return the value for a given key from an object.""" value = super(List, self).get_value(attr, obj, accessor=accessor) if self.container.attribute: if utils.is_collection(value): return [ self.container.get_value(self.container.attribute, each) for each in value ] return self.container.get_value(self.container.attribute, value) return value def _add_to_schema(self, field_name, schema): super(List, self)._add_to_schema(field_name, schema) self.container._add_to_schema(field_name, schema) def _serialize(self, value, attr, obj): if value is None: return None if utils.is_collection(value): return [self.container._serialize(each, attr, obj) for each in value] return [self.container._serialize(value, attr, obj)] def _deserialize(self, value): if utils.is_indexable_but_not_string(value) and not isinstance(value, dict): # Convert all instances in typed list to container type return [self.container.deserialize(each) for each in value] if value is None: return [] return [self.container.deserialize(value)] class String(Field): """A string field. :param kwargs: The same keyword arguments that :class:`Field` receives. 
""" def __init__(self, *args, **kwargs): return super(String, self).__init__(*args, **kwargs) def _serialize(self, value, attr, obj): if value is None: return None return utils.ensure_text_type(value) def _deserialize(self, value): result = utils.ensure_text_type(value) return result class UUID(String): """A UUID field.""" def _deserialize(self, value): msg = 'Could not deserialize {0!r} to a UUID object.'.format(value) err = ValidationError(getattr(self, 'error', None) or msg) try: return uuid.UUID(value) except (ValueError, AttributeError): raise err class Number(Field): """Base class for number fields. :param bool as_string: If True, format the serialized value as a string. :param kwargs: The same keyword arguments that :class:`Field` receives. """ num_type = float def __init__(self, as_string=False, **kwargs): self.as_string = as_string super(Number, self).__init__(**kwargs) def _format_num(self, value): """Return the number value for value, given this field's `num_type`.""" if value is None: return None return self.num_type(value) def _validated(self, value): """Format the value or raise a :exc:`ValidationError` if an error occurs.""" try: return self._format_num(value) except (TypeError, ValueError) as err: raise ValidationError(getattr(self, 'error', None) or text_type(err)) def serialize(self, attr, obj, accessor=None): """Pulls the value for the given key from the object and returns the serialized number representation. Return a string if `self.as_string=True`, othewise return this field's `num_type`. Receives the same `args` and `kwargs` as `Field`. """ ret = Field.serialize(self, attr, obj, accessor=accessor) return str(ret) if (self.as_string and ret is not None) else ret def _serialize(self, value, attr, obj): return self._validated(value) def _deserialize(self, value): return self._validated(value) class Integer(Number): """An integer field. :param kwargs: The same keyword arguments that :class:`Number` receives. 
""" num_type = int class Decimal(Number): """A field that (de)serializes to the Python ``decimal.Decimal`` type. It's safe to use when dealing with money values, percentages, ratios or other numbers where precision is critical. .. warning:: This field serializes to a `decimal.Decimal` object by default. If you need to render your data as JSON, keep in mind that the `json` module from the standard library does not encode `decimal.Decimal`. Therefore, you must use a JSON library that can handle decimals, such as `simplejson`, or serialize to a string by passing ``as_string=True``. :param int places: How many decimal places to quantize the value. If `None`, does not quantize the value. :param rounding: How to round the value during quantize, for example `decimal.ROUND_UP`. If None, uses the rounding value from the current thread's context. :param bool allow_nan: If `True`, `NaN`, `Infinity` and `-Infinity` are allowed, even though they are illegal according to the JSON specification. :param bool as_string: If True, serialize to a string instead of a Python `decimal.Decimal` type. :param kwargs: The same keyword arguments that :class:`Number` receives. .. versionadded:: 1.2.0 """ num_type = decimal.Decimal def __init__(self, places=None, rounding=None, allow_nan=False, as_string=False, **kwargs): self.places = decimal.Decimal((0, (1,), -places)) if places is not None else None self.rounding = rounding self.allow_nan = allow_nan super(Decimal, self).__init__(as_string=as_string, **kwargs) # override Number def _format_num(self, value): if value is None: return None num = decimal.Decimal(value) if self.allow_nan: if num.is_nan(): return decimal.Decimal('NaN') # avoid sNaN, -sNaN and -NaN else: if num.is_nan() or num.is_infinite(): raise ValidationError( getattr(self, 'error', None) or 'Special numeric values are not permitted.' 
) if self.places is not None and num.is_finite(): num = num.quantize(self.places, rounding=self.rounding) return num # override Number def _validated(self, value): try: return super(Decimal, self)._validated(value) except decimal.InvalidOperation: raise ValidationError( getattr(self, 'error', None) or 'Invalid decimal value.' ) class Boolean(Field): """A boolean field. :param kwargs: The same keyword arguments that :class:`Field` receives. """ #: Values that will deserialize to `True`. If an empty set, any non-falsy # value will deserialize to `True`. truthy = set() #: Values that will deserialize to `False`. falsy = set(['False', 'false', '0', 'null', 'None']) def _serialize(self, value, attr, obj): if value is None: return None return bool(value) def _deserialize(self, value): if not value: return False try: value_str = text_type(value) except TypeError as error: msg = getattr(self, 'error', None) or text_type(error) raise ValidationError(msg) if value_str in self.falsy: return False elif self.truthy: if value_str in self.truthy: return True else: default_message = '{0!r} is not in {1} nor {2}'.format( value_str, self.truthy, self.falsy ) msg = getattr(self, 'error', None) or default_message raise ValidationError(msg) return True class FormattedString(Field): """Interpolate other values from the object into this field. The syntax for the source string is the same as the string `str.format` method from the python stdlib. 
:: class UserSchema(Schema): name = fields.String() greeting = fields.FormattedString('Hello {name}') ser = UserSchema() res = ser.dump(user) res.data # => {'name': 'Monty', 'greeting': 'Hello Monty'} """ def __init__(self, src_str, *args, **kwargs): Field.__init__(self, *args, **kwargs) self.src_str = text_type(src_str) def _serialize(self, value, attr, obj): try: data = utils.to_marshallable_type(obj) return self.src_str.format(**data) except (TypeError, IndexError) as error: raise ValidationError(getattr(self, 'error', None) or error) class Float(Number): """ A double as IEEE-754 double precision string. :param bool as_string: If True, format the value as a string. :param kwargs: The same keyword arguments that :class:`Number` receives. """ num_type = float class Arbitrary(Number): """A floating point number with an arbitrary precision, formatted as as string. ex: 634271127864378216478362784632784678324.23432 :param args: The same positional arguments that :class:`Number` receives. :param kwargs: The same keyword arguments that :class:`Number` receives. .. deprecated:: 1.2.0 Use `Decimal` instead. """ # No as_string param def __init__(self, *args, **kwargs): warnings.warn( 'The Arbitrary field is deprecated. Use the Decimal field instead.', category=DeprecationWarning ) super(Arbitrary, self).__init__(*args, **kwargs) def _validated(self, value): """Format ``value`` or raise ``exception_class`` if an error occurs.""" try: if value is None: return self.default return text_type(utils.float_to_decimal(float(value))) except ValueError as ve: raise ValidationError(text_type(ve)) def _serialize(self, value, attr, obj): return self._validated(value) def _deserialize(self, value): return self._validated(value) class DateTime(Field): """A formatted datetime string in UTC. Example: ``'2014-12-22T03:12:58.019077+00:00'`` :param str format: Either ``"rfc"`` (for RFC822), ``"iso"`` (for ISO8601), or a date format string. If `None`, defaults to "iso". 
:param kwargs: The same keyword arguments that :class:`Field` receives. """ DATEFORMAT_SERIALIZATION_FUNCS = { 'iso': utils.isoformat, 'iso8601': utils.isoformat, 'rfc': utils.rfcformat, 'rfc822': utils.rfcformat, } DATEFORMAT_DESERIALIZATION_FUNCS = { 'iso': utils.from_iso, 'iso8601': utils.from_iso, 'rfc': utils.from_rfc, 'rfc822': utils.from_rfc, } DEFAULT_FORMAT = 'iso' localtime = False def __init__(self, format=None, **kwargs): super(DateTime, self).__init__(**kwargs) # Allow this to be None. It may be set later in the ``_serialize`` # or ``_desrialize`` methods This allows a Schema to dynamically set the # dateformat, e.g. from a Meta option self.dateformat = format def _add_to_schema(self, field_name, schema): super(DateTime, self)._add_to_schema(field_name, schema) self.dateformat = self.dateformat or schema.opts.dateformat def _serialize(self, value, attr, obj): if value is None: return None self.dateformat = self.dateformat or self.DEFAULT_FORMAT format_func = self.DATEFORMAT_SERIALIZATION_FUNCS.get(self.dateformat, None) if format_func: try: return format_func(value, localtime=self.localtime) except (AttributeError, ValueError) as err: raise ValidationError(getattr(self, 'error', None) or text_type(err)) else: return value.strftime(self.dateformat) def _deserialize(self, value): msg = 'Could not deserialize {0!r} to a datetime object.'.format(value) err = ValidationError(getattr(self, 'error', None) or msg) if not value: # Falsy values, e.g. 
'', None, [] are not valid raise err self.dateformat = self.dateformat or self.DEFAULT_FORMAT func = self.DATEFORMAT_DESERIALIZATION_FUNCS.get(self.dateformat) if func: try: return func(value) except (TypeError, AttributeError, ValueError): raise err elif utils.dateutil_available: try: return utils.from_datestring(value) except TypeError: raise err else: warnings.warn('It is recommended that you install python-dateutil ' 'for improved datetime deserialization.') raise err class LocalDateTime(DateTime): """A formatted datetime string in localized time, relative to UTC. ex. ``"Sun, 10 Nov 2013 08:23:45 -0600"`` Takes the same arguments as :class:`DateTime <marshmallow.fields.DateTime>`. """ localtime = True class Time(Field): """ISO8601-formatted time string. :param kwargs: The same keyword arguments that :class:`Field` receives. """ def _serialize(self, value, attr, obj): if value is None: return None try: ret = value.isoformat() except AttributeError: msg = '{0!r} cannot be formatted as a time.'.format(value) raise ValidationError(getattr(self, 'error', None) or msg) if value.microsecond: return ret[:12] return ret def _deserialize(self, value): """Deserialize an ISO8601-formatted time to a :class:`datetime.time` object.""" msg = 'Could not deserialize {0!r} to a time object.'.format(value) err = ValidationError(getattr(self, 'error', None) or msg) if not value: # falsy values are invalid raise err try: return utils.from_iso_time(value) except (AttributeError, TypeError, ValueError): raise err class Date(Field): """ISO8601-formatted date string. :param kwargs: The same keyword arguments that :class:`Field` receives. 
""" def _serialize(self, value, attr, obj): if value is None: return None try: return value.isoformat() except AttributeError: msg = '{0} cannot be formatted as a date.'.format(repr(value)) raise ValidationError(getattr(self, 'error', None) or msg) return value def _deserialize(self, value): """Deserialize an ISO8601-formatted date string to a :class:`datetime.date` object. """ msg = 'Could not deserialize {0!r} to a date object.'.format(value) err = ValidationError(getattr(self, 'error', None) or msg) if not value: # falsy values are invalid raise err try: return utils.from_iso_date(value) except (AttributeError, TypeError, ValueError): raise err class TimeDelta(Field): """A field that (de)serializes a :class:`datetime.timedelta` object to an integer and vice versa. The integer can represent the number of days, seconds or microseconds. :param str precision: Influences how the integer is interpreted during (de)serialization. Must be 'days', 'seconds' or 'microseconds'. :param str error: Error message stored upon validation failure. :param kwargs: The same keyword arguments that :class:`Field` receives. .. versionchanged:: 2.0.0 Always serializes to an integer value to avoid rounding errors. Add `precision` parameter. 
""" DAYS = 'days' SECONDS = 'seconds' MICROSECONDS = 'microseconds' def __init__(self, precision='seconds', error=None, **kwargs): precision = precision.lower() units = (self.DAYS, self.SECONDS, self.MICROSECONDS) if precision not in units: msg = 'The precision must be "{0}", "{1}" or "{2}".'.format(*units) raise ValueError(msg) self.precision = precision super(TimeDelta, self).__init__(error=error, **kwargs) def _serialize(self, value, attr, obj): if value is None: return None try: days = value.days if self.precision == self.DAYS: return days else: seconds = days * 86400 + value.seconds if self.precision == self.SECONDS: return seconds else: # microseconds return seconds * 10**6 + value.microseconds # flake8: noqa except AttributeError: msg = '{0!r} cannot be formatted as a timedelta.'.format(value) raise ValidationError(getattr(self, 'error', None) or msg) def _deserialize(self, value): try: value = int(value) except (TypeError, ValueError): msg = '{0!r} cannot be interpreted as a valid period of time.'.format(value) raise ValidationError(getattr(self, 'error', None) or msg) kwargs = {self.precision: value} try: return dt.timedelta(**kwargs) except OverflowError: msg = '{0!r} cannot be interpreted as a valid period of time.'.format(value) raise ValidationError(getattr(self, 'error', None) or msg) class Fixed(Number): """A fixed-precision number as a string. :param kwargs: The same keyword arguments that :class:`Number` receives. .. deprecated:: 1.2.0 Use `Decimal` instead. """ def __init__(self, decimals=5, *args, **kwargs): warnings.warn( 'The Fixed field is deprecated. Use the Decimal field instead.', category=DeprecationWarning ) super(Fixed, self).__init__(*args, **kwargs) self.precision = decimal.Decimal('0.' 
+ '0' * (decimals - 1) + '1') def _validated(self, value): if value is None: return None try: dvalue = utils.float_to_decimal(float(value)) except (TypeError, ValueError) as err: raise ValidationError(getattr(self, 'error', None) or text_type(err)) if not dvalue.is_normal() and dvalue != utils.ZERO_DECIMAL: raise ValidationError( getattr(self, 'error', None) or 'Invalid Fixed precision number.' ) return utils.decimal_to_fixed(dvalue, self.precision) class Price(Fixed): """A Price field with fixed precision. :param kwargs: The same keyword arguments that :class:`Fixed` receives. .. deprecated:: 1.2.0 Use `Decimal` instead. """ def __init__(self, decimals=2, **kwargs): warnings.warn( 'The Price field is deprecated. Use the Decimal field for dealing with ' 'money values.', category=DeprecationWarning ) super(Price, self).__init__(decimals=decimals, **kwargs) class ValidatedField(Field): """A field that validates input on serialization.""" def _validated(self, value): raise NotImplementedError('Must implement _validate method') def _serialize(self, value, *args, **kwargs): ret = super(ValidatedField, self)._serialize(value, *args, **kwargs) return self._validated(ret) class Url(ValidatedField, String): """A validated URL field. Validation occurs during both serialization and deserialization. :param default: Default value for the field if the attribute is not set. :param str attribute: The name of the attribute to get the value from. If `None`, assumes the attribute has the same name as the field. :param bool relative: Allow relative URLs. :param kwargs: The same keyword arguments that :class:`String` receives. """ def __init__(self, relative=False, **kwargs): String.__init__(self, **kwargs) self.relative = relative # Insert validation into self.validators so that multiple errors can be # stored. 
self.validators.insert(0, validate.URL( relative=self.relative, error=getattr(self, 'error') )) def _validated(self, value): if value is None: return None return validate.URL( relative=self.relative, error=getattr(self, 'error') )(value) class Email(ValidatedField, String): """A validated email field. Validation occurs during both serialization and deserialization. :param args: The same positional arguments that :class:`String` receives. :param kwargs: The same keyword arguments that :class:`String` receives. """ def __init__(self, *args, **kwargs): String.__init__(self, *args, **kwargs) # Insert validation into self.validators so that multiple errors can be # stored. self.validators.insert(0, validate.Email(error=getattr(self, 'error'))) def _validated(self, value): if value is None: return None return validate.Email( error=getattr(self, 'error') )(value) class Method(Field): """A field that takes the value returned by a `Schema` method. :param str method_name: The name of the Schema method from which to retrieve the value. The method must take an argument ``obj`` (in addition to self) that is the object to be serialized. :param str deserialize: Optional name of the Schema method for deserializing a value The method must take a single argument ``value``, which is the value to deserialize. .. versionchanged:: 2.0.0 Removed optional ``context`` parameter on methods. Use ``self.context`` instead. 
""" _CHECK_ATTRIBUTE = False def __init__(self, method_name, deserialize=None, **kwargs): self.method_name = method_name if deserialize: self.deserialize_method_name = deserialize else: self.deserialize_method_name = None super(Method, self).__init__(**kwargs) def _serialize(self, value, attr, obj): method = utils.callable_or_raise(getattr(self.parent, self.method_name, None)) try: return method(obj) except AttributeError: pass return missing_ def _deserialize(self, value): if self.deserialize_method_name: try: method = utils.callable_or_raise( getattr(self.parent, self.deserialize_method_name, None) ) return method(value) except AttributeError: pass return value class Function(Field): """A field that takes the value returned by a function. :param callable func: A callable from which to retrieve the value. The function must take a single argument ``obj`` which is the object to be serialized. It can also optionally take a ``context`` argument, which is a dictionary of context variables passed to the serializer. :param callable deserialize: Deserialization function that takes the value to be deserialized as its only argument. 
""" _CHECK_ATTRIBUTE = False def __init__(self, func, deserialize=None, **kwargs): super(Function, self).__init__(**kwargs) self.func = utils.callable_or_raise(func) if deserialize: self.deserialize_func = utils.callable_or_raise(deserialize) else: self.deserialize_func = None def _serialize(self, value, attr, obj): try: if len(utils.get_func_args(self.func)) > 1: if self.parent.context is None: msg = 'No context available for Function field {0!r}'.format(attr) raise ValidationError(msg) return self.func(obj, self.parent.context) else: return self.func(obj) except AttributeError: # the object is not expected to have the attribute pass return missing_ def _deserialize(self, value): if self.deserialize_func: return self.deserialize_func(value) return value class Select(Field): """A field that provides a set of values which an attribute must be contrained to. :param choices: A list of valid values. :param kwargs: The same keyword arguments that :class:`Field` receives. :raise: ValidationError if attribute's value is not one of the given choices. """ def __init__(self, choices, **kwargs): warnings.warn( 'The Select field is deprecated. Use the ' 'marshmallow.validate.OneOf validator ' 'instead.', category=DeprecationWarning ) self.choices = choices return super(Select, self).__init__(**kwargs) def _validated(self, value): if value not in self.choices: raise ValidationError( getattr(self, 'error', None) or "{0!r} is not a valid choice for this field.".format(value) ) return value def _serialize(self, value, attr, obj): return self._validated(value) def _deserialize(self, value): return self._validated(value) class QuerySelect(Field): """A field that (de)serializes an ORM-mapped object to its primary (or otherwise unique) key and vice versa. A nonexistent key will result in a validation error. This field is ORM-agnostic. 
Example: :: query = session.query(User).order_by(User.id).all keygetter = 'id' field = fields.QuerySelect(query, keygetter) :param callable query: The query which will be executed at each (de)serialization to find the list of valid objects and keys. :param keygetter: Can be a callable or a string. In the former case, it must be a one-argument callable which returns a unique comparable key. In the latter case, the string specifies the name of an attribute of the ORM-mapped object. :param str error: Error message stored upon validation failure. :param kwargs: The same keyword arguments that :class:`Field` receives. .. versionadded:: 1.2.0 """ def __init__(self, query, keygetter, **kwargs): self.query = query self.keygetter = keygetter if callable(keygetter) else attrgetter(keygetter) super(QuerySelect, self).__init__(**kwargs) def keys(self): """Return a generator over the valid keys.""" return (self.keygetter(item) for item in self.query()) def results(self): """Return a generator over the query results.""" return (item for item in self.query()) def pairs(self): """Return a generator over the (key, result) pairs.""" return ((self.keygetter(item), item) for item in self.query()) def labels(self, labelgetter=text_type): """Return a generator over the (key, label) pairs, where label is a string associated with each query result. This convenience method is useful to populate, for instance, a form select field. :param labelgetter: Can be a callable or a string. In the former case, it must be a one-argument callable which returns the label text. In the latter case, the string specifies the name of an attribute of the ORM-mapped object. If not provided the ORM-mapped object's `__str__` or `__unicode__` method will be used. 
""" labelgetter = labelgetter if callable(labelgetter) else attrgetter(labelgetter) return ((self.keygetter(item), labelgetter(item)) for item in self.query()) def _serialize(self, value, attr, obj): value = self.keygetter(value) for key in self.keys(): if key == value: return value error = getattr(self, 'error', None) or 'Invalid object.' raise ValidationError(error) def _deserialize(self, value): for key, result in self.pairs(): if key == value: return result error = getattr(self, 'error', None) or 'Invalid key.' raise ValidationError(error) class QuerySelectList(QuerySelect): """A field that (de)serializes a list of ORM-mapped objects to a list of their primary (or otherwise unique) keys and vice versa. If any of the items in the list cannot be found in the query, this will result in a validation error. This field is ORM-agnostic. :param callable query: Same as :class:`QuerySelect`. :param keygetter: Same as :class:`QuerySelect`. :param str error: Error message stored upon validation failure. :param kwargs: The same keyword arguments that :class:`Field` receives. .. versionadded:: 1.2.0 """ def _serialize(self, value, attr, obj): items = [self.keygetter(v) for v in value] if not items: return [] keys = list(self.keys()) for item in items: try: keys.remove(item) except ValueError: error = getattr(self, 'error', None) or 'Invalid objects.' raise ValidationError(error) return items def _deserialize(self, value): if not value: return [] keys, results = (list(t) for t in zip(*self.pairs())) items = [] for val in value: try: index = keys.index(val) except ValueError: error = getattr(self, 'error', None) or 'Invalid keys.' raise ValidationError(error) else: del keys[index] items.append(results.pop(index)) return items class Constant(Field): """A field that (de)serializes to a preset constant. If you only want the constant added for serialization or deserialization, you should use ``dump_only=True`` or ``load_only=True`` respectively. 
:param constant: The constant to return for the field attribute. .. versionadded:: 2.0.0 """ _CHECK_ATTRIBUTE = False def __init__(self, constant, **kwargs): super(Constant, self).__init__(**kwargs) self.constant = constant def _serialize(self, value, *args, **kwargs): return self.constant def _deserialize(self, value): return self.constant # Aliases URL = Url Enum = Select Str = String Bool = Boolean Int = Integer
mit
albertomurillo/ansible
lib/ansible/modules/cloud/ovirt/ovirt_vm.py
9
110024
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2017, Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: ovirt_vm short_description: Module to manage Virtual Machines in oVirt/RHV version_added: "2.2" author: - Ondra Machacek (@machacekondra) description: - This module manages whole lifecycle of the Virtual Machine(VM) in oVirt/RHV. - Since VM can hold many states in oVirt/RHV, this see notes to see how the states of the VM are handled. options: name: description: - Name of the Virtual Machine to manage. - If VM don't exists C(name) is required. Otherwise C(id) or C(name) can be used. id: description: - ID of the Virtual Machine to manage. state: description: - Should the Virtual Machine be running/stopped/present/absent/suspended/next_run/registered/exported. When C(state) is I(registered) and the unregistered VM's name belongs to an already registered in engine VM in the same DC then we fail to register the unregistered template. - I(present) state will create/update VM and don't change its state if it already exists. - I(running) state will create/update VM and start it. - I(next_run) state updates the VM and if the VM has next run configuration it will be rebooted. - Please check I(notes) to more detailed description of states. - I(exported) state will export the VM to export domain or as OVA. - I(registered) is supported since 2.4. choices: [ absent, next_run, present, registered, running, stopped, suspended, exported ] default: present cluster: description: - Name of the cluster, where Virtual Machine should be created. - Required if creating VM. allow_partial_import: description: - Boolean indication whether to allow partial registration of Virtual Machine when C(state) is registered. 
type: bool version_added: "2.4" vnic_profile_mappings: description: - "Mapper which maps an external virtual NIC profile to one that exists in the engine when C(state) is registered. vnic_profile is described by the following dictionary:" suboptions: source_network_name: description: - The network name of the source network. source_profile_name: description: - The profile name related to the source network. target_profile_id: description: - The id of the target profile id to be mapped to in the engine. version_added: "2.5" cluster_mappings: description: - "Mapper which maps cluster name between VM's OVF and the destination cluster this VM should be registered to, relevant when C(state) is registered. Cluster mapping is described by the following dictionary:" suboptions: source_name: description: - The name of the source cluster. dest_name: description: - The name of the destination cluster. version_added: "2.5" role_mappings: description: - "Mapper which maps role name between VM's OVF and the destination role this VM should be registered to, relevant when C(state) is registered. Role mapping is described by the following dictionary:" suboptions: source_name: description: - The name of the source role. dest_name: description: - The name of the destination role. version_added: "2.5" domain_mappings: description: - "Mapper which maps aaa domain name between VM's OVF and the destination aaa domain this VM should be registered to, relevant when C(state) is registered. The aaa domain mapping is described by the following dictionary:" suboptions: source_name: description: - The name of the source aaa domain. dest_name: description: - The name of the destination aaa domain. version_added: "2.5" affinity_group_mappings: description: - "Mapper which maps affinty name between VM's OVF and the destination affinity this VM should be registered to, relevant when C(state) is registered." 
version_added: "2.5" affinity_label_mappings: description: - "Mappper which maps affinity label name between VM's OVF and the destination label this VM should be registered to, relevant when C(state) is registered." version_added: "2.5" lun_mappings: description: - "Mapper which maps lun between VM's OVF and the destination lun this VM should contain, relevant when C(state) is registered. lun_mappings is described by the following dictionary: - C(logical_unit_id): The logical unit number to identify a logical unit, - C(logical_unit_port): The port being used to connect with the LUN disk. - C(logical_unit_portal): The portal being used to connect with the LUN disk. - C(logical_unit_address): The address of the block storage host. - C(logical_unit_target): The iSCSI specification located on an iSCSI server - C(logical_unit_username): Username to be used to connect to the block storage host. - C(logical_unit_password): Password to be used to connect to the block storage host. - C(storage_type): The storage type which the LUN reside on (iscsi or fcp)" version_added: "2.5" reassign_bad_macs: description: - "Boolean indication whether to reassign bad macs when C(state) is registered." type: bool version_added: "2.5" template: description: - Name of the template, which should be used to create Virtual Machine. - Required if creating VM. - If template is not specified and VM doesn't exist, VM will be created from I(Blank) template. template_version: description: - Version number of the template to be used for VM. - By default the latest available version of the template is used. version_added: "2.3" use_latest_template_version: description: - Specify if latest template version should be used, when running a stateless VM. - If this parameter is set to I(yes) stateless VM is created. type: bool version_added: "2.3" storage_domain: description: - Name of the storage domain where all template disks should be created. 
- This parameter is considered only when C(template) is provided. - IMPORTANT - This parameter is not idempotent, if the VM exists and you specfiy different storage domain, disk won't move. version_added: "2.4" disk_format: description: - Specify format of the disk. - If C(cow) format is used, disk will by created as sparse, so space will be allocated for the volume as needed, also known as I(thin provision). - If C(raw) format is used, disk storage will be allocated right away, also known as I(preallocated). - Note that this option isn't idempotent as it's not currently possible to change format of the disk via API. - This parameter is considered only when C(template) and C(storage domain) is provided. choices: [ cow, raw ] default: cow version_added: "2.4" memory: description: - Amount of memory of the Virtual Machine. Prefix uses IEC 60027-2 standard (for example 1GiB, 1024MiB). - Default value is set by engine. memory_guaranteed: description: - Amount of minimal guaranteed memory of the Virtual Machine. Prefix uses IEC 60027-2 standard (for example 1GiB, 1024MiB). - C(memory_guaranteed) parameter can't be lower than C(memory) parameter. - Default value is set by engine. memory_max: description: - Upper bound of virtual machine memory up to which memory hot-plug can be performed. Prefix uses IEC 60027-2 standard (for example 1GiB, 1024MiB). - Default value is set by engine. version_added: "2.5" cpu_shares: description: - Set a CPU shares for this Virtual Machine. - Default value is set by oVirt/RHV engine. cpu_cores: description: - Number of virtual CPUs cores of the Virtual Machine. - Default value is set by oVirt/RHV engine. cpu_sockets: description: - Number of virtual CPUs sockets of the Virtual Machine. - Default value is set by oVirt/RHV engine. cpu_threads: description: - Number of virtual CPUs sockets of the Virtual Machine. - Default value is set by oVirt/RHV engine. version_added: "2.5" type: description: - Type of the Virtual Machine. 
- Default value is set by oVirt/RHV engine. - I(high_performance) is supported since Ansible 2.5 and oVirt/RHV 4.2. choices: [ desktop, server, high_performance ] quota_id: description: - "Virtual Machine quota ID to be used for disk. By default quota is chosen by oVirt/RHV engine." version_added: "2.5" operating_system: description: - Operating system of the Virtual Machine. - Default value is set by oVirt/RHV engine. - "Possible values: debian_7, freebsd, freebsdx64, other, other_linux, other_linux_ppc64, other_ppc64, rhel_3, rhel_4, rhel_4x64, rhel_5, rhel_5x64, rhel_6, rhel_6x64, rhel_6_ppc64, rhel_7x64, rhel_7_ppc64, sles_11, sles_11_ppc64, ubuntu_12_04, ubuntu_12_10, ubuntu_13_04, ubuntu_13_10, ubuntu_14_04, ubuntu_14_04_ppc64, windows_10, windows_10x64, windows_2003, windows_2003x64, windows_2008, windows_2008x64, windows_2008r2x64, windows_2008R2x64, windows_2012x64, windows_2012R2x64, windows_7, windows_7x64, windows_8, windows_8x64, windows_xp" boot_devices: description: - List of boot devices which should be used to boot. For example C([ cdrom, hd ]). - Default value is set by oVirt/RHV engine. choices: [ cdrom, hd, network ] boot_menu: description: - "I(True) enable menu to select boot device, I(False) to disable it. By default is chosen by oVirt/RHV engine." type: bool version_added: "2.5" usb_support: description: - "I(True) enable USB support, I(False) to disable it. By default is chosen by oVirt/RHV engine." type: bool version_added: "2.5" serial_console: description: - "I(True) enable VirtIO serial console, I(False) to disable it. By default is chosen by oVirt/RHV engine." type: bool version_added: "2.5" sso: description: - "I(True) enable Single Sign On by Guest Agent, I(False) to disable it. By default is chosen by oVirt/RHV engine." type: bool version_added: "2.5" host: description: - Specify host where Virtual Machine should be running. By default the host is chosen by engine scheduler. 
- This parameter is used only when C(state) is I(running) or I(present). high_availability: description: - If I(yes) Virtual Machine will be set as highly available. - If I(no) Virtual Machine won't be set as highly available. - If no value is passed, default value is set by oVirt/RHV engine. type: bool high_availability_priority: description: - Indicates the priority of the virtual machine inside the run and migration queues. Virtual machines with higher priorities will be started and migrated before virtual machines with lower priorities. The value is an integer between 0 and 100. The higher the value, the higher the priority. - If no value is passed, default value is set by oVirt/RHV engine. version_added: "2.5" lease: description: - Name of the storage domain this virtual machine lease reside on. - NOTE - Supported since oVirt 4.1. version_added: "2.4" custom_compatibility_version: description: - "Enables a virtual machine to be customized to its own compatibility version. If 'C(custom_compatibility_version)' is set, it overrides the cluster's compatibility version for this particular virtual machine." version_added: "2.7" host_devices: description: - Single Root I/O Virtualization - technology that allows single device to expose multiple endpoints that can be passed to VMs - host_devices is an list which contain dictinary with name and state of device version_added: "2.7" delete_protected: description: - If I(yes) Virtual Machine will be set as delete protected. - If I(no) Virtual Machine won't be set as delete protected. - If no value is passed, default value is set by oVirt/RHV engine. type: bool stateless: description: - If I(yes) Virtual Machine will be set as stateless. - If I(no) Virtual Machine will be unset as stateless. - If no value is passed, default value is set by oVirt/RHV engine. type: bool clone: description: - If I(yes) then the disks of the created virtual machine will be cloned and independent of the template. 
- This parameter is used only when C(state) is I(running) or I(present) and VM didn't exist before. type: bool default: 'no' clone_permissions: description: - If I(yes) then the permissions of the template (only the direct ones, not the inherited ones) will be copied to the created virtual machine. - This parameter is used only when C(state) is I(running) or I(present) and VM didn't exist before. type: bool default: 'no' cd_iso: description: - ISO file from ISO storage domain which should be attached to Virtual Machine. - If you pass empty string the CD will be ejected from VM. - If used with C(state) I(running) or I(present) and VM is running the CD will be attached to VM. - If used with C(state) I(running) or I(present) and VM is down the CD will be attached to VM persistently. force: description: - Please check to I(Synopsis) to more detailed description of force parameter, it can behave differently in different situations. type: bool default: 'no' nics: description: - List of NICs, which should be attached to Virtual Machine. NIC is described by following dictionary. suboptions: name: description: - Name of the NIC. profile_name: description: - Profile name where NIC should be attached. interface: description: - Type of the network interface. choices: ['virtio', 'e1000', 'rtl8139'] default: 'virtio' mac_address: description: - Custom MAC address of the network interface, by default it's obtained from MAC pool. - "NOTE - This parameter is used only when C(state) is I(running) or I(present) and is able to only create NICs. To manage NICs of the VM in more depth please use M(ovirt_nics) module instead." disks: description: - List of disks, which should be attached to Virtual Machine. Disk is described by following dictionary. suboptions: name: description: - Name of the disk. Either C(name) or C(id) is required. id: description: - ID of the disk. Either C(name) or C(id) is required. interface: description: - Interface of the disk. 
choices: ['virtio', 'IDE'] default: 'virtio' bootable: description: - I(True) if the disk should be bootable, default is non bootable. type: bool activate: description: - I(True) if the disk should be activated, default is activated. - "NOTE - This parameter is used only when C(state) is I(running) or I(present) and is able to only attach disks. To manage disks of the VM in more depth please use M(ovirt_disks) module instead." type: bool sysprep: description: - Dictionary with values for Windows Virtual Machine initialization using sysprep. suboptions: host_name: description: - Hostname to be set to Virtual Machine when deployed. active_directory_ou: description: - Active Directory Organizational Unit, to be used for login of user. org_name: description: - Organization name to be set to Windows Virtual Machine. domain: description: - Domain to be set to Windows Virtual Machine. timezone: description: - Timezone to be set to Windows Virtual Machine. ui_language: description: - UI language of the Windows Virtual Machine. system_locale: description: - System localization of the Windows Virtual Machine. input_locale: description: - Input localization of the Windows Virtual Machine. windows_license_key: description: - License key to be set to Windows Virtual Machine. user_name: description: - Username to be used for set password to Windows Virtual Machine. root_password: description: - Password to be set for username to Windows Virtual Machine. cloud_init: description: - Dictionary with values for Unix-like Virtual Machine initialization using cloud init. suboptions: host_name: description: - Hostname to be set to Virtual Machine when deployed. timezone: description: - Timezone to be set to Virtual Machine when deployed. user_name: description: - Username to be used to set password to Virtual Machine when deployed. root_password: description: - Password to be set for user specified by C(user_name) parameter. 
authorized_ssh_keys: description: - Use this SSH keys to login to Virtual Machine. regenerate_ssh_keys: description: - If I(True) SSH keys will be regenerated on Virtual Machine. type: bool custom_script: description: - Cloud-init script which will be executed on Virtual Machine when deployed. - This is appended to the end of the cloud-init script generated by any other options. dns_servers: description: - DNS servers to be configured on Virtual Machine. dns_search: description: - DNS search domains to be configured on Virtual Machine. nic_boot_protocol: description: - Set boot protocol of the network interface of Virtual Machine. choices: ['none', 'dhcp', 'static'] nic_ip_address: description: - If boot protocol is static, set this IP address to network interface of Virtual Machine. nic_netmask: description: - If boot protocol is static, set this netmask to network interface of Virtual Machine. nic_gateway: description: - If boot protocol is static, set this gateway to network interface of Virtual Machine. nic_name: description: - Set name to network interface of Virtual Machine. nic_on_boot: description: - If I(True) network interface will be set to start on boot. type: bool cloud_init_nics: description: - List of dictionaries representing network interfaces to be setup by cloud init. - This option is used, when user needs to setup more network interfaces via cloud init. - If one network interface is enough, user should use C(cloud_init) I(nic_*) parameters. C(cloud_init) I(nic_*) parameters are merged with C(cloud_init_nics) parameters. suboptions: nic_boot_protocol: description: - Set boot protocol of the network interface of Virtual Machine. Can be one of C(none), C(dhcp) or C(static). nic_ip_address: description: - If boot protocol is static, set this IP address to network interface of Virtual Machine. nic_netmask: description: - If boot protocol is static, set this netmask to network interface of Virtual Machine. 
nic_gateway: description: - If boot protocol is static, set this gateway to network interface of Virtual Machine. nic_name: description: - Set name to network interface of Virtual Machine. nic_on_boot: description: - If I(True) network interface will be set to start on boot. type: bool version_added: "2.3" cloud_init_persist: description: - "If I(yes) the C(cloud_init) or C(sysprep) parameters will be saved for the virtual machine and the virtual machine won't be started as run-once." type: bool version_added: "2.5" aliases: [ 'sysprep_persist' ] default: 'no' kernel_params_persist: description: - "If I(true) C(kernel_params), C(initrd_path) and C(kernel_path) will persist in virtual machine configuration, if I(False) it will be used for run once." type: bool version_added: "2.8" kernel_path: description: - Path to a kernel image used to boot the virtual machine. - Kernel image must be stored on either the ISO domain or on the host's storage. version_added: "2.3" initrd_path: description: - Path to an initial ramdisk to be used with the kernel specified by C(kernel_path) option. - Ramdisk image must be stored on either the ISO domain or on the host's storage. version_added: "2.3" kernel_params: description: - Kernel command line parameters (formatted as string) to be used with the kernel specified by C(kernel_path) option. version_added: "2.3" instance_type: description: - Name of virtual machine's hardware configuration. - By default no instance type is used. version_added: "2.3" description: description: - Description of the Virtual Machine. version_added: "2.3" comment: description: - Comment of the Virtual Machine. version_added: "2.3" timezone: description: - Sets time zone offset of the guest hardware clock. - For example C(Etc/GMT) version_added: "2.3" serial_policy: description: - Specify a serial number policy for the Virtual Machine. - Following options are supported. - C(vm) - Sets the Virtual Machine's UUID as its serial number. 
- C(host) - Sets the host's UUID as the Virtual Machine's serial number. - C(custom) - Allows you to specify a custom serial number in C(serial_policy_value). choices: ['vm', 'host', 'custom'] version_added: "2.3" serial_policy_value: description: - Allows you to specify a custom serial number. - This parameter is used only when C(serial_policy) is I(custom). version_added: "2.3" vmware: description: - Dictionary of values to be used to connect to VMware and import a virtual machine to oVirt. suboptions: username: description: - The username to authenticate against the VMware. password: description: - The password to authenticate against the VMware. url: description: - The URL to be passed to the I(virt-v2v) tool for conversion. - For example I(vpx://wmware_user@vcenter-host/DataCenter/Cluster/esxi-host?no_verify=1) drivers_iso: description: - The name of the ISO containing drivers that can be used during the I(virt-v2v) conversion process. sparse: description: - Specifies the disk allocation policy of the resulting virtual machine. I(true) for sparse, I(false) for preallocated. type: bool default: true storage_domain: description: - Specifies the target storage domain for converted disks. This is required parameter. version_added: "2.3" xen: description: - Dictionary of values to be used to connect to XEN and import a virtual machine to oVirt. suboptions: url: description: - The URL to be passed to the I(virt-v2v) tool for conversion. - For example I(xen+ssh://root@zen.server). This is required parameter. drivers_iso: description: - The name of the ISO containing drivers that can be used during the I(virt-v2v) conversion process. sparse: description: - Specifies the disk allocation policy of the resulting virtual machine. I(true) for sparse, I(false) for preallocated. type: bool default: true storage_domain: description: - Specifies the target storage domain for converted disks. This is required parameter. 
version_added: "2.3" kvm: description: - Dictionary of values to be used to connect to kvm and import a virtual machine to oVirt. suboptions: name: description: - The name of the KVM virtual machine. username: description: - The username to authenticate against the KVM. password: description: - The password to authenticate against the KVM. url: description: - The URL to be passed to the I(virt-v2v) tool for conversion. - For example I(qemu:///system). This is required parameter. drivers_iso: description: - The name of the ISO containing drivers that can be used during the I(virt-v2v) conversion process. sparse: description: - Specifies the disk allocation policy of the resulting virtual machine. I(true) for sparse, I(false) for preallocated. type: bool default: true storage_domain: description: - Specifies the target storage domain for converted disks. This is required parameter. version_added: "2.3" cpu_mode: description: - "CPU mode of the virtual machine. It can be some of the following: I(host_passthrough), I(host_model) or I(custom)." - "For I(host_passthrough) CPU type you need to set C(placement_policy) to I(pinned)." - "If no value is passed, default value is set by oVirt/RHV engine." version_added: "2.5" placement_policy: description: - "The configuration of the virtual machine's placement policy." - "If no value is passed, default value is set by oVirt/RHV engine." - "Placement policy can be one of the following values:" suboptions: migratable: description: - "Allow manual and automatic migration." pinned: description: - "Do not allow migration." user_migratable: description: - "Allow manual migration only." version_added: "2.5" ticket: description: - "If I(true), in addition return I(remote_vv_file) inside I(vm) dictionary, which contains compatible content for remote-viewer application. Works only C(state) is I(running)." version_added: "2.7" type: bool cpu_pinning: description: - "CPU Pinning topology to map virtual machine CPU to host CPU." 
- "CPU Pinning topology is a list of dictionary which can have following values:" suboptions: cpu: description: - "Number of the host CPU." vcpu: description: - "Number of the virtual machine CPU." version_added: "2.5" soundcard_enabled: description: - "If I(true), the sound card is added to the virtual machine." type: bool version_added: "2.5" smartcard_enabled: description: - "If I(true), use smart card authentication." type: bool version_added: "2.5" io_threads: description: - "Number of IO threads used by virtual machine. I(0) means IO threading disabled." version_added: "2.5" ballooning_enabled: description: - "If I(true), use memory ballooning." - "Memory balloon is a guest device, which may be used to re-distribute / reclaim the host memory based on VM needs in a dynamic way. In this way it's possible to create memory over commitment states." type: bool version_added: "2.5" numa_tune_mode: description: - "Set how the memory allocation for NUMA nodes of this VM is applied (relevant if NUMA nodes are set for this VM)." - "It can be one of the following: I(interleave), I(preferred) or I(strict)." - "If no value is passed, default value is set by oVirt/RHV engine." choices: ['interleave', 'preferred', 'strict'] version_added: "2.6" numa_nodes: description: - "List of vNUMA Nodes to set for this VM and pin them to assigned host's physical NUMA node." - "Each vNUMA node is described by following dictionary:" suboptions: index: description: - "The index of this NUMA node (mandatory)." memory: description: - "Memory size of the NUMA node in MiB (mandatory)." cores: description: - "list of VM CPU cores indexes to be included in this NUMA node (mandatory)." numa_node_pins: description: - "list of physical NUMA node indexes to pin this virtual NUMA node to." version_added: "2.6" rng_device: description: - "Random number generator (RNG). You can choose of one the following devices I(urandom), I(random) or I(hwrng)." 
- "In order to select I(hwrng), you must have it enabled on cluster first." - "/dev/urandom is used for cluster version >= 4.1, and /dev/random for cluster version <= 4.0" version_added: "2.5" custom_properties: description: - "Properties sent to VDSM to configure various hooks." - "Custom properties is a list of dictionary which can have following values:" suboptions: name: description: - "Name of the custom property. For example: I(hugepages), I(vhost), I(sap_agent), etc." regexp: description: - "Regular expression to set for custom property." value: description: - "Value to set for custom property." version_added: "2.5" watchdog: description: - "Assign watchdog device for the virtual machine." - "Watchdogs is a dictionary which can have following values:" suboptions: model: description: - "Model of the watchdog device. For example: I(i6300esb), I(diag288) or I(null)." action: description: - "Watchdog action to be performed when watchdog is triggered. For example: I(none), I(reset), I(poweroff), I(pause) or I(dump)." version_added: "2.5" graphical_console: description: - "Assign graphical console to the virtual machine." suboptions: headless_mode: description: - If I(true) disable the graphics console for this virtual machine. type: bool protocol: description: - Graphical protocol, a list of I(spice), I(vnc), or both. version_added: "2.5" exclusive: description: - "When C(state) is I(exported) this parameter indicates if the existing VM with the same name should be overwritten." version_added: "2.8" type: bool export_domain: description: - "When C(state) is I(exported)this parameter specifies the name of the export storage domain." version_added: "2.8" export_ova: description: - Dictionary of values to be used to export VM as OVA. suboptions: host: description: - The name of the destination host where the OVA has to be exported. directory: description: - The name of the directory where the OVA has to be exported. 
filename: description: - The name of the exported OVA file. version_added: "2.8" force_migrate: description: - "If I(true), the VM will migrate even if it is defined as non-migratable." version_added: "2.8" type: bool migrate: description: - "If I(true), the VM will migrate to any available host." version_added: "2.8" type: bool next_run: description: - "If I(true), the update will not be applied to the VM immediately and will be only applied when virtual machine is restarted." - NOTE - If there are multiple next run configuration changes on the VM, the first change may get reverted if this option is not passed. version_added: "2.8" type: bool snapshot_name: description: - "Snapshot to clone VM from." - "Snapshot with description specified should exist." - "You have to specify C(snapshot_vm) parameter with virtual machine name of this snapshot." version_added: "2.9" snapshot_vm: description: - "Source VM to clone VM from." - "VM should have snapshot specified by C(snapshot)." - "If C(snapshot_name) specified C(snapshot_vm) is required." version_added: "2.9" notes: - If VM is in I(UNASSIGNED) or I(UNKNOWN) state before any operation, the module will fail. If VM is in I(IMAGE_LOCKED) state before any operation, we try to wait for VM to be I(DOWN). If VM is in I(SAVING_STATE) state before any operation, we try to wait for VM to be I(SUSPENDED). If VM is in I(POWERING_DOWN) state before any operation, we try to wait for VM to be I(UP) or I(DOWN). VM can get into I(UP) state from I(POWERING_DOWN) state, when there is no ACPI or guest agent running inside VM, or if the shutdown operation fails. When user specify I(UP) C(state), we always wait to VM to be in I(UP) state in case VM is I(MIGRATING), I(REBOOTING), I(POWERING_UP), I(RESTORING_STATE), I(WAIT_FOR_LAUNCH). In other states we run start operation on VM. When user specify I(stopped) C(state), and If user pass C(force) parameter set to I(true) we forcibly stop the VM in any state. 
If user don't pass C(force) parameter, we always wait to VM to be in UP state in case VM is I(MIGRATING), I(REBOOTING), I(POWERING_UP), I(RESTORING_STATE), I(WAIT_FOR_LAUNCH). If VM is in I(PAUSED) or I(SUSPENDED) state, we start the VM. Then we gracefully shutdown the VM. When user specify I(suspended) C(state), we always wait to VM to be in UP state in case VM is I(MIGRATING), I(REBOOTING), I(POWERING_UP), I(RESTORING_STATE), I(WAIT_FOR_LAUNCH). If VM is in I(PAUSED) or I(DOWN) state, we start the VM. Then we suspend the VM. When user specify I(absent) C(state), we forcibly stop the VM in any state and remove it. extends_documentation_fragment: ovirt ''' EXAMPLES = ''' # Examples don't contain auth parameter for simplicity, # look at ovirt_auth module to see how to reuse authentication: - name: Creates a new Virtual Machine from template named 'rhel7_template' ovirt_vm: state: present name: myvm template: rhel7_template cluster: mycluster - name: Register VM ovirt_vm: state: registered storage_domain: mystorage cluster: mycluster name: myvm - name: Register VM using id ovirt_vm: state: registered storage_domain: mystorage cluster: mycluster id: 1111-1111-1111-1111 - name: Register VM, allowing partial import ovirt_vm: state: registered storage_domain: mystorage allow_partial_import: "True" cluster: mycluster id: 1111-1111-1111-1111 - name: Register VM with vnic profile mappings and reassign bad macs ovirt_vm: state: registered storage_domain: mystorage cluster: mycluster id: 1111-1111-1111-1111 vnic_profile_mappings: - source_network_name: mynetwork source_profile_name: mynetwork target_profile_id: 3333-3333-3333-3333 - source_network_name: mynetwork2 source_profile_name: mynetwork2 target_profile_id: 4444-4444-4444-4444 reassign_bad_macs: "True" - name: Register VM with mappings ovirt_vm: state: registered storage_domain: mystorage cluster: mycluster id: 1111-1111-1111-1111 role_mappings: - source_name: Role_A dest_name: Role_B domain_mappings: - source_name: 
Domain_A dest_name: Domain_B lun_mappings: - source_storage_type: iscsi source_logical_unit_id: 1IET_000d0001 source_logical_unit_port: 3260 source_logical_unit_portal: 1 source_logical_unit_address: 10.34.63.203 source_logical_unit_target: iqn.2016-08-09.brq.str-01:omachace dest_storage_type: iscsi dest_logical_unit_id: 1IET_000d0002 dest_logical_unit_port: 3260 dest_logical_unit_portal: 1 dest_logical_unit_address: 10.34.63.204 dest_logical_unit_target: iqn.2016-08-09.brq.str-02:omachace affinity_group_mappings: - source_name: Affinity_A dest_name: Affinity_B affinity_label_mappings: - source_name: Label_A dest_name: Label_B cluster_mappings: - source_name: cluster_A dest_name: cluster_B - name: Creates a stateless VM which will always use latest template version ovirt_vm: name: myvm template: rhel7 cluster: mycluster use_latest_template_version: true # Creates a new server rhel7 Virtual Machine from Blank template # on brq01 cluster with 2GiB memory and 2 vcpu cores/sockets # and attach bootable disk with name rhel7_disk and attach virtio NIC - ovirt_vm: state: present cluster: brq01 name: myvm memory: 2GiB cpu_cores: 2 cpu_sockets: 2 cpu_shares: 1024 type: server operating_system: rhel_7x64 disks: - name: rhel7_disk bootable: True nics: - name: nic1 # Change VM Name - ovirt_vm: id: 00000000-0000-0000-0000-000000000000 name: "new_vm_name" - name: Run VM with cloud init ovirt_vm: name: rhel7 template: rhel7 cluster: Default memory: 1GiB high_availability: true high_availability_priority: 50 # Available from Ansible 2.5 cloud_init: nic_boot_protocol: static nic_ip_address: 10.34.60.86 nic_netmask: 255.255.252.0 nic_gateway: 10.34.63.254 nic_name: eth1 nic_on_boot: true host_name: example.com custom_script: | write_files: - content: | Hello, world! 
path: /tmp/greeting.txt permissions: '0644' user_name: root root_password: super_password - name: Run VM with cloud init, with multiple network interfaces ovirt_vm: name: rhel7_4 template: rhel7 cluster: mycluster cloud_init_nics: - nic_name: eth0 nic_boot_protocol: dhcp nic_on_boot: true - nic_name: eth1 nic_boot_protocol: static nic_ip_address: 10.34.60.86 nic_netmask: 255.255.252.0 nic_gateway: 10.34.63.254 nic_on_boot: true - name: Run VM with sysprep ovirt_vm: name: windows2012R2_AD template: windows2012R2 cluster: Default memory: 3GiB high_availability: true sysprep: host_name: windowsad.example.com user_name: Administrator root_password: SuperPassword123 - name: Migrate/Run VM to/on host named 'host1' ovirt_vm: state: running name: myvm host: host1 - name: Migrate VM to any available host ovirt_vm: state: running name: myvm migrate: true - name: Change VMs CD ovirt_vm: name: myvm cd_iso: drivers.iso - name: Eject VMs CD ovirt_vm: name: myvm cd_iso: '' - name: Boot VM from CD ovirt_vm: name: myvm cd_iso: centos7_x64.iso boot_devices: - cdrom - name: Stop vm ovirt_vm: state: stopped name: myvm - name: Upgrade memory to already created VM ovirt_vm: name: myvm memory: 4GiB - name: Hot plug memory to already created and running VM (VM won't be restarted) ovirt_vm: name: myvm memory: 4GiB # Create/update a VM to run with two vNUMA nodes and pin them to physical NUMA nodes as follows: # vnuma index 0-> numa index 0, vnuma index 1-> numa index 1 - name: Create a VM to run with two vNUMA nodes ovirt_vm: name: myvm cluster: mycluster numa_tune_mode: "interleave" numa_nodes: - index: 0 cores: [0] memory: 20 numa_node_pins: [0] - index: 1 cores: [1] memory: 30 numa_node_pins: [1] - name: Update an existing VM to run without previously created vNUMA nodes (i.e. 
remove all vNUMA nodes+NUMA pinning setting) ovirt_vm: name: myvm cluster: mycluster state: "present" numa_tune_mode: "interleave" numa_nodes: - index: -1 # When change on the VM needs restart of the VM, use next_run state, # The VM will be updated and rebooted if there are any changes. # If present state would be used, VM won't be restarted. - ovirt_vm: state: next_run name: myvm boot_devices: - network - name: Import virtual machine from VMware ovirt_vm: state: stopped cluster: mycluster name: vmware_win10 timeout: 1800 poll_interval: 30 vmware: url: vpx://user@1.2.3.4/Folder1/Cluster1/2.3.4.5?no_verify=1 name: windows10 storage_domain: mynfs username: user password: password - name: Create vm from template and create all disks on specific storage domain ovirt_vm: name: vm_test cluster: mycluster template: mytemplate storage_domain: mynfs nics: - name: nic1 - name: Remove VM, if VM is running it will be stopped ovirt_vm: state: absent name: myvm # Defining a specific quota for a VM: # Since Ansible 2.5 - ovirt_quotas_facts: data_center: Default name: myquota - ovirt_vm: name: myvm sso: False boot_menu: True usb_support: True serial_console: True quota_id: "{{ ovirt_quotas[0]['id'] }}" - name: Create a VM that has the console configured for both Spice and VNC ovirt_vm: name: myvm template: mytemplate cluster: mycluster graphical_console: protocol: - spice - vnc # Execute remote viever to VM - block: - name: Create a ticket for console for a running VM ovirt_vms: name: myvm ticket: true state: running register: myvm - name: Save ticket to file copy: content: "{{ myvm.vm.remote_vv_file }}" dest: ~/vvfile.vv - name: Run remote viewer with file command: remote-viewer ~/vvfile.vv # Default value of host_device state is present - name: Attach host devices to virtual machine ovirt_vm: name: myvm host: myhost placement_policy: pinned host_devices: - name: pci_0000_00_06_0 - name: pci_0000_00_07_0 state: absent - name: pci_0000_00_08_0 state: present - name: Export the VM 
as OVA ovirt_vm: name: myvm state: exported cluster: mycluster export_ova: host: myhost filename: myvm.ova directory: /tmp/ - name: Clone VM from snapshot ovirt_vm: snapshot_vm: myvm snapshot_name: myvm_snap name: myvm_clone state: present ''' RETURN = ''' id: description: ID of the VM which is managed returned: On success if VM is found. type: str sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c vm: description: "Dictionary of all the VM attributes. VM attributes can be found on your oVirt/RHV instance at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/vm. Additionally when user sent ticket=true, this module will return also remote_vv_file parameter in vm dictionary, which contains remote-viewer compatible file to open virtual machine console. Please note that this file contains sensible information." returned: On success if VM is found. type: dict ''' import traceback try: import ovirtsdk4.types as otypes except ImportError: pass from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ovirt import ( BaseModule, check_params, check_sdk, convert_to_bytes, create_connection, equal, get_dict_of_struct, get_entity, get_link_name, get_id_by_name, ovirt_full_argument_spec, search_by_attributes, search_by_name, wait, ) class VmsModule(BaseModule): def __init__(self, *args, **kwargs): super(VmsModule, self).__init__(*args, **kwargs) self._initialization = None self._is_new = False def __get_template_with_version(self): """ oVirt/RHV in version 4.1 doesn't support search by template+version_number, so we need to list all templates with specific name and then iterate through it's version until we find the version we look for. 
""" template = None templates_service = self._connection.system_service().templates_service() if self.param('template'): templates = templates_service.list( search='name=%s and cluster=%s' % (self.param('template'), self.param('cluster')) ) if self.param('template_version'): templates = [ t for t in templates if t.version.version_number == self.param('template_version') ] if not templates: raise ValueError( "Template with name '%s' and version '%s' in cluster '%s' was not found'" % ( self.param('template'), self.param('template_version'), self.param('cluster') ) ) template = sorted(templates, key=lambda t: t.version.version_number, reverse=True)[0] elif self._is_new: # If template isn't specified and VM is about to be created specify default template: template = templates_service.template_service('00000000-0000-0000-0000-000000000000').get() return template def __get_storage_domain_and_all_template_disks(self, template): if self.param('template') is None: return None if self.param('storage_domain') is None: return None disks = list() for att in self._connection.follow_link(template.disk_attachments): disks.append( otypes.DiskAttachment( disk=otypes.Disk( id=att.disk.id, format=otypes.DiskFormat(self.param('disk_format')), storage_domains=[ otypes.StorageDomain( id=get_id_by_name( self._connection.system_service().storage_domains_service(), self.param('storage_domain') ) ) ] ) ) ) return disks def __get_snapshot(self): if self.param('snapshot_vm') is None: return None if self.param('snapshot_name') is None: return None vms_service = self._connection.system_service().vms_service() vm_id = get_id_by_name(vms_service, self.param('snapshot_vm')) vm_service = vms_service.vm_service(vm_id) snaps_service = vm_service.snapshots_service() snaps = snaps_service.list() snap = next( (s for s in snaps if s.description == self.param('snapshot_name')), None ) return snap def __get_cluster(self): if self.param('cluster') is not None: return self.param('cluster') elif 
    def build_entity(self):
        """
        Build the otypes.Vm object describing the desired VM state.

        Each attribute is populated only when the corresponding module
        parameter was supplied (the `... if self.param(x) else None`
        pattern), so unset parameters are sent as None and do not
        overwrite existing VM configuration on update.
        """
        template = self.__get_template_with_version()
        cluster = self.__get_cluster()
        snapshot = self.__get_snapshot()
        disk_attachments = self.__get_storage_domain_and_all_template_disks(template)
        return otypes.Vm(
            id=self.param('id'),
            name=self.param('name'),
            cluster=otypes.Cluster(
                name=cluster
            ) if cluster else None,
            disk_attachments=disk_attachments,
            template=otypes.Template(
                id=template.id,
            ) if template else None,
            use_latest_template_version=self.param('use_latest_template_version'),
            # use_latest_template_version implies a stateless VM:
            stateless=self.param('stateless') or self.param('use_latest_template_version'),
            delete_protected=self.param('delete_protected'),
            bios=(
                otypes.Bios(boot_menu=otypes.BootMenu(enabled=self.param('boot_menu')))
            ) if self.param('boot_menu') is not None else None,
            console=(
                otypes.Console(enabled=self.param('serial_console'))
            ) if self.param('serial_console') is not None else None,
            usb=(
                otypes.Usb(enabled=self.param('usb_support'))
            ) if self.param('usb_support') is not None else None,
            # sso=False is expressed as an empty method list:
            sso=(
                otypes.Sso(
                    methods=[otypes.Method(id=otypes.SsoMethod.GUEST_AGENT)] if self.param('sso') else []
                )
            ) if self.param('sso') is not None else None,
            quota=otypes.Quota(id=self._module.params.get('quota_id')) if self.param('quota_id') is not None else None,
            high_availability=otypes.HighAvailability(
                enabled=self.param('high_availability'),
                priority=self.param('high_availability_priority'),
            ) if self.param('high_availability') is not None or self.param('high_availability_priority') else None,
            # `lease` is a storage-domain name; resolve it to an id:
            lease=otypes.StorageDomainLease(
                storage_domain=otypes.StorageDomain(
                    id=get_id_by_name(
                        service=self._connection.system_service().storage_domains_service(),
                        name=self.param('lease')
                    )
                )
            ) if self.param('lease') is not None else None,
            cpu=otypes.Cpu(
                topology=otypes.CpuTopology(
                    cores=self.param('cpu_cores'),
                    sockets=self.param('cpu_sockets'),
                    threads=self.param('cpu_threads'),
                ) if any((
                    self.param('cpu_cores'),
                    self.param('cpu_sockets'),
                    self.param('cpu_threads')
                )) else None,
                cpu_tune=otypes.CpuTune(
                    vcpu_pins=[
                        otypes.VcpuPin(vcpu=int(pin['vcpu']), cpu_set=str(pin['cpu']))
                        for pin in self.param('cpu_pinning')
                    ],
                ) if self.param('cpu_pinning') else None,
                mode=otypes.CpuMode(self.param('cpu_mode')) if self.param('cpu_mode') else None,
            ) if any((
                self.param('cpu_cores'),
                self.param('cpu_sockets'),
                self.param('cpu_threads'),
                self.param('cpu_mode'),
                self.param('cpu_pinning')
            )) else None,
            cpu_shares=self.param('cpu_shares'),
            os=otypes.OperatingSystem(
                type=self.param('operating_system'),
                boot=otypes.Boot(
                    devices=[
                        otypes.BootDevice(dev) for dev in self.param('boot_devices')
                    ],
                ) if self.param('boot_devices') else None,
                # Kernel settings are stored on the VM only when they
                # should persist across reboots:
                cmdline=self.param('kernel_params') if self.param('kernel_params_persist') else None,
                initrd=self.param('initrd_path') if self.param('kernel_params_persist') else None,
                kernel=self.param('kernel_path') if self.param('kernel_params_persist') else None,
            ) if (
                self.param('operating_system') or self.param('boot_devices') or self.param('kernel_params_persist')
            ) else None,
            type=otypes.VmType(
                self.param('type')
            ) if self.param('type') else None,
            memory=convert_to_bytes(
                self.param('memory')
            ) if self.param('memory') else None,
            memory_policy=otypes.MemoryPolicy(
                guaranteed=convert_to_bytes(self.param('memory_guaranteed')),
                ballooning=self.param('ballooning_enabled'),
                max=convert_to_bytes(self.param('memory_max')),
            ) if any((
                self.param('memory_guaranteed'),
                self.param('ballooning_enabled') is not None,
                self.param('memory_max')
            )) else None,
            instance_type=otypes.InstanceType(
                id=get_id_by_name(
                    self._connection.system_service().instance_types_service(),
                    self.param('instance_type'),
                ),
            ) if self.param('instance_type') else None,
            custom_compatibility_version=otypes.Version(
                major=self._get_major(self.param('custom_compatibility_version')),
                minor=self._get_minor(self.param('custom_compatibility_version')),
            ) if self.param('custom_compatibility_version') is not None else None,
            description=self.param('description'),
            comment=self.param('comment'),
            time_zone=otypes.TimeZone(
                name=self.param('timezone'),
            ) if self.param('timezone') else None,
            serial_number=otypes.SerialNumber(
                policy=otypes.SerialNumberPolicy(self.param('serial_policy')),
                value=self.param('serial_policy_value'),
            ) if (
                self.param('serial_policy') is not None or
                self.param('serial_policy_value') is not None
            ) else None,
            placement_policy=otypes.VmPlacementPolicy(
                affinity=otypes.VmAffinity(self.param('placement_policy')),
                hosts=[
                    otypes.Host(name=self.param('host')),
                ] if self.param('host') else None,
            ) if self.param('placement_policy') else None,
            soundcard_enabled=self.param('soundcard_enabled'),
            display=otypes.Display(
                smartcard_enabled=self.param('smartcard_enabled')
            ) if self.param('smartcard_enabled') is not None else None,
            io=otypes.Io(
                threads=self.param('io_threads'),
            ) if self.param('io_threads') is not None else None,
            numa_tune_mode=otypes.NumaTuneMode(
                self.param('numa_tune_mode')
            ) if self.param('numa_tune_mode') else None,
            rng_device=otypes.RngDevice(
                source=otypes.RngSource(self.param('rng_device')),
            ) if self.param('rng_device') else None,
            custom_properties=[
                otypes.CustomProperty(
                    name=cp.get('name'),
                    regexp=cp.get('regexp'),
                    value=str(cp.get('value')),
                ) for cp in self.param('custom_properties') if cp
            ] if self.param('custom_properties') is not None else None,
            # Cloud-init data is stored on the VM itself only in persist mode;
            # otherwise it is passed as a one-shot start parameter elsewhere.
            initialization=self.get_initialization() if self.param('cloud_init_persist') else None,
            snapshots=[otypes.Snapshot(id=snapshot.id)] if snapshot is not None else None,
        )

    def _get_export_domain_service(self):
        """
        Resolve the `export_domain` module parameter to its storage-domain
        service (name -> id lookup, then the per-domain service).
        """
        provider_name = self._module.params['export_domain']
        export_sds_service = self._connection.system_service().storage_domains_service()
        export_sd_id = get_id_by_name(export_sds_service, provider_name)
        return export_sds_service.service(export_sd_id)
get_id_by_name(export_sds_service, provider_name) return export_sds_service.service(export_sd_id) def post_export_action(self, entity): self._service = self._get_export_domain_service().vms_service() def update_check(self, entity): res = self._update_check(entity) if entity.next_run_configuration_exists: res = res and self._update_check(self._service.service(entity.id).get(next_run=True)) return res def _update_check(self, entity): def check_cpu_pinning(): if self.param('cpu_pinning'): current = [] if entity.cpu.cpu_tune: current = [(str(pin.cpu_set), int(pin.vcpu)) for pin in entity.cpu.cpu_tune.vcpu_pins] passed = [(str(pin['cpu']), int(pin['vcpu'])) for pin in self.param('cpu_pinning')] return sorted(current) == sorted(passed) return True def check_custom_properties(): if self.param('custom_properties'): current = [] if entity.custom_properties: current = [(cp.name, cp.regexp, str(cp.value)) for cp in entity.custom_properties] passed = [(cp.get('name'), cp.get('regexp'), str(cp.get('value'))) for cp in self.param('custom_properties') if cp] return sorted(current) == sorted(passed) return True def check_host(): if self.param('host') is not None: return self.param('host') in [self._connection.follow_link(host).name for host in getattr(entity.placement_policy, 'hosts', None) or []] return True def check_custom_compatibility_version(): if self.param('custom_compatibility_version') is not None: return (self._get_minor(self.param('custom_compatibility_version')) == self._get_minor(entity.custom_compatibility_version) and self._get_major(self.param('custom_compatibility_version')) == self._get_major(entity.custom_compatibility_version)) return True cpu_mode = getattr(entity.cpu, 'mode') vm_display = entity.display return ( check_cpu_pinning() and check_custom_properties() and check_host() and check_custom_compatibility_version() and not self.param('cloud_init_persist') and not self.param('kernel_params_persist') and equal(self.param('cluster'), 
get_link_name(self._connection, entity.cluster)) and equal(convert_to_bytes(self.param('memory')), entity.memory) and equal(convert_to_bytes(self.param('memory_guaranteed')), entity.memory_policy.guaranteed) and equal(convert_to_bytes(self.param('memory_max')), entity.memory_policy.max) and equal(self.param('cpu_cores'), entity.cpu.topology.cores) and equal(self.param('cpu_sockets'), entity.cpu.topology.sockets) and equal(self.param('cpu_threads'), entity.cpu.topology.threads) and equal(self.param('cpu_mode'), str(cpu_mode) if cpu_mode else None) and equal(self.param('type'), str(entity.type)) and equal(self.param('name'), str(entity.name)) and equal(self.param('operating_system'), str(entity.os.type)) and equal(self.param('boot_menu'), entity.bios.boot_menu.enabled) and equal(self.param('soundcard_enabled'), entity.soundcard_enabled) and equal(self.param('smartcard_enabled'), getattr(vm_display, 'smartcard_enabled', False)) and equal(self.param('io_threads'), entity.io.threads) and equal(self.param('ballooning_enabled'), entity.memory_policy.ballooning) and equal(self.param('serial_console'), getattr(entity.console, 'enabled', None)) and equal(self.param('usb_support'), entity.usb.enabled) and equal(self.param('sso'), True if entity.sso.methods else False) and equal(self.param('quota_id'), getattr(entity.quota, 'id', None)) and equal(self.param('high_availability'), entity.high_availability.enabled) and equal(self.param('high_availability_priority'), entity.high_availability.priority) and equal(self.param('lease'), get_link_name(self._connection, getattr(entity.lease, 'storage_domain', None))) and equal(self.param('stateless'), entity.stateless) and equal(self.param('cpu_shares'), entity.cpu_shares) and equal(self.param('delete_protected'), entity.delete_protected) and equal(self.param('use_latest_template_version'), entity.use_latest_template_version) and equal(self.param('boot_devices'), [str(dev) for dev in getattr(entity.os.boot, 'devices', [])]) and 
equal(self.param('instance_type'), get_link_name(self._connection, entity.instance_type), ignore_case=True) and equal(self.param('description'), entity.description) and equal(self.param('comment'), entity.comment) and equal(self.param('timezone'), getattr(entity.time_zone, 'name', None)) and equal(self.param('serial_policy'), str(getattr(entity.serial_number, 'policy', None))) and equal(self.param('serial_policy_value'), getattr(entity.serial_number, 'value', None)) and equal(self.param('placement_policy'), str(entity.placement_policy.affinity) if entity.placement_policy else None) and equal(self.param('numa_tune_mode'), str(entity.numa_tune_mode)) and equal(self.param('rng_device'), str(entity.rng_device.source) if entity.rng_device else None) ) def pre_create(self, entity): # Mark if entity exists before touching it: if entity is None: self._is_new = True def post_update(self, entity): self.post_present(entity.id) def post_present(self, entity_id): # After creation of the VM, attach disks and NICs: entity = self._service.service(entity_id).get() self.__attach_disks(entity) self.__attach_nics(entity) self._attach_cd(entity) self.changed = self.__attach_numa_nodes(entity) self.changed = self.__attach_watchdog(entity) self.changed = self.__attach_graphical_console(entity) self.changed = self.__attach_host_devices(entity) def pre_remove(self, entity): # Forcibly stop the VM, if it's not in DOWN state: if entity.status != otypes.VmStatus.DOWN: if not self._module.check_mode: self.changed = self.action( action='stop', action_condition=lambda vm: vm.status != otypes.VmStatus.DOWN, wait_condition=lambda vm: vm.status == otypes.VmStatus.DOWN, )['changed'] def __suspend_shutdown_common(self, vm_service): if vm_service.get().status in [ otypes.VmStatus.MIGRATING, otypes.VmStatus.POWERING_UP, otypes.VmStatus.REBOOT_IN_PROGRESS, otypes.VmStatus.WAIT_FOR_LAUNCH, otypes.VmStatus.UP, otypes.VmStatus.RESTORING_STATE, ]: self._wait_for_UP(vm_service) def _pre_shutdown_action(self, 
    def _pre_suspend_action(self, entity):
        """Bring a paused/down VM up first so suspend can proceed."""
        vm_service = self._service.vm_service(entity.id)
        self.__suspend_shutdown_common(vm_service)
        if entity.status in [otypes.VmStatus.PAUSED, otypes.VmStatus.DOWN]:
            vm_service.start()
            self._wait_for_UP(vm_service)
        return vm_service.get()

    def _post_start_action(self, entity):
        # After start: wait for UP, then attach CD and migrate if requested.
        vm_service = self._service.service(entity.id)
        self._wait_for_UP(vm_service)
        self._attach_cd(vm_service.get())
        self._migrate_vm(vm_service.get())

    def _attach_cd(self, entity):
        """
        Ensure the requested `cd_iso` is in the VM's CD drive.

        When the VM is UP and state=running the change is applied to the
        current (runtime) configuration via `current=True`.
        """
        cd_iso = self.param('cd_iso')
        if cd_iso is not None:
            vm_service = self._service.service(entity.id)
            current = vm_service.get().status == otypes.VmStatus.UP and self.param('state') == 'running'
            cdroms_service = vm_service.cdroms_service()
            # Assumes each VM has exactly one CD-ROM device — TODO confirm.
            cdrom_device = cdroms_service.list()[0]
            cdrom_service = cdroms_service.cdrom_service(cdrom_device.id)
            cdrom = cdrom_service.get(current=current)
            if getattr(cdrom.file, 'id', '') != cd_iso:
                if not self._module.check_mode:
                    cdrom_service.update(
                        cdrom=otypes.Cdrom(
                            file=otypes.File(id=cd_iso)
                        ),
                        current=current,
                    )
                self.changed = True
        return entity

    def _migrate_vm(self, entity):
        """
        Migrate a running VM to the requested `host`, or anywhere when
        `migrate` is set. Only acts on VMs already in UP state.
        """
        vm_host = self.param('host')
        vm_service = self._service.vm_service(entity.id)
        # In case VM is preparing to be UP, wait to be up, to migrate it:
        if entity.status == otypes.VmStatus.UP:
            if vm_host is not None:
                hosts_service = self._connection.system_service().hosts_service()
                current_vm_host = hosts_service.host_service(entity.host.id).get().name
                if vm_host != current_vm_host:
                    if not self._module.check_mode:
                        vm_service.migrate(host=otypes.Host(name=vm_host), force=self.param('force_migrate'))
                        self._wait_for_UP(vm_service)
                    self.changed = True
            elif self.param('migrate'):
                if not self._module.check_mode:
                    vm_service.migrate(force=self.param('force_migrate'))
                    self._wait_for_UP(vm_service)
                self.changed = True
        return entity

    def _wait_for_UP(self, vm_service):
        # Block until the VM reaches UP (honours wait/timeout params).
        wait(
            service=vm_service,
            condition=lambda vm: vm.status == otypes.VmStatus.UP,
            wait=self.param('wait'),
            timeout=self.param('timeout'),
        )

    def _wait_for_vm_disks(self, vm_service):
        # Wait until every image-based disk attached to the VM is OK;
        # non-image (LUN) disks are treated as always ready.
        disks_service = self._connection.system_service().disks_service()
        for da in vm_service.disk_attachments_service().list():
            disk_service = disks_service.disk_service(da.disk.id)
            wait(
                service=disk_service,
                condition=lambda disk: disk.status == otypes.DiskStatus.OK if disk.storage_type == otypes.DiskStorageType.IMAGE else True,
                wait=self.param('wait'),
                timeout=self.param('timeout'),
            )

    def wait_for_down(self, vm):
        """
        Wait for the VM to reach DOWN status.

        For stateless VMs, additionally wait for the stateless snapshot
        handling to settle: the active snapshot is removed and the
        stateless snapshot (which becomes the new active one) reaches OK.
        """
        vm_service = self._service.vm_service(vm.id)
        wait(
            service=vm_service,
            condition=lambda vm: vm.status == otypes.VmStatus.DOWN,
            wait=self.param('wait'),
            timeout=self.param('timeout'),
        )
        if vm.stateless:
            snapshots_service = vm_service.snapshots_service()
            snapshots = snapshots_service.list()
            snap_active = [
                snap for snap in snapshots
                if snap.snapshot_type == otypes.SnapshotType.ACTIVE
            ][0]
            snap_stateless = [
                snap for snap in snapshots
                if snap.snapshot_type == otypes.SnapshotType.STATELESS
            ]
            # Stateless snapshot may be already removed:
            if snap_stateless:
                # Wait for the active snapshot to be removed (it is the
                # current stateless snapshot), then for the stateless
                # snapshot to become ready, as it becomes the active one.
                wait(
                    service=snapshots_service.snapshot_service(snap_active.id),
                    condition=lambda snap: snap is None,
                    wait=self.param('wait'),
                    timeout=self.param('timeout'),
                )
                wait(
                    service=snapshots_service.snapshot_service(snap_stateless[0].id),
                    condition=lambda snap: snap.snapshot_status == otypes.SnapshotStatus.OK,
                    wait=self.param('wait'),
                    timeout=self.param('timeout'),
                )
        return True
    def __attach_graphical_console(self, entity):
        """
        Reconcile the VM's graphics consoles with the `graphical_console`
        parameter. Returns True when anything was changed.
        """
        graphical_console = self.param('graphical_console')
        if not graphical_console:
            return False
        vm_service = self._service.service(entity.id)
        gcs_service = vm_service.graphics_consoles_service()
        graphical_consoles = gcs_service.list()
        # Headless mode: remove all graphical consoles if there are any.
        if bool(graphical_console.get('headless_mode')):
            if not self._module.check_mode:
                for gc in graphical_consoles:
                    gcs_service.console_service(gc.id).remove()
            return len(graphical_consoles) > 0
        # Normalize a single protocol string to a list:
        protocol = graphical_console.get('protocol')
        if isinstance(protocol, str):
            protocol = [protocol]
        current_protocols = [str(gc.protocol) for gc in graphical_consoles]
        # No consoles yet: add one per requested protocol.
        if not current_protocols:
            if not self._module.check_mode:
                for p in protocol:
                    gcs_service.add(
                        otypes.GraphicsConsole(
                            protocol=otypes.GraphicsType(p),
                        )
                    )
            return True
        # Protocols differ: replace all consoles with the requested set.
        if sorted(protocol) != sorted(current_protocols):
            if not self._module.check_mode:
                for gc in graphical_consoles:
                    gcs_service.console_service(gc.id).remove()
                for p in protocol:
                    gcs_service.add(
                        otypes.GraphicsConsole(
                            protocol=otypes.GraphicsType(p),
                        )
                    )
            return True

    def __attach_disks(self, entity):
        """Attach the disks from the `disks` parameter that are not yet attached."""
        if not self.param('disks'):
            return
        vm_service = self._service.service(entity.id)
        disks_service = self._connection.system_service().disks_service()
        disk_attachments_service = vm_service.disk_attachments_service()
        # Disks must be OK before they can be attached:
        self._wait_for_vm_disks(vm_service)
        for disk in self.param('disks'):
            # If disk ID is not specified, find disk by name:
            disk_id = disk.get('id')
            if disk_id is None:
                disk_id = getattr(
                    search_by_name(
                        service=disks_service,
                        name=disk.get('name')
                    ),
                    'id',
                    None
                )
            # Attach disk to VM only when not already attached:
            disk_attachment = disk_attachments_service.attachment_service(disk_id)
            if get_entity(disk_attachment) is None:
                if not self._module.check_mode:
                    disk_attachments_service.add(
                        otypes.DiskAttachment(
                            disk=otypes.Disk(
                                id=disk_id,
                            ),
                            active=disk.get('activate', True),
                            interface=otypes.DiskInterface(
                                disk.get('interface', 'virtio')
                            ),
                            bootable=disk.get('bootable', False),
                        )
                    )
                self.changed = True

    def __get_vnic_profile_id(self, nic):
        """
        Return the VNIC profile ID looked up by its name. There can be
        more VNIC profiles with the same name; the profile is further
        filtered by the cluster's networks.

        :raises Exception: when no matching profile exists in the cluster.
        """
        vnics_service = self._connection.system_service().vnic_profiles_service()
        clusters_service = self._connection.system_service().clusters_service()
        cluster = search_by_name(clusters_service, self.param('cluster'))
        profiles = [
            profile for profile in vnics_service.list()
            if profile.name == nic.get('profile_name')
        ]
        cluster_networks = [
            net.id for net in self._connection.follow_link(cluster.networks)
        ]
        try:
            return next(
                profile.id for profile in profiles
                if profile.network.id in cluster_networks
            )
        except StopIteration:
            raise Exception(
                "Profile '%s' was not found in cluster '%s'" % (
                    nic.get('profile_name'),
                    self.param('cluster')
                )
            )

    def __attach_numa_nodes(self, entity):
        """
        Replace the VM's virtual NUMA nodes with those from `numa_nodes`.
        Returns True when anything was changed.
        """
        updated = False
        numa_nodes_service = self._service.service(entity.id).numa_nodes_service()
        if len(self.param('numa_nodes')) > 0:
            # Remove all existing virtual numa nodes before adding new ones.
            existed_numa_nodes = numa_nodes_service.list()
            # NOTE(review): sort() is called with only `reverse=` derived
            # from the first two elements' index order — presumably to
            # remove higher-indexed nodes first; confirm the SDK objects
            # are comparable / that this ordering is intended.
            existed_numa_nodes.sort(reverse=len(existed_numa_nodes) > 1 and existed_numa_nodes[1].index > existed_numa_nodes[0].index)
            for current_numa_node in existed_numa_nodes:
                numa_nodes_service.node_service(current_numa_node.id).remove()
                updated = True
        for numa_node in self.param('numa_nodes'):
            # Skip entries missing any of the required keys:
            if numa_node is None or numa_node.get('index') is None or numa_node.get('cores') is None or numa_node.get('memory') is None:
                continue
            numa_nodes_service.add(
                otypes.VirtualNumaNode(
                    index=numa_node.get('index'),
                    memory=numa_node.get('memory'),
                    cpu=otypes.Cpu(
                        cores=[
                            otypes.Core(
                                index=core
                            ) for core in numa_node.get('cores')
                        ],
                    ),
                    numa_node_pins=[
                        otypes.NumaNodePin(
                            index=pin
                        ) for pin in numa_node.get('numa_node_pins')
                    ] if numa_node.get('numa_node_pins') is not None else None,
                )
            )
            updated = True
        return updated
    def __attach_watchdog(self, entity):
        """
        Reconcile the VM's watchdog device with the `watchdog` parameter:
        remove it when model is None, add it when missing, update it when
        model/action differ. Returns True when anything was changed.
        """
        watchdogs_service = self._service.service(entity.id).watchdogs_service()
        watchdog = self.param('watchdog')
        if watchdog is not None:
            current_watchdog = next(iter(watchdogs_service.list()), None)
            if watchdog.get('model') is None and current_watchdog:
                watchdogs_service.watchdog_service(current_watchdog.id).remove()
                return True
            elif watchdog.get('model') is not None and current_watchdog is None:
                watchdogs_service.add(
                    otypes.Watchdog(
                        model=otypes.WatchdogModel(watchdog.get('model').lower()),
                        action=otypes.WatchdogAction(watchdog.get('action')),
                    )
                )
                return True
            elif current_watchdog is not None:
                if (
                    str(current_watchdog.model).lower() != watchdog.get('model').lower() or
                    str(current_watchdog.action).lower() != watchdog.get('action').lower()
                ):
                    # NOTE(review): unlike the add branch above, the model
                    # is not lower-cased here — confirm whether that is
                    # intentional.
                    watchdogs_service.watchdog_service(current_watchdog.id).update(
                        otypes.Watchdog(
                            model=otypes.WatchdogModel(watchdog.get('model')),
                            action=otypes.WatchdogAction(watchdog.get('action')),
                        )
                    )
                    return True
        return False

    def __attach_nics(self, entity):
        # Attach NICs to VM, if specified (only NICs not yet present by name):
        nics_service = self._service.service(entity.id).nics_service()
        for nic in self.param('nics'):
            if search_by_name(nics_service, nic.get('name')) is None:
                if not self._module.check_mode:
                    nics_service.add(
                        otypes.Nic(
                            name=nic.get('name'),
                            interface=otypes.NicInterface(
                                nic.get('interface', 'virtio')
                            ),
                            vnic_profile=otypes.VnicProfile(
                                id=self.__get_vnic_profile_id(nic),
                            ) if nic.get('profile_name') else None,
                            mac=otypes.Mac(
                                address=nic.get('mac_address')
                            ) if nic.get('mac_address') else None,
                        )
                    )
                self.changed = True

    def get_initialization(self):
        """
        Build (and cache) the otypes.Initialization object from the
        `sysprep`, `cloud_init` and `cloud_init_nics` parameters.

        `cloud_init` is treated as one more NIC entry appended to
        `cloud_init_nics`; its remaining keys become Initialization
        keyword arguments (the `nic_*` keys are popped out first).
        """
        if self._initialization is not None:
            return self._initialization
        sysprep = self.param('sysprep')
        cloud_init = self.param('cloud_init')
        cloud_init_nics = self.param('cloud_init_nics') or []
        if cloud_init is not None:
            cloud_init_nics.append(cloud_init)
        if cloud_init or cloud_init_nics:
            self._initialization = otypes.Initialization(
                nic_configurations=[
                    otypes.NicConfiguration(
                        boot_protocol=otypes.BootProtocol(
                            nic.pop('nic_boot_protocol').lower()
                        ) if nic.get('nic_boot_protocol') else None,
                        name=nic.pop('nic_name', None),
                        on_boot=nic.pop('nic_on_boot', None),
                        ip=otypes.Ip(
                            address=nic.pop('nic_ip_address', None),
                            netmask=nic.pop('nic_netmask', None),
                            gateway=nic.pop('nic_gateway', None),
                        ) if (
                            nic.get('nic_gateway') is not None or
                            nic.get('nic_netmask') is not None or
                            nic.get('nic_ip_address') is not None
                        ) else None,
                    )
                    for nic in cloud_init_nics
                    if (
                        nic.get('nic_gateway') is not None or
                        nic.get('nic_netmask') is not None or
                        nic.get('nic_ip_address') is not None or
                        nic.get('nic_boot_protocol') is not None or
                        nic.get('nic_on_boot') is not None
                    )
                ] if cloud_init_nics else None,
                **cloud_init
            )
        elif sysprep:
            self._initialization = otypes.Initialization(
                **sysprep
            )
        return self._initialization

    def __attach_host_devices(self, entity):
        """
        Add/remove host devices per the `host_devices` parameter
        (state 'present'/'absent' per device). Returns True when
        anything was changed.
        """
        vm_service = self._service.service(entity.id)
        host_devices_service = vm_service.host_devices_service()
        host_devices = self.param('host_devices')
        updated = False
        if host_devices:
            device_names = [dev.name for dev in host_devices_service.list()]
            for device in host_devices:
                device_name = device.get('name')
                state = device.get('state', 'present')
                if state == 'absent' and device_name in device_names:
                    updated = True
                    if not self._module.check_mode:
                        device_id = get_id_by_name(host_devices_service, device.get('name'))
                        host_devices_service.device_service(device_id).remove()
                elif state == 'present' and device_name not in device_names:
                    updated = True
                    if not self._module.check_mode:
                        host_devices_service.add(
                            otypes.HostDevice(
                                name=device.get('name'),
                            )
                        )
        return updated
def _get_role_mappings(module):
    """Build RegistrationRoleMapping objects from `role_mappings` params."""
    roleMappings = list()
    for roleMapping in module.params['role_mappings']:
        roleMappings.append(
            otypes.RegistrationRoleMapping(
                from_=otypes.Role(
                    name=roleMapping['source_name'],
                ) if roleMapping['source_name'] else None,
                to=otypes.Role(
                    name=roleMapping['dest_name'],
                ) if roleMapping['dest_name'] else None,
            )
        )
    return roleMappings


def _get_affinity_group_mappings(module):
    """Build RegistrationAffinityGroupMapping objects from module params."""
    affinityGroupMappings = list()
    for affinityGroupMapping in module.params['affinity_group_mappings']:
        affinityGroupMappings.append(
            otypes.RegistrationAffinityGroupMapping(
                from_=otypes.AffinityGroup(
                    name=affinityGroupMapping['source_name'],
                ) if affinityGroupMapping['source_name'] else None,
                to=otypes.AffinityGroup(
                    name=affinityGroupMapping['dest_name'],
                ) if affinityGroupMapping['dest_name'] else None,
            )
        )
    return affinityGroupMappings


def _get_affinity_label_mappings(module):
    """Build RegistrationAffinityLabelMapping objects from module params."""
    affinityLabelMappings = list()
    for affinityLabelMapping in module.params['affinity_label_mappings']:
        affinityLabelMappings.append(
            otypes.RegistrationAffinityLabelMapping(
                from_=otypes.AffinityLabel(
                    name=affinityLabelMapping['source_name'],
                ) if affinityLabelMapping['source_name'] else None,
                to=otypes.AffinityLabel(
                    name=affinityLabelMapping['dest_name'],
                ) if affinityLabelMapping['dest_name'] else None,
            )
        )
    return affinityLabelMappings


def _get_domain_mappings(module):
    """Build RegistrationDomainMapping objects from `domain_mappings` params."""
    domainMappings = list()
    for domainMapping in module.params['domain_mappings']:
        domainMappings.append(
            otypes.RegistrationDomainMapping(
                from_=otypes.Domain(
                    name=domainMapping['source_name'],
                ) if domainMapping['source_name'] else None,
                to=otypes.Domain(
                    name=domainMapping['dest_name'],
                ) if domainMapping['dest_name'] else None,
            )
        )
    return domainMappings


def _get_lun_mappings(module):
    """Build RegistrationLunMapping objects from `lun_mappings` params."""
    lunMappings = list()
    for lunMapping in module.params['lun_mappings']:
        # BUG FIX: the original had a stray trailing comma after the
        # append(...) call, turning the statement into a discarded
        # one-element tuple expression; removed for clarity.
        lunMappings.append(
            otypes.RegistrationLunMapping(
                from_=otypes.Disk(
                    lun_storage=otypes.HostStorage(
                        # Storage type only applies to iscsi/fcp LUNs:
                        type=otypes.StorageType(lunMapping['source_storage_type'])
                        if (lunMapping['source_storage_type'] in ['iscsi', 'fcp']) else None,
                        logical_units=[
                            otypes.LogicalUnit(
                                id=lunMapping['source_logical_unit_id'],
                            )
                        ],
                    ),
                ) if lunMapping['source_logical_unit_id'] else None,
                to=otypes.Disk(
                    lun_storage=otypes.HostStorage(
                        type=otypes.StorageType(lunMapping['dest_storage_type'])
                        if (lunMapping['dest_storage_type'] in ['iscsi', 'fcp']) else None,
                        logical_units=[
                            otypes.LogicalUnit(
                                id=lunMapping['dest_logical_unit_id'],
                                port=lunMapping['dest_logical_unit_port'],
                                portal=lunMapping['dest_logical_unit_portal'],
                                address=lunMapping['dest_logical_unit_address'],
                                target=lunMapping['dest_logical_unit_target'],
                                password=lunMapping['dest_logical_unit_password'],
                                username=lunMapping['dest_logical_unit_username'],
                            )
                        ],
                    ),
                ) if lunMapping['dest_logical_unit_id'] else None,
            )
        )
    return lunMappings


def _get_cluster_mappings(module):
    """Build RegistrationClusterMapping objects from `cluster_mappings` params."""
    clusterMappings = list()
    for clusterMapping in module.params['cluster_mappings']:
        clusterMappings.append(
            otypes.RegistrationClusterMapping(
                from_=otypes.Cluster(
                    name=clusterMapping['source_name'],
                ),
                to=otypes.Cluster(
                    name=clusterMapping['dest_name'],
                ) if clusterMapping['dest_name'] else None,
            )
        )
    return clusterMappings


def _get_vnic_profile_mappings(module):
    """Build VnicProfileMapping objects from `vnic_profile_mappings` params."""
    vnicProfileMappings = list()
    for vnicProfileMapping in module.params['vnic_profile_mappings']:
        vnicProfileMappings.append(
            otypes.VnicProfileMapping(
                source_network_name=vnicProfileMapping['source_network_name'],
                source_network_profile_name=vnicProfileMapping['source_profile_name'],
                target_vnic_profile=otypes.VnicProfile(
                    id=vnicProfileMapping['target_profile_id'],
                ) if vnicProfileMapping['target_profile_id'] else None,
            )
        )
    return vnicProfileMappings


def import_vm(module, connection):
    """
    Import an external VM (kvm/xen/vmware) into oVirt.

    Returns False when a VM with the requested name already exists,
    True after the import completed (signalled by engine event 1152).
    """
    vms_service = connection.system_service().vms_service()
    if search_by_name(vms_service, module.params['name']) is not None:
        return False
    events_service = connection.system_service().events_service()
    # Remember the latest event id so we only scan for events newer than now:
    last_event = events_service.list(max=1)[0]
    # Exactly one of kvm/xen/vmware is expected to be set:
    external_type = [
        tmp for tmp in ['kvm', 'xen', 'vmware'] if module.params[tmp] is not None
    ][0]
    external_vm = module.params[external_type]
    imports_service = connection.system_service().external_vm_imports_service()
    imported_vm = imports_service.add(
        otypes.ExternalVmImport(
            vm=otypes.Vm(
                name=module.params['name']
            ),
            name=external_vm.get('name'),
            username=external_vm.get('username', 'test'),
            password=external_vm.get('password', 'test'),
            provider=otypes.ExternalVmProviderType(external_type),
            url=external_vm.get('url'),
            cluster=otypes.Cluster(
                name=module.params['cluster'],
            ) if module.params['cluster'] else None,
            storage_domain=otypes.StorageDomain(
                name=external_vm.get('storage_domain'),
            ) if external_vm.get('storage_domain') else None,
            sparse=external_vm.get('sparse', True),
            host=otypes.Host(
                name=module.params['host'],
            ) if module.params['host'] else None,
        )
    )
    # Wait until an event with code 1152 (import finished) for our VM appears:
    vms_service = connection.system_service().vms_service()
    wait(
        service=vms_service.vm_service(imported_vm.vm.id),
        condition=lambda vm: len([
            event
            for event in events_service.list(
                from_=int(last_event.id),
                search='type=1152 and vm.id=%s' % vm.id,
            )
        ]) > 0 if vm is not None else False,
        fail_condition=lambda vm: vm is None,
        timeout=module.params['timeout'],
        poll_interval=module.params['poll_interval'],
    )
    return True
def control_state(vm, vms_service, module):
    """
    Settle the VM into a controllable state before the main state machine
    runs: wait out transient statuses, fail on uncontrollable ones, and
    handle a VM that is already powering down.
    """
    if vm is None:
        return
    force = module.params['force']
    state = module.params['state']
    vm_service = vms_service.vm_service(vm.id)
    if vm.status == otypes.VmStatus.IMAGE_LOCKED:
        # Image locked resolves into DOWN; wait for it:
        wait(
            service=vm_service,
            condition=lambda vm: vm.status == otypes.VmStatus.DOWN,
        )
    elif vm.status == otypes.VmStatus.SAVING_STATE:
        # Result state is SUSPENDED, we should wait to be suspended:
        wait(
            service=vm_service,
            condition=lambda vm: vm.status == otypes.VmStatus.SUSPENDED,
        )
    elif (
        vm.status == otypes.VmStatus.UNASSIGNED or
        vm.status == otypes.VmStatus.UNKNOWN
    ):
        # Invalid states:
        module.fail_json(msg="Not possible to control VM, if it's in '{0}' status".format(vm.status))
    elif vm.status == otypes.VmStatus.POWERING_DOWN:
        if (force and state == 'stopped') or state == 'absent':
            # Hard-stop when forced or about to remove:
            vm_service.stop()
            wait(
                service=vm_service,
                condition=lambda vm: vm.status == otypes.VmStatus.DOWN,
            )
        else:
            # If VM is powering down, wait to be DOWN or UP.
            # VM can end in UP state in case there is no GA
            # or ACPI on the VM or shutdown operation crashed:
            wait(
                service=vm_service,
                condition=lambda vm: vm.status in [otypes.VmStatus.DOWN, otypes.VmStatus.UP],
            )
usb_support=dict(type='bool'), sso=dict(type='bool'), quota_id=dict(type='str'), high_availability=dict(type='bool'), high_availability_priority=dict(type='int'), lease=dict(type='str'), stateless=dict(type='bool'), delete_protected=dict(type='bool'), force=dict(type='bool', default=False), nics=dict(type='list', default=[]), cloud_init=dict(type='dict'), cloud_init_nics=dict(type='list', default=[]), cloud_init_persist=dict(type='bool', default=False, aliases=['sysprep_persist']), kernel_params_persist=dict(type='bool', default=False), sysprep=dict(type='dict'), host=dict(type='str'), clone=dict(type='bool', default=False), clone_permissions=dict(type='bool', default=False), kernel_path=dict(type='str'), initrd_path=dict(type='str'), kernel_params=dict(type='str'), instance_type=dict(type='str'), description=dict(type='str'), comment=dict(type='str'), timezone=dict(type='str'), serial_policy=dict(type='str', choices=['vm', 'host', 'custom']), serial_policy_value=dict(type='str'), vmware=dict(type='dict'), xen=dict(type='dict'), kvm=dict(type='dict'), cpu_mode=dict(type='str'), placement_policy=dict(type='str'), custom_compatibility_version=dict(type='str'), ticket=dict(type='bool', default=None), cpu_pinning=dict(type='list'), soundcard_enabled=dict(type='bool', default=None), smartcard_enabled=dict(type='bool', default=None), io_threads=dict(type='int', default=None), ballooning_enabled=dict(type='bool', default=None), rng_device=dict(type='str'), numa_tune_mode=dict(type='str', choices=['interleave', 'preferred', 'strict']), numa_nodes=dict(type='list', default=[]), custom_properties=dict(type='list'), watchdog=dict(type='dict'), host_devices=dict(type='list'), graphical_console=dict(type='dict'), exclusive=dict(type='bool'), export_domain=dict(default=None), export_ova=dict(type='dict'), force_migrate=dict(type='bool'), migrate=dict(type='bool', default=None), next_run=dict(type='bool'), snapshot_name=dict(type='str'), snapshot_vm=dict(type='str'), ) module = 
AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, required_one_of=[['id', 'name']], required_if=[ ('state', 'registered', ['storage_domain']), ], required_together=[['snapshot_name', 'snapshot_vm']] ) check_sdk(module) check_params(module) try: state = module.params['state'] auth = module.params.pop('auth') connection = create_connection(auth) vms_service = connection.system_service().vms_service() vms_module = VmsModule( connection=connection, module=module, service=vms_service, ) vm = vms_module.search_entity(list_params={'all_content': True}) control_state(vm, vms_service, module) if state in ('present', 'running', 'next_run'): if module.params['xen'] or module.params['kvm'] or module.params['vmware']: vms_module.changed = import_vm(module, connection) # In case of wait=false and state=running, waits for VM to be created # In case VM don't exist, wait for VM DOWN state, # otherwise don't wait for any state, just update VM: ret = vms_module.create( entity=vm, result_state=otypes.VmStatus.DOWN if vm is None else None, update_params={'next_run': module.params['next_run']} if module.params['next_run'] is not None else None, clone=module.params['clone'], clone_permissions=module.params['clone_permissions'], _wait=True if not module.params['wait'] and state == 'running' else module.params['wait'], ) # If VM is going to be created and check_mode is on, return now: if module.check_mode and ret.get('id') is None: module.exit_json(**ret) vms_module.post_present(ret['id']) # Run the VM if it was just created, else don't run it: if state == 'running': def kernel_persist_check(): return (module.params.get('kernel_params') or module.params.get('initrd_path') or module.params.get('kernel_path') and not module.params.get('cloud_init_persist')) initialization = vms_module.get_initialization() ret = vms_module.action( action='start', post_action=vms_module._post_start_action, action_condition=lambda vm: ( vm.status not in [ otypes.VmStatus.MIGRATING, 
otypes.VmStatus.POWERING_UP, otypes.VmStatus.REBOOT_IN_PROGRESS, otypes.VmStatus.WAIT_FOR_LAUNCH, otypes.VmStatus.UP, otypes.VmStatus.RESTORING_STATE, ] ), wait_condition=lambda vm: vm.status == otypes.VmStatus.UP, # Start action kwargs: use_cloud_init=True if not module.params.get('cloud_init_persist') and module.params.get('cloud_init') is not None else None, use_sysprep=True if not module.params.get('cloud_init_persist') and module.params.get('sysprep') is not None else None, vm=otypes.Vm( placement_policy=otypes.VmPlacementPolicy( hosts=[otypes.Host(name=module.params['host'])] ) if module.params['host'] else None, initialization=initialization, os=otypes.OperatingSystem( cmdline=module.params.get('kernel_params'), initrd=module.params.get('initrd_path'), kernel=module.params.get('kernel_path'), ) if (kernel_persist_check()) else None, ) if ( kernel_persist_check() or module.params.get('host') or initialization is not None and not module.params.get('cloud_init_persist') ) else None, ) if module.params['ticket']: vm_service = vms_service.vm_service(ret['id']) graphics_consoles_service = vm_service.graphics_consoles_service() graphics_console = graphics_consoles_service.list()[0] console_service = graphics_consoles_service.console_service(graphics_console.id) ticket = console_service.remote_viewer_connection_file() if ticket: ret['vm']['remote_vv_file'] = ticket if state == 'next_run': # Apply next run configuration, if needed: vm = vms_service.vm_service(ret['id']).get() if vm.next_run_configuration_exists: ret = vms_module.action( action='reboot', entity=vm, action_condition=lambda vm: vm.status == otypes.VmStatus.UP, wait_condition=lambda vm: vm.status == otypes.VmStatus.UP, ) ret['changed'] = vms_module.changed elif state == 'stopped': if module.params['xen'] or module.params['kvm'] or module.params['vmware']: vms_module.changed = import_vm(module, connection) ret = vms_module.create( entity=vm, result_state=otypes.VmStatus.DOWN if vm is None else None, 
clone=module.params['clone'], clone_permissions=module.params['clone_permissions'], ) if module.params['force']: ret = vms_module.action( action='stop', action_condition=lambda vm: vm.status != otypes.VmStatus.DOWN, wait_condition=vms_module.wait_for_down, ) else: ret = vms_module.action( action='shutdown', pre_action=vms_module._pre_shutdown_action, action_condition=lambda vm: vm.status != otypes.VmStatus.DOWN, wait_condition=vms_module.wait_for_down, ) vms_module.post_present(ret['id']) elif state == 'suspended': ret = vms_module.create( entity=vm, result_state=otypes.VmStatus.DOWN if vm is None else None, clone=module.params['clone'], clone_permissions=module.params['clone_permissions'], ) vms_module.post_present(ret['id']) ret = vms_module.action( action='suspend', pre_action=vms_module._pre_suspend_action, action_condition=lambda vm: vm.status != otypes.VmStatus.SUSPENDED, wait_condition=lambda vm: vm.status == otypes.VmStatus.SUSPENDED, ) elif state == 'absent': ret = vms_module.remove() elif state == 'registered': storage_domains_service = connection.system_service().storage_domains_service() # Find the storage domain with unregistered VM: sd_id = get_id_by_name(storage_domains_service, module.params['storage_domain']) storage_domain_service = storage_domains_service.storage_domain_service(sd_id) vms_service = storage_domain_service.vms_service() # Find the unregistered VM we want to register: vms = vms_service.list(unregistered=True) vm = next( (vm for vm in vms if (vm.id == module.params['id'] or vm.name == module.params['name'])), None ) changed = False if vm is None: vm = vms_module.search_entity() if vm is None: raise ValueError( "VM '%s(%s)' wasn't found." 
% (module.params['name'], module.params['id']) ) else: # Register the vm into the system: changed = True vm_service = vms_service.vm_service(vm.id) vm_service.register( allow_partial_import=module.params['allow_partial_import'], cluster=otypes.Cluster( name=module.params['cluster'] ) if module.params['cluster'] else None, vnic_profile_mappings=_get_vnic_profile_mappings(module) if module.params['vnic_profile_mappings'] else None, reassign_bad_macs=module.params['reassign_bad_macs'] if module.params['reassign_bad_macs'] is not None else None, registration_configuration=otypes.RegistrationConfiguration( cluster_mappings=_get_cluster_mappings(module), role_mappings=_get_role_mappings(module), domain_mappings=_get_domain_mappings(module), lun_mappings=_get_lun_mappings(module), affinity_group_mappings=_get_affinity_group_mappings(module), affinity_label_mappings=_get_affinity_label_mappings(module), ) if (module.params['cluster_mappings'] or module.params['role_mappings'] or module.params['domain_mappings'] or module.params['lun_mappings'] or module.params['affinity_group_mappings'] or module.params['affinity_label_mappings']) else None ) if module.params['wait']: vm = vms_module.wait_for_import() else: # Fetch vm to initialize return. 
vm = vm_service.get() ret = { 'changed': changed, 'id': vm.id, 'vm': get_dict_of_struct(vm) } elif state == 'exported': if module.params['export_domain']: export_service = vms_module._get_export_domain_service() export_vm = search_by_attributes(export_service.vms_service(), id=vm.id) ret = vms_module.action( entity=vm, action='export', action_condition=lambda t: export_vm is None or module.params['exclusive'], wait_condition=lambda t: t is not None, post_action=vms_module.post_export_action, storage_domain=otypes.StorageDomain(id=export_service.get().id), exclusive=module.params['exclusive'], ) elif module.params['export_ova']: export_vm = module.params['export_ova'] ret = vms_module.action( entity=vm, action='export_to_path_on_host', host=otypes.Host(name=export_vm.get('host')), directory=export_vm.get('directory'), filename=export_vm.get('filename'), ) module.exit_json(**ret) except Exception as e: module.fail_json(msg=str(e), exception=traceback.format_exc()) finally: connection.close(logout=auth.get('token') is None) if __name__ == "__main__": main()
gpl-3.0
gandreello/openthread
tools/harness-automation/cases/leader_9_2_5.py
16
1875
#!/usr/bin/env python # # Copyright (c) 2016, The OpenThread Authors. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # 3. Neither the name of the copyright holder nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # import unittest from autothreadharness.harness_case import HarnessCase class Leader_9_2_5(HarnessCase): role = HarnessCase.ROLE_LEADER case = '9 2 5' golden_devices_required = 1 def on_dialog(self, dialog, title): pass if __name__ == '__main__': unittest.main()
bsd-3-clause
kracekumar/drf_tada
user/test_api.py
2
1796
# -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APIClient

from commons.base_testcase import BaseApiTestCase


class UserApiTestCase(BaseApiTestCase):
    """Tests for the user-detail, change-password and user-update endpoints.

    Relies on ``self.user`` and an authenticated ``self.client`` provided by
    BaseApiTestCase.
    """

    def _user_payload(self, **overrides):
        """Serialized form of ``self.user`` as the detail endpoint returns it.

        Keyword overrides replace individual fields (e.g. after an update).
        """
        payload = {
            'id': self.user.pk,
            'first_name': self.user.first_name,
            'last_name': self.user.last_name,
            'username': self.user.username,
            'email': self.user.email,
        }
        payload.update(overrides)
        return payload

    def test_user_read_detail(self):
        # An authenticated request returns the full serialized user.
        detail_url = reverse('user-detail', args=[self.user.pk])
        response = self.client.get(detail_url, format='json')
        assert response.status_code == status.HTTP_200_OK
        assert response.data == self._user_payload()

    def test_user_read_detail_without_login(self):
        # A fresh, unauthenticated client must be rejected with 401.
        anonymous = APIClient()
        detail_url = reverse('user-detail', args=[self.user.pk])
        response = anonymous.get(detail_url, format='json')
        assert response.status_code == status.HTTP_401_UNAUTHORIZED

    def test_change_password(self):
        # Posting a new password is acknowledged with 202 Accepted.
        password_url = reverse('change-password', args=[self.user.pk])
        response = self.client.post(
            password_url, data={'password': 'password'}, format='json')
        assert response.status_code == status.HTTP_202_ACCEPTED

    def test_update_user_details(self):
        # PATCHing the username returns 202 and the updated representation.
        detail_url = reverse('user-detail', args=[self.user.pk])
        response = self.client.patch(
            detail_url, data={'username': 'Joker'}, format='json')
        assert response.status_code == status.HTTP_202_ACCEPTED
        assert response.data == self._user_payload(username='Joker')
bsd-3-clause
crcresearch/osf.io
api_tests/users/views/test_user_nodes_list.py
5
7012
import pytest

from api.base.settings.defaults import API_BASE
from api_tests.nodes.filters.test_filters import NodesListFilteringMixin, NodesListDateFilteringMixin
# NOTE(review): BookmarkCollectionFactory and NodeFactory appear unused below;
# confirm before pruning the import.
from osf_tests.factories import (
    AuthUserFactory,
    BookmarkCollectionFactory,
    CollectionFactory,
    NodeFactory,
    PreprintFactory,
    ProjectFactory,
    RegistrationFactory,
)
from website.views import find_bookmark_collection


@pytest.mark.django_db
class TestUserNodes:
    """Visibility tests for the /users/<id>/nodes/ list endpoint."""

    @pytest.fixture()
    def user_one(self):
        # A user with a populated social field, so the profile is non-trivial.
        user_one = AuthUserFactory()
        user_one.social['twitter'] = 'RheisenDennis'
        user_one.save()
        return user_one

    @pytest.fixture()
    def user_two(self):
        return AuthUserFactory()

    @pytest.fixture()
    def public_project_user_one(self, user_one):
        return ProjectFactory(title='Public Project User One', is_public=True, creator=user_one)

    @pytest.fixture()
    def private_project_user_one(self, user_one):
        return ProjectFactory(title='Private Project User One', is_public=False, creator=user_one)

    @pytest.fixture()
    def public_project_user_two(self, user_two):
        return ProjectFactory(title='Public Project User Two', is_public=True, creator=user_two)

    @pytest.fixture()
    def private_project_user_two(self, user_two):
        return ProjectFactory(title='Private Project User Two', is_public=False, creator=user_two)

    @pytest.fixture()
    def deleted_project_user_one(self, user_one):
        return CollectionFactory(title='Deleted Project User One', is_public=False, creator=user_one, is_deleted=True)

    @pytest.fixture()
    def folder(self):
        return CollectionFactory()

    @pytest.fixture()
    def deleted_folder(self, user_one):
        return CollectionFactory(title='Deleted Folder User One', is_public=False, creator=user_one, is_deleted=True)

    @pytest.fixture()
    def bookmark_collection(self, user_one):
        return find_bookmark_collection(user_one)

    @pytest.fixture()
    def registration(self, user_one, public_project_user_one):
        return RegistrationFactory(project=public_project_user_one, creator=user_one, is_public=True)

    def test_user_nodes(self, app, user_one, user_two, public_project_user_one,
                        public_project_user_two, private_project_user_one,
                        private_project_user_two, deleted_project_user_one,
                        folder, deleted_folder, registration):
        """One combined scenario: checks status codes and which node ids are
        visible to the owner, to anonymous users, and to a different user.
        Folders, deleted projects and registrations must never appear."""

        # test_authorized_in_gets_200
        url = "/{}users/{}/nodes/".format(API_BASE, user_one._id)
        res = app.get(url, auth=user_one.auth)
        assert res.status_code == 200
        assert res.content_type == 'application/vnd.api+json'

        # test_anonymous_gets_200
        url = "/{}users/{}/nodes/".format(API_BASE, user_one._id)
        res = app.get(url)
        assert res.status_code == 200
        assert res.content_type == 'application/vnd.api+json'

        # test_get_projects_logged_in
        url = "/{}users/{}/nodes/".format(API_BASE, user_one._id)
        res = app.get(url, auth=user_one.auth)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert public_project_user_one._id in ids
        assert private_project_user_one._id in ids
        assert public_project_user_two._id not in ids
        assert private_project_user_two._id not in ids
        assert folder._id not in ids
        assert deleted_folder._id not in ids
        assert deleted_project_user_one._id not in ids
        assert registration._id not in ids

        # test_get_projects_not_logged_in
        url = "/{}users/{}/nodes/".format(API_BASE, user_one._id)
        res = app.get(url)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert public_project_user_one._id in ids
        assert private_project_user_one._id not in ids
        assert public_project_user_two._id not in ids
        assert private_project_user_two._id not in ids
        assert folder._id not in ids
        assert deleted_project_user_one._id not in ids
        assert registration._id not in ids

        # test_get_projects_logged_in_as_different_user
        url = "/{}users/{}/nodes/".format(API_BASE, user_two._id)
        res = app.get(url, auth=user_one.auth)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert public_project_user_two._id in ids
        assert public_project_user_one._id not in ids
        assert private_project_user_one._id not in ids
        assert private_project_user_two._id not in ids
        assert folder._id not in ids
        assert deleted_project_user_one._id not in ids
        assert registration._id not in ids


@pytest.mark.django_db
class TestUserNodesPreprintsFiltering:
    """Tests for the ?filter[preprint]=true/false query on /users/me/nodes/."""

    @pytest.fixture()
    def user(self):
        return AuthUserFactory()

    @pytest.fixture()
    def no_preprints_node(self, user):
        return ProjectFactory(creator=user)

    @pytest.fixture()
    def valid_preprint_node(self, user):
        return ProjectFactory(creator=user)

    @pytest.fixture()
    def orphaned_preprint_node(self, user):
        return ProjectFactory(creator=user)

    @pytest.fixture()
    def abandoned_preprint_node(self, user):
        return ProjectFactory(creator=user)

    @pytest.fixture()
    def valid_preprint(self, valid_preprint_node):
        return PreprintFactory(project=valid_preprint_node)

    @pytest.fixture()
    def abandoned_preprint(self, abandoned_preprint_node):
        # Unpublished preprint: its node should count as "not a preprint".
        return PreprintFactory(project=abandoned_preprint_node, is_published=False)

    @pytest.fixture()
    def orphaned_preprint(self, orphaned_preprint_node):
        # A preprint whose primary file was removed from its node.
        orphaned_preprint = PreprintFactory(project=orphaned_preprint_node)
        orphaned_preprint.node.preprint_file = None
        orphaned_preprint.node.save()
        return orphaned_preprint

    @pytest.fixture()
    def url_base(self):
        return '/{}users/me/nodes/?filter[preprint]='.format(API_BASE)

    def test_filter_false(self, app, user, abandoned_preprint_node, no_preprints_node, orphaned_preprint_node, url_base):
        # Nodes without a valid, published preprint match filter[preprint]=false.
        expected_ids = [abandoned_preprint_node._id, no_preprints_node._id, orphaned_preprint_node._id]
        res = app.get('{}false'.format(url_base), auth=user.auth)
        actual_ids = [n['id'] for n in res.json['data']]
        assert set(expected_ids) == set(actual_ids)

    def test_filter_true(self, app, user, valid_preprint_node, valid_preprint, url_base):
        # Only the node backing a valid preprint matches filter[preprint]=true.
        expected_ids = [valid_preprint_node._id]
        res = app.get('{}true'.format(url_base), auth=user.auth)
        actual_ids = [n['id'] for n in res.json['data']]
        assert set(expected_ids) == set(actual_ids)


@pytest.mark.django_db
class TestNodeListFiltering(NodesListFilteringMixin):
    """Reuses the shared node-list filtering suite against /users/me/nodes/."""

    @pytest.fixture()
    def url(self):
        return '/{}users/me/nodes/?'.format(API_BASE)


@pytest.mark.django_db
class TestNodeListDateFiltering(NodesListDateFilteringMixin):
    """Reuses the shared date-filtering suite against /users/me/nodes/."""

    @pytest.fixture()
    def url(self):
        return '/{}users/me/nodes/?'.format(API_BASE)
apache-2.0
nicolasmoreau/NodeSoftware
nodes/tipbase/mapping_examplenode.py
32
12047
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
The config file for importing the example_data dataset into ExampleNode.

Go to http://vamdc.tmy.se/doc/importing.html
for understanding what happens below.

Use this file this way (first edit base path) :

$ cd ../imptools
$ python run_rewrite.py ../nodes/ExampleNode/mapping_examplenode.py

This will create the output files in the path defined below. If you already
have a compatible mysql database you can then import these files directly
with the ExampleNode/load.sql script (edit load.sql first to have the
right paths).

$ cd node/ExampleNode
$ mysql -u <databaseuser> -p < load.sql
"""

# import database libraries and helper methods
# NOTE(review): 'string' is not referenced anywhere below — confirm and remove.
import string
from imptools.linefuncs import constant, bySepNr, charrange


# custom linefunctions for our example

def get_bibtex(linedata):
    "Return the raw data"
    # Identity linefunc: the whole bibtex block is stored verbatim.
    return linedata

def get_bibtex_dbref(linedata):
    """
    Extract the dbref from the bibtex entry (e.g. REF1, REF2...)

    The first whitespace-separated token is expected to look like
    '@article{REF1,' — the part after '{' (minus the trailing comma)
    is the database reference key.
    """
    first_line = linedata.split()[0]
    typ, dbref = first_line.split('{')
    return dbref.strip(',').strip()

def merge_cols_by_sep(linedata, *sepNr):
    """
    Merges data from several columns (separated by ,) into one,
    separating them with '-'. sepNr are the nth position of the file,
    separated by 'sep'. Assumes a single line input.
    """
    sep = ','
    # bySepNr picks the nr-th ','-separated field; the joined result is
    # used below as a synthetic unique id for states.
    return '-'.join([bySepNr(linedata, nr, sep=sep).strip() for nr in sepNr])


# Setting up filenames - change to correct path
#base = "/home/vamdc/NodeSoftware/nodes/ExampleNode/"
base = "/home/griatch/Devel/Work/VAMDC-git/nodes/ExampleNode/"
inbase = base + "example_data/"

# Raw indata files
species_list_file = inbase + 'species.dat'
publications_file = inbase + "references.dat"
transitions_file = inbase + 'transitions.dat'

# Parsed files intended for direct loading into database
species_out = base + 'species.in'
publications_out = base + 'references.in'
states_out = base + 'states.in'
transitions_out = base + 'transitions.in'

# The mapping itself
mapping = [
    # Populate Species model, using the species input file.
    # This file uses fixed collumns which we parse by line index.
    {'outfile':species_out,
     'infiles':species_list_file,
     'commentchar':'#',
     'headlines':1,
     'linemap':[
         {'cname':'id', 'cbyte':(charrange, 0, 9)},
         {'cname':'name', 'cbyte':(charrange, 10, 16), 'cnull':'\N'},
         {'cname':'ion', 'cbyte':(charrange, 17, 22), 'cnull':'\N'},
         {'cname':'mass', 'cbyte':(charrange, 23, 35), 'cnull':'\N'},
         {'cname':'massno', 'cbyte':(charrange, 36, 46), 'cnull':'\N'},
         {'cname':'ionen_ev', 'cbyte':(charrange, 47, 57), 'cnull':'\N'},
         {'cname':'ionen_cm1', 'cbyte':(charrange, 58, 70), 'cnull':'\N'},
         {'cname':'atomic', 'cbyte':(charrange, 71, 79), 'cnull':'\N'},
         {'cname':'isotope', 'cbyte':(charrange, 80, 87), 'cnull':'\N'},
         ],
     }, # end of definition for species file

    # Populate Publication model with bibtex data file. This file
    # contains bibtex block entries (we cannot parse each line
    # separately), so we need to define the possible start/end blocks
    # separating entries.
    {'outfile':publications_out,
     'infiles':publications_file,
     'headlines':0,
     'commentchar':'%',
     'startblock':('@article','@book','@techreport','@inproceedings','@misc','@ARTICLE'),
     'endblock':('@article','@book','@techreport','@inproceedings','@misc','@ARTICLE'),
     'linemap':[
         {'cname':'dbref', 'cbyte':(get_bibtex_dbref,)},
         {'cname':'bibtex', 'cbyte':(get_bibtex,)},
         ],
     }, # end of bibtex publication data

    # All states and transitions are stored in one file, where each
    # line contains all data for the transition:
    #
    #   upper_state info | lower _state info | transition info
    #
    # Each data unit is separated by ','.
    #
    # We parse this file three times to get the upper, lower and
    # transition information respectively. We must also create a
    # unique id for each state so that the transition can reference
    # the correct states properly.

    # State model read from transitions file - upper states
    # (first pass)
    {'outfile': states_out,
     'infiles': transitions_file,
     'commentchar': '#',
     'headlines':3,
     'linemap':[
         # creating a unique id hash by combining data from the
         # species,coup,jnum,term,energy columns (upper states)
         {'cname':'id',
          'cbyte':(merge_cols_by_sep, 0, 4, 6, 5, 1), 'cnull':'N/A'},
         {'cname':'species', 'cbyte':(bySepNr, 0), 'cnull':'N/A'},
         {'cname':'energy', 'cbyte':(bySepNr, 1), 'cnull':'N/A'},
         {'cname':'config', 'cbyte':(bySepNr, 2), 'cnull':'N/A'},
         {'cname':'lande', 'cbyte':(bySepNr, 3), 'cnull':'N/A'},
         {'cname':'coupling', 'cbyte':(bySepNr, 4), 'cnull':'N/A'},
         {'cname':'term', 'cbyte':(bySepNr, 5), 'cnull':'N/A'},
         {'cname':'j', 'cbyte':(bySepNr,6), 'cnull':'N/A'},
         {'cname':'l', 'cbyte':(bySepNr,7), 'cnull':'N/A'},
         {'cname':'s', 'cbyte':(bySepNr,8), 'cnull':'N/A'},
         {'cname':'p', 'cbyte':(bySepNr,9), 'cnull':'N/A'},
         {'cname':'j1', 'cbyte':(bySepNr,10), 'cnull':'N/A'},
         {'cname':'j2', 'cbyte':(bySepNr,11), 'cnull':'N/A'},
         {'cname':'k', 'cbyte':(bySepNr,12), 'cnull':'N/A'},
         {'cname':'s2', 'cbyte':(bySepNr,13), 'cnull':'N/A'},
         {'cname':'jc', 'cbyte':(bySepNr,14), 'cnull':'N/A'},
         # half-times
         {'cname':'tau_exp', 'cbyte':(bySepNr,15), 'cnull':'N/A'},
         {'cname':'tau_calc', 'cbyte':(bySepNr,16), 'cnull':'N/A'},
         {'cname':'tau_exp_ref', 'cbyte':(bySepNr,17), 'cnull':'N/A'},
         {'cname':'tau_calc_ref', 'cbyte':(bySepNr,18), 'cnull':'N/A'},
         {'cname':'energy_ref', 'cbyte':(bySepNr, 19), 'cnull':'N/A'},
         {'cname':'lande_ref', 'cbyte':(bySepNr, 20), 'cnull':'N/A'},
         {'cname':'level_ref', 'cbyte':(bySepNr, 21), 'cnull':'N/A'},
         ]
     }, # end of upper states

    # State model read from transitions file - lower states
    # (second pass)
    {'outfile': states_out,
     'infiles': transitions_file,
     'commentchar': '#',
     'headlines':3,
     'linemap':[
         # species,coup,jnum,term,energy (lower states)
         {'cname':'id',
          'cbyte':(merge_cols_by_sep, 22, 26, 28, 27, 23), 'cnull':'N/A'},
         {'cname':'species', 'cbyte':(bySepNr, 22), 'cnull':'N/A'},
         {'cname':'energy', 'cbyte':(bySepNr, 23), 'cnull':'N/A'},
         {'cname':'config', 'cbyte':(bySepNr, 24), ##2
          'cnull':'N/A'},
         {'cname':'lande', 'cbyte':(bySepNr, 25), 'cnull':'N/A'},
         {'cname':'coupling', 'cbyte':(bySepNr, 26), 'cnull':'N/A'},
         {'cname':'term', 'cbyte':(bySepNr, 27), 'cnull':'N/A'},
         {'cname':'j', 'cbyte':(bySepNr, 28), 'cnull':'N/A'},
         {'cname':'l', 'cbyte':(bySepNr,29), 'cnull':'N/A'},
         {'cname':'s', 'cbyte':(bySepNr,30), 'cnull':'N/A'},
         {'cname':'p', 'cbyte':(bySepNr,31), 'cnull':'N/A'},
         {'cname':'j1', 'cbyte':(bySepNr,32), 'cnull':'N/A'},
         {'cname':'j2', 'cbyte':(bySepNr,33), 'cnull':'N/A'},
         {'cname':'k', 'cbyte':(bySepNr,34), 'cnull':'N/A'},
         {'cname':'s2', 'cbyte':(bySepNr,35), 'cnull':'N/A'},
         {'cname':'jc', 'cbyte':(bySepNr,36), 'cnull':'N/A'},
         # half-times
         {'cname':'tau_exp', 'cbyte':(bySepNr,37), 'cnull':'N/A'},
         {'cname':'tau_calc', 'cbyte':(bySepNr,38), 'cnull':'N/A'},
         {'cname':'tau_exp_ref', 'cbyte':(bySepNr,39), 'cnull':'N/A'},
         {'cname':'tau_calc_ref', 'cbyte':(bySepNr,40), 'cnull':'N/A'},
         {'cname':'energy_ref', 'cbyte':(bySepNr, 41), 'cnull':'N/A'},
         {'cname':'lande_ref', 'cbyte':(bySepNr, 42), 'cnull':'N/A'},
         {'cname':'level_ref', 'cbyte':(bySepNr, 43), 'cnull':'N/A'},
         ]
     }, # end of lower states

    # Transition model, from the transitions file
    # (third pass)
    {'outfile':transitions_out,
     'infiles':transitions_file,
     'commentchar':'#',
     'headlines':3,
     'linemap':[
         {'cname':'id', 'cbyte':(constant, 'NULL'), 'cnull':'NULL'},
         # here we recreate the same ids we used for the upper/lower states before
         {'cname':'upstate',
          'cbyte':(merge_cols_by_sep, 0, 4, 6, 5, 1), 'cnull':'N/A'},
         {'cname':'lostate',
          'cbyte':(merge_cols_by_sep, 22, 26, 28, 27, 23), 'cnull':'N/A'},
         {'cname':'vacwave', 'cbyte':(bySepNr, 44), 'cnull':'N/A'},
         {'cname':'species', # we pick this from the start of the line
          'cbyte':(bySepNr, 0), 'cnull':'N/A'},
         {'cname':'loggf', 'cbyte':(bySepNr, 45), 'cnull':'N/A'},
         {'cname':'landeff', 'cbyte':(bySepNr, 46), 'cnull':'N/A'},
         {'cname':'gammarad', 'cbyte':(bySepNr, 47), 'cnull':'N/A'},
         {'cname':'gammastark', 'cbyte':(bySepNr, 48), 'cnull':'N/A'},
         {'cname':'gammawaals', 'cbyte':(bySepNr, 49), 'cnull':'N/A'},
         {'cname':'wave_accur', 'cbyte':(bySepNr, 50), 'cnull':'N/A'},
         {'cname':'loggf_accur', 'cbyte':(bySepNr, 51), 'cnull':'N/A'},
         {'cname':'comment', 'cbyte':(bySepNr, 52), 'cnull':'N/A'},
         {'cname':'wave_ref', 'cbyte':(bySepNr, 53), 'cnull':'N/A'},
         {'cname':'loggf_ref', 'cbyte':(bySepNr, 54), 'cnull':'N/A'},
         {'cname':'lande_ref', 'cbyte':(bySepNr, 55), 'cnull':'N/A'},
         {'cname':'gammarad_ref', 'cbyte':(bySepNr, 56), 'cnull':'N/A'},
         {'cname':'gammastark_ref', 'cbyte':(bySepNr, 57), 'cnull':'N/A'},
         {'cname':'waals_ref', 'cbyte':(bySepNr, 58), 'cnull':'N/A'},
         ],
     }, # end of transitions file reading
    ]
gpl-3.0
pelya/commandergenius
project/jni/python/src/Lib/email/mime/multipart.py
480
1573
# Copyright (C) 2002-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: email-sig@python.org

"""Base class for MIME multipart/* type messages."""

__all__ = ['MIMEMultipart']

from email.mime.base import MIMEBase


class MIMEMultipart(MIMEBase):
    """Base class for MIME multipart/* type messages."""

    def __init__(self, _subtype='mixed', boundary=None, _subparts=None,
                 **_params):
        """Create a multipart/* message.

        _subtype  -- subtype of the multipart content type; defaults to
                     'mixed', producing a multipart/mixed message with the
                     proper Content-Type and MIME-Version headers.
        boundary  -- explicit multipart boundary string; when omitted, a
                     boundary is calculated lazily as needed.
        _subparts -- optional iterable (e.g. a list) of initial subparts to
                     attach; more can always be added later with attach().
        _params   -- additional Content-Type header parameters.
        """
        MIMEBase.__init__(self, 'multipart', _subtype, **_params)

        # The Message superclass's is_multipart() assumes _payload is a
        # list for multipart messages, so start from an empty list.
        self._payload = []

        for part in (_subparts or ()):
            self.attach(part)
        if boundary:
            self.set_boundary(boundary)
lgpl-2.1
tonyli71/designate
functionaltests/api/v2/test_zone_ownership_transfers.py
4
7725
""" Copyright 2015 Rackspace Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from tempest_lib import exceptions from functionaltests.common import datagen from functionaltests.common import utils from functionaltests.api.v2.base import DesignateV2Test from functionaltests.api.v2.clients.transfer_requests_client import \ TransferRequestClient from functionaltests.api.v2.clients.transfer_accepts_client import \ TransferAcceptClient from functionaltests.api.v2.clients.zone_client import ZoneClient from functionaltests.api.v2.fixtures import ZoneFixture from functionaltests.api.v2.fixtures import TransferRequestFixture @utils.parameterized_class class TransferZoneOwnerShipTest(DesignateV2Test): def setUp(self): super(TransferZoneOwnerShipTest, self).setUp() self.increase_quotas(user='default') self.increase_quotas(user='alt') self.zone = self.useFixture(ZoneFixture()).created_zone def test_list_transfer_requests(self): self.useFixture(TransferRequestFixture( zone=self.zone, post_model=datagen.random_transfer_request_data(), )) resp, model = TransferRequestClient.as_user('default') \ .list_transfer_requests() self.assertEqual(resp.status, 200) self.assertGreater(len(model.transfer_requests), 0) def test_create_zone_transfer_request(self): fixture = self.useFixture(TransferRequestFixture( zone=self.zone, post_model=datagen.random_transfer_request_data(), )) self.assertEqual(fixture.post_resp.status, 201) self.assertEqual(fixture.transfer_request.zone_id, self.zone.id) # todo: this fails. 
the zone_name is null in the POST's response, but # it's filled in on a subsequent get # self.assertEqual(fixture.transfer_request.zone_name, self.zone.name) self.assertEqual(fixture.transfer_request.project_id, TransferRequestClient.as_user(fixture.user).tenant_id) self.assertEqual(fixture.transfer_request.target_project_id, None) # check that the zone_name is filled in resp, transfer_request = TransferRequestClient.as_user(fixture.user) \ .get_transfer_request(fixture.transfer_request.id) self.assertEqual(transfer_request.zone_name, self.zone.name) def test_view_zone_transfer_request(self): fixture = self.useFixture(TransferRequestFixture( zone=self.zone, post_model=datagen.random_transfer_request_data(), )) resp, transfer_request = TransferRequestClient.as_user('alt')\ .get_transfer_request(fixture.transfer_request.id) self.assertEqual(resp.status, 200) self.assertEqual(getattr(transfer_request, 'key', None), None) def test_create_zone_transfer_request_scoped(self): target_project_id = TransferRequestClient.as_user('alt').tenant_id post_model = datagen.random_transfer_request_data( target_project_id=target_project_id) fixture = self.useFixture(TransferRequestFixture( zone=self.zone, post_model=post_model, user='default', target_user='alt', )) self.assertEqual(fixture.post_resp.status, 201) self.assertEqual(fixture.transfer_request.zone_id, self.zone.id) # todo: the zone_name is null initially, but shows up on later gets # self.assertEqual(fixture.transfer_request.zone_name, self.zone.name) self.assertEqual(fixture.transfer_request.project_id, TransferRequestClient.as_user(fixture.user).tenant_id) self.assertEqual(fixture.transfer_request.target_project_id, target_project_id) resp, transfer_request = TransferRequestClient.as_user('alt')\ .get_transfer_request(fixture.transfer_request.id) self.assertEqual(resp.status, 200) def test_view_zone_transfer_request_scoped(self): target_project_id = TransferRequestClient.as_user('admin').tenant_id post_model = 
datagen.random_transfer_request_data( target_project_id=target_project_id) fixture = self.useFixture(TransferRequestFixture( zone=self.zone, post_model=post_model, user='default', target_user='admin', )) transfer_request = fixture.transfer_request self.assertEqual(transfer_request.target_project_id, target_project_id) self._assert_exception( exceptions.NotFound, 'zone_transfer_request_not_found', 404, TransferRequestClient.as_user('alt').get_transfer_request, self.zone.id) resp, transfer_request = TransferRequestClient.as_user('admin')\ .get_transfer_request(transfer_request.id) self.assertEqual(resp.status, 200) def test_create_zone_transfer_request_no_body(self): client = TransferRequestClient.as_user('default') resp, transfer_request = client \ .post_transfer_request_empty_body(self.zone.id) self.assertEqual(resp.status, 201) self.addCleanup(TransferRequestFixture.cleanup_transfer_request, client, transfer_request.id) def test_do_zone_transfer(self): fixture = self.useFixture(TransferRequestFixture( zone=self.zone, post_model=datagen.random_transfer_request_data(), user='default', target_user='alt', )) transfer_request = fixture.transfer_request resp, transfer_accept = TransferAcceptClient.as_user('alt')\ .post_transfer_accept( datagen.random_transfer_accept_data( key=transfer_request.key, zone_transfer_request_id=transfer_request.id )) self.assertEqual(resp.status, 201) def test_do_zone_transfer_scoped(self): target_project_id = TransferRequestClient.as_user('alt').tenant_id post_model = datagen.random_transfer_request_data( target_project_id=target_project_id) fixture = self.useFixture(TransferRequestFixture( zone=self.zone, post_model=post_model, user='default', target_user='alt', )) transfer_request = fixture.transfer_request resp, retrived_transfer_request = TransferRequestClient.\ as_user('alt').get_transfer_request(transfer_request.id) self.assertEqual(resp.status, 200) resp, transfer_accept = TransferAcceptClient.as_user('alt')\ .post_transfer_accept( 
datagen.random_transfer_accept_data( key=transfer_request.key, zone_transfer_request_id=transfer_request.id )) self.assertEqual(resp.status, 201) client = ZoneClient.as_user('default') self._assert_exception( exceptions.NotFound, 'domain_not_found', 404, client.get_zone, self.zone.id) resp, zone = ZoneClient.as_user('alt').get_zone(self.zone.id) self.assertEqual(resp.status, 200)
apache-2.0
throwable-one/lettuce
tests/integration/lib/Django-1.2.5/django/contrib/sitemaps/views.py
61
2026
from django.http import HttpResponse, Http404 from django.template import loader from django.contrib.sites.models import get_current_site from django.core import urlresolvers from django.utils.encoding import smart_str from django.core.paginator import EmptyPage, PageNotAnInteger def index(request, sitemaps): current_site = get_current_site(request) sites = [] protocol = request.is_secure() and 'https' or 'http' for section, site in sitemaps.items(): site.request = request if callable(site): pages = site().paginator.num_pages else: pages = site.paginator.num_pages sitemap_url = urlresolvers.reverse('django.contrib.sitemaps.views.sitemap', kwargs={'section': section}) sites.append('%s://%s%s' % (protocol, current_site.domain, sitemap_url)) if pages > 1: for page in range(2, pages+1): sites.append('%s://%s%s?p=%s' % (protocol, current_site.domain, sitemap_url, page)) xml = loader.render_to_string('sitemap_index.xml', {'sitemaps': sites}) return HttpResponse(xml, mimetype='application/xml') def sitemap(request, sitemaps, section=None): maps, urls = [], [] if section is not None: if section not in sitemaps: raise Http404("No sitemap available for section: %r" % section) maps.append(sitemaps[section]) else: maps = sitemaps.values() page = request.GET.get("p", 1) current_site = get_current_site(request) for site in maps: try: if callable(site): urls.extend(site().get_urls(page=page, site=current_site)) else: urls.extend(site.get_urls(page=page, site=current_site)) except EmptyPage: raise Http404("Page %s empty" % page) except PageNotAnInteger: raise Http404("No page '%s'" % page) xml = smart_str(loader.render_to_string('sitemap.xml', {'urlset': urls})) return HttpResponse(xml, mimetype='application/xml')
gpl-3.0
behzadnouri/scipy
scipy/io/matlab/tests/test_miobase.py
110
1338
""" Testing miobase module """ import numpy as np from numpy.testing import assert_raises, assert_equal from scipy.io.matlab.miobase import matdims def test_matdims(): # Test matdims dimension finder assert_equal(matdims(np.array(1)), (1, 1)) # numpy scalar assert_equal(matdims(np.array([1])), (1, 1)) # 1d array, 1 element assert_equal(matdims(np.array([1,2])), (2, 1)) # 1d array, 2 elements assert_equal(matdims(np.array([[2],[3]])), (2, 1)) # 2d array, column vector assert_equal(matdims(np.array([[2,3]])), (1, 2)) # 2d array, row vector # 3d array, rowish vector assert_equal(matdims(np.array([[[2,3]]])), (1, 1, 2)) assert_equal(matdims(np.array([])), (0, 0)) # empty 1d array assert_equal(matdims(np.array([[]])), (0, 0)) # empty 2d assert_equal(matdims(np.array([[[]]])), (0, 0, 0)) # empty 3d # Optional argument flips 1-D shape behavior. assert_equal(matdims(np.array([1,2]), 'row'), (1, 2)) # 1d array, 2 elements # The argument has to make sense though assert_raises(ValueError, matdims, np.array([1,2]), 'bizarre') # Check empty sparse matrices get their own shape from scipy.sparse import csr_matrix, csc_matrix assert_equal(matdims(csr_matrix(np.zeros((3, 3)))), (3, 3)) assert_equal(matdims(csc_matrix(np.zeros((2, 2)))), (2, 2))
bsd-3-clause
akionakamura/scikit-learn
examples/svm/plot_svm_margin.py
318
2328
#!/usr/bin/python # -*- coding: utf-8 -*- """ ========================================================= SVM Margins Example ========================================================= The plots below illustrate the effect the parameter `C` has on the separation line. A large value of `C` basically tells our model that we do not have that much faith in our data's distribution, and will only consider points close to line of separation. A small value of `C` includes more/all the observations, allowing the margins to be calculated using all the data in the area. """ print(__doc__) # Code source: Gaël Varoquaux # Modified for documentation by Jaques Grobler # License: BSD 3 clause import numpy as np import matplotlib.pyplot as plt from sklearn import svm # we create 40 separable points np.random.seed(0) X = np.r_[np.random.randn(20, 2) - [2, 2], np.random.randn(20, 2) + [2, 2]] Y = [0] * 20 + [1] * 20 # figure number fignum = 1 # fit the model for name, penalty in (('unreg', 1), ('reg', 0.05)): clf = svm.SVC(kernel='linear', C=penalty) clf.fit(X, Y) # get the separating hyperplane w = clf.coef_[0] a = -w[0] / w[1] xx = np.linspace(-5, 5) yy = a * xx - (clf.intercept_[0]) / w[1] # plot the parallels to the separating hyperplane that pass through the # support vectors margin = 1 / np.sqrt(np.sum(clf.coef_ ** 2)) yy_down = yy + a * margin yy_up = yy - a * margin # plot the line, the points, and the nearest vectors to the plane plt.figure(fignum, figsize=(4, 3)) plt.clf() plt.plot(xx, yy, 'k-') plt.plot(xx, yy_down, 'k--') plt.plot(xx, yy_up, 'k--') plt.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], s=80, facecolors='none', zorder=10) plt.scatter(X[:, 0], X[:, 1], c=Y, zorder=10, cmap=plt.cm.Paired) plt.axis('tight') x_min = -4.8 x_max = 4.2 y_min = -6 y_max = 6 XX, YY = np.mgrid[x_min:x_max:200j, y_min:y_max:200j] Z = clf.predict(np.c_[XX.ravel(), YY.ravel()]) # Put the result into a color plot Z = Z.reshape(XX.shape) plt.figure(fignum, figsize=(4, 3)) 
plt.pcolormesh(XX, YY, Z, cmap=plt.cm.Paired) plt.xlim(x_min, x_max) plt.ylim(y_min, y_max) plt.xticks(()) plt.yticks(()) fignum = fignum + 1 plt.show()
bsd-3-clause
protwis/protwis
contactnetwork/management/commands/build_distance_representative.py
1
2958
from django.core.management.base import BaseCommand, CommandError from django.core.management import call_command from django.conf import settings from django.db import connection from django.db.models import Q, F from contactnetwork.distances import * from protein.models import ProteinFamily import time import scipy class Command(BaseCommand): help = "Build distance representatives" def handle(self, *args, **options): self.receptor_representatives() def receptor_representatives(self): print('Script to decide distance representative for a state/receptor combination. Lowest average distance to all other structures for the same receptor/state') structures = Structure.objects.all().prefetch_related( "pdb_code", "state", "protein_conformation__protein__parent__family") distinct_proteins = {} resolution_lookup = {} for s in structures: pdb = s.pdb_code.index resolution_lookup[pdb] = s.resolution state = s.state.slug slug = s.protein_conformation.protein.parent.family.slug name = s.protein_conformation.protein.parent.family.name key = '{}_{}'.format(name,state) if key not in distinct_proteins: distinct_proteins[key] = [] distinct_proteins[key].append(pdb) for conformation, pdbs in distinct_proteins.items(): print(conformation, "PDBS:",pdbs) number_of_pdbs = len(pdbs) if (number_of_pdbs==1): # Do not care when only one PDB for a conformation rep print("REPRESENTATIVE:", pdbs[0]) s = Structure.objects.get(pdb_code__index=pdbs[0]) s.distance_representative = True s.save() else: # Distances dis = Distances() dis.load_pdbs(pdbs) distance_matrix = dis.get_distance_matrix() # Calculate structures with lowest average distance (rank-by-vote fusion) ranking = np.zeros(len(distance_matrix)) average = np.zeros(len(distance_matrix)) for i in range(0,len(distance_matrix)): ranking = ranking + scipy.stats.rankdata(distance_matrix[i,:], method='min') average = average + distance_matrix[i,:] # check if single minimum lowest = np.where(ranking==min(ranking))[0] if len(lowest)>1: lowest = 
lowest[np.where(average[lowest]==min(average))[0][0]] for i in range(0,len(distance_matrix)): if i==lowest: print("REPRESENTATIVE:",pdbs[i]) s = Structure.objects.get(pdb_code__index=pdbs[i]) s.distance_representative = (i==lowest) s.save()
apache-2.0
liamgh/liamgreenhughes-sl4a-tf101
python/gdata/src/gdata/Crypto/Util/test.py
228
18297
# # test.py : Functions used for testing the modules # # Part of the Python Cryptography Toolkit # # Distribute and use freely; there are no restrictions on further # dissemination and usage except those imposed by the laws of your # country of residence. This software is provided "as is" without # warranty of fitness for use or suitability for any purpose, express # or implied. Use at your own risk or not at all. # __revision__ = "$Id: test.py,v 1.16 2004/08/13 22:24:18 akuchling Exp $" import binascii import string import testdata from Crypto.Cipher import * def die(string): import sys print '***ERROR: ', string # sys.exit(0) # Will default to continuing onward... def print_timing (size, delta, verbose): if verbose: if delta == 0: print 'Unable to measure time -- elapsed time too small' else: print '%.2f K/sec' % (size/delta) def exerciseBlockCipher(cipher, verbose): import string, time try: ciph = eval(cipher) except NameError: print cipher, 'module not available' return None print cipher+ ':' str='1' # Build 128K of test data for i in xrange(0, 17): str=str+str if ciph.key_size==0: ciph.key_size=16 password = 'password12345678Extra text for password'[0:ciph.key_size] IV = 'Test IV Test IV Test IV Test'[0:ciph.block_size] if verbose: print ' ECB mode:', obj=ciph.new(password, ciph.MODE_ECB) if obj.block_size != ciph.block_size: die("Module and cipher object block_size don't match") text='1234567812345678'[0:ciph.block_size] c=obj.encrypt(text) if (obj.decrypt(c)!=text): die('Error encrypting "'+text+'"') text='KuchlingKuchling'[0:ciph.block_size] c=obj.encrypt(text) if (obj.decrypt(c)!=text): die('Error encrypting "'+text+'"') text='NotTodayNotEver!'[0:ciph.block_size] c=obj.encrypt(text) if (obj.decrypt(c)!=text): die('Error encrypting "'+text+'"') start=time.time() s=obj.encrypt(str) s2=obj.decrypt(s) end=time.time() if (str!=s2): die('Error in resulting plaintext from ECB mode') print_timing(256, end-start, verbose) del obj if verbose: print ' CFB mode:', 
obj1=ciph.new(password, ciph.MODE_CFB, IV) obj2=ciph.new(password, ciph.MODE_CFB, IV) start=time.time() ciphertext=obj1.encrypt(str[0:65536]) plaintext=obj2.decrypt(ciphertext) end=time.time() if (plaintext!=str[0:65536]): die('Error in resulting plaintext from CFB mode') print_timing(64, end-start, verbose) del obj1, obj2 if verbose: print ' CBC mode:', obj1=ciph.new(password, ciph.MODE_CBC, IV) obj2=ciph.new(password, ciph.MODE_CBC, IV) start=time.time() ciphertext=obj1.encrypt(str) plaintext=obj2.decrypt(ciphertext) end=time.time() if (plaintext!=str): die('Error in resulting plaintext from CBC mode') print_timing(256, end-start, verbose) del obj1, obj2 if verbose: print ' PGP mode:', obj1=ciph.new(password, ciph.MODE_PGP, IV) obj2=ciph.new(password, ciph.MODE_PGP, IV) start=time.time() ciphertext=obj1.encrypt(str) plaintext=obj2.decrypt(ciphertext) end=time.time() if (plaintext!=str): die('Error in resulting plaintext from PGP mode') print_timing(256, end-start, verbose) del obj1, obj2 if verbose: print ' OFB mode:', obj1=ciph.new(password, ciph.MODE_OFB, IV) obj2=ciph.new(password, ciph.MODE_OFB, IV) start=time.time() ciphertext=obj1.encrypt(str) plaintext=obj2.decrypt(ciphertext) end=time.time() if (plaintext!=str): die('Error in resulting plaintext from OFB mode') print_timing(256, end-start, verbose) del obj1, obj2 def counter(length=ciph.block_size): return length * 'a' if verbose: print ' CTR mode:', obj1=ciph.new(password, ciph.MODE_CTR, counter=counter) obj2=ciph.new(password, ciph.MODE_CTR, counter=counter) start=time.time() ciphertext=obj1.encrypt(str) plaintext=obj2.decrypt(ciphertext) end=time.time() if (plaintext!=str): die('Error in resulting plaintext from CTR mode') print_timing(256, end-start, verbose) del obj1, obj2 # Test the IV handling if verbose: print ' Testing IV handling' obj1=ciph.new(password, ciph.MODE_CBC, IV) plaintext='Test'*(ciph.block_size/4)*3 ciphertext1=obj1.encrypt(plaintext) obj1.IV=IV ciphertext2=obj1.encrypt(plaintext) if 
ciphertext1!=ciphertext2: die('Error in setting IV') # Test keyword arguments obj1=ciph.new(key=password) obj1=ciph.new(password, mode=ciph.MODE_CBC) obj1=ciph.new(mode=ciph.MODE_CBC, key=password) obj1=ciph.new(IV=IV, mode=ciph.MODE_CBC, key=password) return ciph def exerciseStreamCipher(cipher, verbose): import string, time try: ciph = eval(cipher) except (NameError): print cipher, 'module not available' return None print cipher + ':', str='1' # Build 128K of test data for i in xrange(0, 17): str=str+str key_size = ciph.key_size or 16 password = 'password12345678Extra text for password'[0:key_size] obj1=ciph.new(password) obj2=ciph.new(password) if obj1.block_size != ciph.block_size: die("Module and cipher object block_size don't match") if obj1.key_size != ciph.key_size: die("Module and cipher object key_size don't match") text='1234567812345678Python' c=obj1.encrypt(text) if (obj2.decrypt(c)!=text): die('Error encrypting "'+text+'"') text='B1FF I2 A R3A11Y |<00L D00D!!!!!' c=obj1.encrypt(text) if (obj2.decrypt(c)!=text): die('Error encrypting "'+text+'"') text='SpamSpamSpamSpamSpamSpamSpamSpamSpam' c=obj1.encrypt(text) if (obj2.decrypt(c)!=text): die('Error encrypting "'+text+'"') start=time.time() s=obj1.encrypt(str) str=obj2.decrypt(s) end=time.time() print_timing(256, end-start, verbose) del obj1, obj2 return ciph def TestStreamModules(args=['arc4', 'XOR'], verbose=1): import sys, string args=map(string.lower, args) if 'arc4' in args: # Test ARC4 stream cipher arc4=exerciseStreamCipher('ARC4', verbose) if (arc4!=None): for entry in testdata.arc4: key,plain,cipher=entry key=binascii.a2b_hex(key) plain=binascii.a2b_hex(plain) cipher=binascii.a2b_hex(cipher) obj=arc4.new(key) ciphertext=obj.encrypt(plain) if (ciphertext!=cipher): die('ARC4 failed on entry '+`entry`) if 'xor' in args: # Test XOR stream cipher XOR=exerciseStreamCipher('XOR', verbose) if (XOR!=None): for entry in testdata.xor: key,plain,cipher=entry key=binascii.a2b_hex(key) 
plain=binascii.a2b_hex(plain) cipher=binascii.a2b_hex(cipher) obj=XOR.new(key) ciphertext=obj.encrypt(plain) if (ciphertext!=cipher): die('XOR failed on entry '+`entry`) def TestBlockModules(args=['aes', 'arc2', 'des', 'blowfish', 'cast', 'des3', 'idea', 'rc5'], verbose=1): import string args=map(string.lower, args) if 'aes' in args: ciph=exerciseBlockCipher('AES', verbose) # AES if (ciph!=None): if verbose: print ' Verifying against test suite...' for entry in testdata.aes: key,plain,cipher=entry key=binascii.a2b_hex(key) plain=binascii.a2b_hex(plain) cipher=binascii.a2b_hex(cipher) obj=ciph.new(key, ciph.MODE_ECB) ciphertext=obj.encrypt(plain) if (ciphertext!=cipher): die('AES failed on entry '+`entry`) for i in ciphertext: if verbose: print hex(ord(i)), if verbose: print for entry in testdata.aes_modes: mode, key, plain, cipher, kw = entry key=binascii.a2b_hex(key) plain=binascii.a2b_hex(plain) cipher=binascii.a2b_hex(cipher) obj=ciph.new(key, mode, **kw) obj2=ciph.new(key, mode, **kw) ciphertext=obj.encrypt(plain) if (ciphertext!=cipher): die('AES encrypt failed on entry '+`entry`) for i in ciphertext: if verbose: print hex(ord(i)), if verbose: print plain2=obj2.decrypt(ciphertext) if plain2!=plain: die('AES decrypt failed on entry '+`entry`) for i in plain2: if verbose: print hex(ord(i)), if verbose: print if 'arc2' in args: ciph=exerciseBlockCipher('ARC2', verbose) # Alleged RC2 if (ciph!=None): if verbose: print ' Verifying against test suite...' for entry in testdata.arc2: key,plain,cipher=entry key=binascii.a2b_hex(key) plain=binascii.a2b_hex(plain) cipher=binascii.a2b_hex(cipher) obj=ciph.new(key, ciph.MODE_ECB) ciphertext=obj.encrypt(plain) if (ciphertext!=cipher): die('ARC2 failed on entry '+`entry`) for i in ciphertext: if verbose: print hex(ord(i)), print if 'blowfish' in args: ciph=exerciseBlockCipher('Blowfish',verbose)# Bruce Schneier's Blowfish cipher if (ciph!=None): if verbose: print ' Verifying against test suite...' 
for entry in testdata.blowfish: key,plain,cipher=entry key=binascii.a2b_hex(key) plain=binascii.a2b_hex(plain) cipher=binascii.a2b_hex(cipher) obj=ciph.new(key, ciph.MODE_ECB) ciphertext=obj.encrypt(plain) if (ciphertext!=cipher): die('Blowfish failed on entry '+`entry`) for i in ciphertext: if verbose: print hex(ord(i)), if verbose: print if 'cast' in args: ciph=exerciseBlockCipher('CAST', verbose) # CAST-128 if (ciph!=None): if verbose: print ' Verifying against test suite...' for entry in testdata.cast: key,plain,cipher=entry key=binascii.a2b_hex(key) plain=binascii.a2b_hex(plain) cipher=binascii.a2b_hex(cipher) obj=ciph.new(key, ciph.MODE_ECB) ciphertext=obj.encrypt(plain) if (ciphertext!=cipher): die('CAST failed on entry '+`entry`) for i in ciphertext: if verbose: print hex(ord(i)), if verbose: print if 0: # The full-maintenance test; it requires 4 million encryptions, # and correspondingly is quite time-consuming. I've disabled # it; it's faster to compile block/cast.c with -DTEST and run # the resulting program. 
a = b = '\x01\x23\x45\x67\x12\x34\x56\x78\x23\x45\x67\x89\x34\x56\x78\x9A' for i in range(0, 1000000): obj = cast.new(b, cast.MODE_ECB) a = obj.encrypt(a[:8]) + obj.encrypt(a[-8:]) obj = cast.new(a, cast.MODE_ECB) b = obj.encrypt(b[:8]) + obj.encrypt(b[-8:]) if a!="\xEE\xA9\xD0\xA2\x49\xFD\x3B\xA6\xB3\x43\x6F\xB8\x9D\x6D\xCA\x92": if verbose: print 'CAST test failed: value of "a" doesn\'t match' if b!="\xB2\xC9\x5E\xB0\x0C\x31\xAD\x71\x80\xAC\x05\xB8\xE8\x3D\x69\x6E": if verbose: print 'CAST test failed: value of "b" doesn\'t match' if 'des' in args: # Test/benchmark DES block cipher des=exerciseBlockCipher('DES', verbose) if (des!=None): # Various tests taken from the DES library packaged with Kerberos V4 obj=des.new(binascii.a2b_hex('0123456789abcdef'), des.MODE_ECB) s=obj.encrypt('Now is t') if (s!=binascii.a2b_hex('3fa40e8a984d4815')): die('DES fails test 1') obj=des.new(binascii.a2b_hex('08192a3b4c5d6e7f'), des.MODE_ECB) s=obj.encrypt('\000\000\000\000\000\000\000\000') if (s!=binascii.a2b_hex('25ddac3e96176467')): die('DES fails test 2') obj=des.new(binascii.a2b_hex('0123456789abcdef'), des.MODE_CBC, binascii.a2b_hex('1234567890abcdef')) s=obj.encrypt("Now is the time for all ") if (s!=binascii.a2b_hex('e5c7cdde872bf27c43e934008c389c0f683788499a7c05f6')): die('DES fails test 3') obj=des.new(binascii.a2b_hex('0123456789abcdef'), des.MODE_CBC, binascii.a2b_hex('fedcba9876543210')) s=obj.encrypt("7654321 Now is the time for \000\000\000\000") if (s!=binascii.a2b_hex("ccd173ffab2039f4acd8aefddfd8a1eb468e91157888ba681d269397f7fe62b4")): die('DES fails test 4') del obj,s # R. Rivest's test: see http://theory.lcs.mit.edu/~rivest/destest.txt x=binascii.a2b_hex('9474B8E8C73BCA7D') for i in range(0, 16): obj=des.new(x, des.MODE_ECB) if (i & 1): x=obj.decrypt(x) else: x=obj.encrypt(x) if x!=binascii.a2b_hex('1B1A2DDB4C642438'): die("DES fails Rivest's test") if verbose: print ' Verifying against test suite...' 
for entry in testdata.des: key,plain,cipher=entry key=binascii.a2b_hex(key) plain=binascii.a2b_hex(plain) cipher=binascii.a2b_hex(cipher) obj=des.new(key, des.MODE_ECB) ciphertext=obj.encrypt(plain) if (ciphertext!=cipher): die('DES failed on entry '+`entry`) for entry in testdata.des_cbc: key, iv, plain, cipher=entry key, iv, cipher=binascii.a2b_hex(key),binascii.a2b_hex(iv),binascii.a2b_hex(cipher) obj1=des.new(key, des.MODE_CBC, iv) obj2=des.new(key, des.MODE_CBC, iv) ciphertext=obj1.encrypt(plain) if (ciphertext!=cipher): die('DES CBC mode failed on entry '+`entry`) if 'des3' in args: ciph=exerciseBlockCipher('DES3', verbose) # Triple DES if (ciph!=None): if verbose: print ' Verifying against test suite...' for entry in testdata.des3: key,plain,cipher=entry key=binascii.a2b_hex(key) plain=binascii.a2b_hex(plain) cipher=binascii.a2b_hex(cipher) obj=ciph.new(key, ciph.MODE_ECB) ciphertext=obj.encrypt(plain) if (ciphertext!=cipher): die('DES3 failed on entry '+`entry`) for i in ciphertext: if verbose: print hex(ord(i)), if verbose: print for entry in testdata.des3_cbc: key, iv, plain, cipher=entry key, iv, cipher=binascii.a2b_hex(key),binascii.a2b_hex(iv),binascii.a2b_hex(cipher) obj1=ciph.new(key, ciph.MODE_CBC, iv) obj2=ciph.new(key, ciph.MODE_CBC, iv) ciphertext=obj1.encrypt(plain) if (ciphertext!=cipher): die('DES3 CBC mode failed on entry '+`entry`) if 'idea' in args: ciph=exerciseBlockCipher('IDEA', verbose) # IDEA block cipher if (ciph!=None): if verbose: print ' Verifying against test suite...' for entry in testdata.idea: key,plain,cipher=entry key=binascii.a2b_hex(key) plain=binascii.a2b_hex(plain) cipher=binascii.a2b_hex(cipher) obj=ciph.new(key, ciph.MODE_ECB) ciphertext=obj.encrypt(plain) if (ciphertext!=cipher): die('IDEA failed on entry '+`entry`) if 'rc5' in args: # Ronald Rivest's RC5 algorithm ciph=exerciseBlockCipher('RC5', verbose) if (ciph!=None): if verbose: print ' Verifying against test suite...' 
for entry in testdata.rc5: key,plain,cipher=entry key=binascii.a2b_hex(key) plain=binascii.a2b_hex(plain) cipher=binascii.a2b_hex(cipher) obj=ciph.new(key[4:], ciph.MODE_ECB, version =ord(key[0]), word_size=ord(key[1]), rounds =ord(key[2]) ) ciphertext=obj.encrypt(plain) if (ciphertext!=cipher): die('RC5 failed on entry '+`entry`) for i in ciphertext: if verbose: print hex(ord(i)), if verbose: print
apache-2.0
HalcyonChimera/osf.io
admin/collection_providers/forms.py
11
9406
import bleach import json from django import forms from osf.models import CollectionProvider, CollectionSubmission from admin.base.utils import get_nodelicense_choices, get_defaultlicense_choices, validate_slug class CollectionProviderForm(forms.ModelForm): collected_type_choices = forms.CharField(widget=forms.HiddenInput(), required=False) status_choices = forms.CharField(widget=forms.HiddenInput(), required=False) volume_choices = forms.CharField(widget=forms.HiddenInput(), required=False) issue_choices = forms.CharField(widget=forms.HiddenInput(), required=False) program_area_choices = forms.CharField(widget=forms.HiddenInput(), required=False) _id = forms.SlugField( required=True, help_text='URL Slug', validators=[validate_slug] ) class Meta: model = CollectionProvider exclude = ['primary_identifier_name', 'primary_collection', 'type', 'allow_commenting', 'advisory_board', 'example', 'domain', 'domain_redirect_enabled', 'reviews_comments_anonymous', 'reviews_comments_private', 'reviews_workflow'] widgets = { 'licenses_acceptable': forms.CheckboxSelectMultiple(), } def __init__(self, *args, **kwargs): nodelicense_choices = get_nodelicense_choices() defaultlicense_choices = get_defaultlicense_choices() super(CollectionProviderForm, self).__init__(*args, **kwargs) self.fields['licenses_acceptable'].choices = nodelicense_choices self.fields['default_license'].choices = defaultlicense_choices def clean_description(self, *args, **kwargs): if not self.data.get('description'): return u'' return bleach.clean( self.data.get('description'), tags=['a', 'br', 'em', 'p', 'span', 'strong'], attributes=['class', 'style', 'href', 'title', 'target'], styles=['text-align', 'vertical-align'], strip=True ) def clean_footer_links(self, *args, **kwargs): if not self.data.get('footer_links'): return u'' return bleach.clean( self.data.get('footer_links'), tags=['a', 'br', 'div', 'em', 'p', 'span', 'strong'], attributes=['class', 'style', 'href', 'title', 'target'], 
styles=['text-align', 'vertical-align'], strip=True ) def clean_collected_type_choices(self): collection_provider = self.instance # if this is to modify an existing CollectionProvider if collection_provider.primary_collection: type_choices_old = set([c.strip(' ') for c in collection_provider.primary_collection.collected_type_choices]) type_choices_new = set([c.strip(' ') for c in json.loads(self.data.get('collected_type_choices'))]) type_choices_added = type_choices_new - type_choices_old type_choices_removed = type_choices_old - type_choices_new for item in type_choices_removed: if CollectionSubmission.objects.filter(collection=collection_provider.primary_collection, collected_type=item).exists(): raise forms.ValidationError( 'Cannot delete "{}" because it is used as metadata on objects.'.format(item) ) else: # if this is creating a CollectionProvider type_choices_added = [] type_choices_removed = [] choices = self.data.get('collected_type_choices') if choices: type_choices_added = json.loads(choices) return { 'added': type_choices_added, 'removed': type_choices_removed, } def clean_status_choices(self): collection_provider = self.instance # if this is to modify an existing CollectionProvider if collection_provider.primary_collection: status_choices_old = set([c.strip(' ') for c in collection_provider.primary_collection.status_choices]) status_choices_new = set([c.strip(' ') for c in json.loads(self.data.get('status_choices'))]) status_choices_added = status_choices_new - status_choices_old status_choices_removed = status_choices_old - status_choices_new for item in status_choices_removed: if CollectionSubmission.objects.filter(collection=collection_provider.primary_collection, status=item).exists(): raise forms.ValidationError( 'Cannot delete "{}" because it is used as metadata on objects.'.format(item) ) else: # if this is creating a CollectionProvider status_choices_added = [] status_choices_removed = [] choices = self.data.get('status_choices') if choices: 
status_choices_added = json.loads(choices) return { 'added': status_choices_added, 'removed': status_choices_removed, } def clean_volume_choices(self): collection_provider = self.instance # if this is to modify an existing CollectionProvider if collection_provider.primary_collection: volume_choices_old = set([c.strip(' ') for c in collection_provider.primary_collection.volume_choices]) volume_choices_new = set([c.strip(' ') for c in json.loads(self.data.get('volume_choices'))]) volume_choices_added = volume_choices_new - volume_choices_old volume_choices_removed = volume_choices_old - volume_choices_new for item in volume_choices_removed: if CollectionSubmission.objects.filter(collection=collection_provider.primary_collection, volume=item).exists(): raise forms.ValidationError( 'Cannot delete "{}" because it is used as metadata on objects.'.format(item) ) else: # if this is creating a CollectionProvider volume_choices_added = [] volume_choices_removed = [] choices = self.data.get('volume_choices') if choices: volume_choices_added = json.loads(choices) return { 'added': volume_choices_added, 'removed': volume_choices_removed, } def clean_issue_choices(self): collection_provider = self.instance # if this is to modify an existing CollectionProvider if collection_provider.primary_collection: issue_choices_old = set([c.strip(' ') for c in collection_provider.primary_collection.issue_choices]) issue_choices_new = set([c.strip(' ') for c in json.loads(self.data.get('issue_choices'))]) issue_choices_added = issue_choices_new - issue_choices_old issue_choices_removed = issue_choices_old - issue_choices_new for item in issue_choices_removed: if CollectionSubmission.objects.filter(collection=collection_provider.primary_collection, issue=item).exists(): raise forms.ValidationError( 'Cannot delete "{}" because it is used as metadata on objects.'.format(item) ) else: # if this is creating a CollectionProvider issue_choices_added = [] issue_choices_removed = [] choices = 
self.data.get('issue_choices') if choices: issue_choices_added = json.loads(choices) return { 'added': issue_choices_added, 'removed': issue_choices_removed, } def clean_program_area_choices(self): collection_provider = self.instance # if this is to modify an existing CollectionProvider if collection_provider.primary_collection: program_area_choices_old = set([c.strip(' ') for c in collection_provider.primary_collection.program_area_choices]) program_area_choices_new = set([c.strip(' ') for c in json.loads(self.data.get('program_area_choices'))]) program_area_choices_added = program_area_choices_new - program_area_choices_old program_area_choices_removed = program_area_choices_old - program_area_choices_new for item in program_area_choices_removed: if CollectionSubmission.objects.filter(collection=collection_provider.primary_collection, program_area=item).exists(): raise forms.ValidationError( 'Cannot delete "{}" because it is used as metadata on objects.'.format(item) ) else: # if this is creating a CollectionProvider program_area_choices_added = [] program_area_choices_removed = [] choices = self.data.get('program_area_choices') if choices: program_area_choices_added = json.loads(choices) return { 'added': program_area_choices_added, 'removed': program_area_choices_removed, }
apache-2.0
flberger/planes
examples/gui_widgets.py
1
7228
#!/usr/bin/python3 """planes GUI Widgets Demo Copyright (c) 2014 Florian Berger <mail@florian-berger.de> """ # This file is part of planes. # # planes is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # planes is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with planes. If not, see <http://www.gnu.org/licenses/>. # work started on 11. October 2014 import sys # Add current and parent directory. One of them is supposed to contain the # planes package. # sys.path.append("../") sys.path.append("./") import pygame import planes.gui import planes.gui.lmr import planes.gui.tmb import traceback def add_Label(parent_plane, x, y): """Add a Label. """ parent_plane.sub(planes.gui.Label(parent_plane.random_name(), "Label", pygame.Rect((x, y), (100, 50)))) return def add_OutlinedText(parent_plane, x, y): """Add an OutlinedText. """ outlined_text = planes.gui.OutlinedText(parent_plane.random_name(), "OutlinedText") outlined_text.rect.left = x outlined_text.rect.top = y parent_plane.sub(outlined_text) return def add_Button(parent_plane, x, y): """Add a Button. """ parent_plane.sub(planes.gui.Button("Button", pygame.Rect((x, y), (100, 50)), None)) return def add_TextBox(parent_plane, x, y): """Add a TextBox. 
""" textbox = planes.gui.TextBox(parent_plane.random_name(), pygame.Rect((x, y), (100, 50))) textbox.text = "TextBox" def get_focus(plane): # NOTE: Assuming parent_plane is the display # parent_plane.key_sensitive(textbox) return textbox.left_click_callback = get_focus parent_plane.sub(textbox) return # TODO: #def add_Container(parent_plane, x, y): # """Add a Container. # """ # # return # TODO: #def add_GetStringDialog(parent_plane, x, y): # """Add a GetStringDialog. # """ # # return # TODO: #def add_FadingContainer(parent_plane, x, y): # """Add a FadingContainer. # """ # # return def add_OptionList(parent_plane, x, y): """Add an OptionList. """ option_list = planes.gui.OptionList(parent_plane.random_name(), ["First option", "Second option", "Third option"]) option_list.rect.left = x option_list.rect.top = y parent_plane.sub(option_list) return # TODO: #def add_OptionSelector(parent_plane, x, y): # """Add an OptionSelector. # """ # # return # TODO: #def add_OkBox(parent_plane, x, y): # """Add an OkBox. # """ # # return # TODO: #def add_ScrollingPlane(parent_plane, x, y): # """Add a ScrollingPlane. # """ # # return def add_PlusMinusBox(parent_plane, x, y): """Add a PlusMinusBox. """ plus_minus_box = planes.gui.PlusMinusBox(parent_plane.random_name(), 3) plus_minus_box.rect.left = x plus_minus_box.rect.top = y def get_focus(plane): # NOTE: Assuming parent_plane is the display # parent_plane.key_sensitive(plus_minus_box.textbox) return plus_minus_box.textbox.left_click_callback = get_focus parent_plane.sub(plus_minus_box) return def add_ProgressBar(parent_plane, x, y): """Add a ProgressBar. """ parent_plane.sub(planes.gui.ProgressBar(parent_plane.random_name(), pygame.Rect((x, y), (100, 50)), 42, text = "ProgressBar")) return # TODO: #def add_TMBContainer(parent_plane, x, y): # """ Add a TMBContainer. # """ # # return # TODO: #def add_TMBOkBox(parent_plane, x, y): # """ Add a TMBOkBox. 
#    """
#
#    return

# TODO:
#def add_TMBOptionSelector(parent_plane, x, y):
#    """ Add a TMBOptionSelector.
#    """
#
#    return

# TODO:
#def add_TMBGetStringDialog(parent_plane, x, y):
#    """ Add a TMBGetStringDialog.
#    """
#
#    return

# TODO:
#def add_TMBFadingContainer(parent_plane, x, y):
#    """ Add a TMBFadingContainer.
#    """
#
#    return

def add_LMRButton(parent_plane, x, y):
    """Add an LMRButton at (x, y) on parent_plane.

    NOTE(review): unlike the other widget creators this passes the fixed
    name "LMRButton" instead of parent_plane.random_name() — presumably
    because the button's name doubles as its label; confirm before
    changing it.
    """

    lmr_button = planes.gui.lmr.LMRButton("LMRButton", 100, None)

    lmr_button.rect.left = x
    lmr_button.rect.top = y

    parent_plane.sub(lmr_button)

    return

def add_LMROptionList(parent_plane, x, y):
    """Add an LMROptionList with three demo options at (x, y) on parent_plane.
    """

    lmr_option_list = planes.gui.lmr.LMROptionList(parent_plane.random_name(),
                                                   ["First option", "Second Option", "Third Option"],
                                                   200)

    lmr_option_list.rect.left = x
    lmr_option_list.rect.top = y

    parent_plane.sub(lmr_option_list)

    return

def add_LMRPlusMinusBox(parent_plane, x, y):
    """Add an LMRPlusMinusBox (initial value 3) at (x, y) on parent_plane.
    """

    lmr_plus_minus_box = planes.gui.lmr.LMRPlusMinusBox(parent_plane.random_name(), 3)

    lmr_plus_minus_box.rect.left = x
    lmr_plus_minus_box.rect.top = y

    # Placeholder click handler for the embedded text box.

    def get_focus(plane):

        # NOTE: Assuming parent_plane is the display
        # parent_plane.key_sensitive(lmr_plus_minus_box.textbox)

        return

    lmr_plus_minus_box.textbox.left_click_callback = get_focus

    parent_plane.sub(lmr_plus_minus_box)

    return

def mainloop(display, framerate):
    """Runs a pygame / planes main loop. framerate is the framerate.
       This must be run in the main thread, otherwise pygame.event
       will not receive any events under MS Windows.

       Terminates by raising SystemExit when a pygame.QUIT event arrives.
    """

    print("about to start main loop")

    clock = pygame.time.Clock()

    while True:

        events = pygame.event.get()

        for event in events:

            if event.type == pygame.QUIT:

                print("got pygame.QUIT, terminating in mainloop()")

                pygame.quit()

                raise SystemExit

        display.process(events)
        display.update()
        display.render()

        pygame.display.flip()

        # Slow down to framerate given
        #
        clock.tick(framerate)

    return

def main():
    """Main method: build the demo window, add one of each widget, run the loop.
    """

    window = planes.Display((800, 600))

    window.image.fill((127, 127, 127))

    pygame.display.set_caption("planes Widgets Demo")

    widget_creators = [add_Label,
                       add_OutlinedText,
                       add_Button,
                       add_TextBox,
                       add_OptionList,
                       add_PlusMinusBox,
                       add_ProgressBar,
                       add_LMRButton,
                       add_LMROptionList,
                       add_LMRPlusMinusBox]

    # Lay the widgets out on a simple grid, wrapping after ~700px.
    #
    x = 10
    y = 10

    for creator_function in widget_creators:

        creator_function(window, x, y)

        x += 220

        if x > 700:

            x = 10

            y += 150

    print("starting main loop in main thread")

    try:
        mainloop(window, 60)

    # Fixed: the original used a bare `except:`, which also caught the
    # SystemExit raised by mainloop() on a normal pygame.QUIT and
    # misreported a clean shutdown as an exception. Catch only real
    # errors and let SystemExit propagate.
    #
    except Exception:

        print("Exception in mainloop():")
        print(traceback.format_exc())

        pygame.quit()

        raise SystemExit

    return

if __name__ == "__main__":

    main()
gpl-3.0
dmitry-sobolev/ansible
lib/ansible/modules/network/a10/a10_virtual_server.py
17
11709
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage A10 Networks slb virtual server objects
(c) 2014, Mischa Peters <mpeters@a10networks.com>,
Eric Chou <ericc@a10networks.com>

This file is part of Ansible

Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
"""

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: a10_virtual_server
version_added: 1.8
short_description: Manage A10 Networks AX/SoftAX/Thunder/vThunder devices' virtual servers.
description:
    - Manage SLB (Server Load Balancing) virtual server objects on A10 Networks
      devices via aXAPIv2.
author: "Eric Chou (@ericchou) 2016, Mischa Peters (@mischapeters) 2014"
notes:
    - Requires A10 Networks aXAPI 2.1.
extends_documentation_fragment: a10
options:
  partition:
    version_added: "2.3"
    description:
      - set active-partition
    required: false
    default: null
  virtual_server:
    description:
      - The SLB (Server Load Balancing) virtual server name.
    required: true
    default: null
    aliases: ['vip', 'virtual']
  virtual_server_ip:
    description:
      - The SLB virtual server IPv4 address.
    required: true
    default: null
    aliases: ['ip', 'address']
  virtual_server_status:
    description:
      - The SLB virtual server status, such as enabled or disabled.
    required: false
    default: enabled
    aliases: ['status']
    choices: ['enabled', 'disabled']
  virtual_server_ports:
    description:
      - A list of ports to create for the virtual server. Each list item should be a
        dictionary which specifies the C(port:) and C(protocol:), but can also optionally
        specify the C(service_group:) as well as the C(status:). See the examples
        below for details. This parameter is required when C(state) is C(present).
    required: false
  validate_certs:
    description:
      - If C(no), SSL certificates will not be validated. This should only be used
        on personally controlled devices using self-signed certificates.
    required: false
    default: 'yes'
    choices: ['yes', 'no']

'''

EXAMPLES = '''
# Create a new virtual server
- a10_virtual_server:
    host: a10.mydomain.com
    username: myadmin
    password: mypassword
    partition: mypartition
    virtual_server: vserver1
    virtual_server_ip: 1.1.1.1
    virtual_server_ports:
      - port: 80
        protocol: TCP
        service_group: sg-80-tcp
      - port: 443
        protocol: HTTPS
        service_group: sg-443-https
      - port: 8080
        protocol: http
        status: disabled

'''

# NOTE: the original file defined RETURN twice (an empty placeholder before
# EXAMPLES and the real one after); only the informative definition is kept.
RETURN = '''
content:
  description: the full info regarding the slb_virtual
  returned: success
  type: string
  sample: "mynewvirtualserver"
'''

import json

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import url_argument_spec
from ansible.module_utils.a10 import axapi_call, a10_argument_spec, axapi_authenticate, axapi_failure
from ansible.module_utils.a10 import axapi_enabled_disabled, axapi_get_vport_protocol, AXAPI_VPORT_PROTOCOLS


VALID_PORT_FIELDS = ['port', 'protocol', 'service_group', 'status']


def validate_ports(module, ports):
    """Normalize and validate the virtual_server_ports list in place.

    Each entry must define an integer 'port' and a known 'protocol'
    (converted to the aXAPI integer value); 'status' is converted to the
    aXAPI enabled/disabled integer (defaulting to enabled) and
    'service_group' defaults to the empty string. Calls module.fail_json
    on any invalid entry.
    """
    for item in ports:
        for key in item:
            if key not in VALID_PORT_FIELDS:
                module.fail_json(msg="invalid port field (%s), must be one of: %s" %
                                     (key, ','.join(VALID_PORT_FIELDS)))

        # validate the port number is present and an integer
        if 'port' in item:
            try:
                item['port'] = int(item['port'])
            except:
                module.fail_json(msg="port definitions must be integers")
        else:
            module.fail_json(msg="port definitions must define the port field")

        # validate the port protocol is present, and convert it to
        # the internal API integer value (and validate it)
        if 'protocol' in item:
            protocol = axapi_get_vport_protocol(item['protocol'])
            if not protocol:
                module.fail_json(msg="invalid port protocol, must be one of: %s" %
                                     ','.join(AXAPI_VPORT_PROTOCOLS))
            else:
                item['protocol'] = protocol
        else:
            module.fail_json(msg="port definitions must define the port protocol (%s)" %
                                 ','.join(AXAPI_VPORT_PROTOCOLS))

        # convert the status to the internal API integer value
        if 'status' in item:
            item['status'] = axapi_enabled_disabled(item['status'])
        else:
            item['status'] = 1

        # ensure the service_group field is at least present
        if 'service_group' not in item:
            item['service_group'] = ''


def main():
    argument_spec = a10_argument_spec()
    argument_spec.update(url_argument_spec())
    argument_spec.update(
        dict(
            state=dict(type='str', default='present', choices=['present', 'absent']),
            virtual_server=dict(type='str', aliases=['vip', 'virtual'], required=True),
            virtual_server_ip=dict(type='str', aliases=['ip', 'address'], required=True),
            virtual_server_status=dict(type='str', default='enabled',
                                       aliases=['status'],
                                       choices=['enabled', 'disabled']),
            virtual_server_ports=dict(type='list', required=True),
            # Fixed: the original used default=[] — a mutable list default
            # on a string option.
            partition=dict(type='str', default=None),
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=False
    )

    host = module.params['host']
    username = module.params['username']
    password = module.params['password']
    partition = module.params['partition']
    state = module.params['state']
    write_config = module.params['write_config']
    slb_virtual = module.params['virtual_server']
    slb_virtual_ip = module.params['virtual_server_ip']
    slb_virtual_status = module.params['virtual_server_status']
    slb_virtual_ports = module.params['virtual_server_ports']

    validate_ports(module, slb_virtual_ports)

    axapi_base_url = 'https://%s/services/rest/V2.1/?format=json' % host
    session_url = axapi_authenticate(module, axapi_base_url, username, password)

    # Only switch the active partition when one was actually requested;
    # the original unconditionally posted the (empty) default and ignored
    # the result.
    if partition:
        axapi_call(module, session_url + '&method=system.partition.active',
                   json.dumps({'name': partition}))

    slb_virtual_data = axapi_call(module, session_url + '&method=slb.virtual_server.search',
                                  json.dumps({'name': slb_virtual}))
    slb_virtual_exists = not axapi_failure(slb_virtual_data)

    changed = False
    if state == 'present':
        json_post = {
            'virtual_server': {
                'name': slb_virtual,
                'address': slb_virtual_ip,
                'status': axapi_enabled_disabled(slb_virtual_status),
                'vport_list': slb_virtual_ports,
            }
        }

        # before creating/updating we need to validate that any
        # service groups defined in the ports list exist since
        # since the API will still create port definitions for
        # them while indicating a failure occurred
        checked_service_groups = []
        for port in slb_virtual_ports:
            if 'service_group' in port and port['service_group'] not in checked_service_groups:
                # skip blank service group entries
                if port['service_group'] == '':
                    continue
                result = axapi_call(module, session_url + '&method=slb.service_group.search',
                                    json.dumps({'name': port['service_group']}))
                if axapi_failure(result):
                    module.fail_json(msg="the service group %s specified in the ports list does not exist" %
                                         port['service_group'])
                checked_service_groups.append(port['service_group'])

        if not slb_virtual_exists:
            result = axapi_call(module, session_url + '&method=slb.virtual_server.create',
                                json.dumps(json_post))
            if axapi_failure(result):
                module.fail_json(msg="failed to create the virtual server: %s" %
                                     result['response']['err']['msg'])
            changed = True
        else:
            def needs_update(src_ports, dst_ports):
                '''
                Checks to determine if the port definitions of the src_ports
                array are in or different from those in dst_ports. If there is
                a difference, this function returns true, otherwise false.
                '''
                for src_port in src_ports:
                    found = False
                    different = False
                    for dst_port in dst_ports:
                        if src_port['port'] == dst_port['port']:
                            found = True
                            for valid_field in VALID_PORT_FIELDS:
                                if src_port[valid_field] != dst_port[valid_field]:
                                    different = True
                                    break
                        if found or different:
                            break
                    if not found or different:
                        return True
                # every port from the src exists in the dst, and none of them were different
                return False

            defined_ports = slb_virtual_data.get('virtual_server', {}).get('vport_list', [])

            # we check for a needed update both ways, in case ports
            # are missing from either the ones specified by the user
            # or from those on the device
            if needs_update(defined_ports, slb_virtual_ports) or needs_update(slb_virtual_ports, defined_ports):
                result = axapi_call(module, session_url + '&method=slb.virtual_server.update',
                                    json.dumps(json_post))
                if axapi_failure(result):
                    # Fixed: the original message wrongly said "create" here.
                    module.fail_json(msg="failed to update the virtual server: %s" %
                                         result['response']['err']['msg'])
                changed = True

        # if we changed things, get the full info regarding
        # the service group for the return data below
        if changed:
            result = axapi_call(module, session_url + '&method=slb.virtual_server.search',
                                json.dumps({'name': slb_virtual}))
        else:
            result = slb_virtual_data

    elif state == 'absent':
        if slb_virtual_exists:
            result = axapi_call(module, session_url + '&method=slb.virtual_server.delete',
                                json.dumps({'name': slb_virtual}))
            changed = True
        else:
            result = dict(msg="the virtual server was not present")

    # if the config has changed, save the config unless otherwise requested
    if changed and write_config:
        write_result = axapi_call(module, session_url + '&method=system.action.write_memory')
        if axapi_failure(write_result):
            module.fail_json(msg="failed to save the configuration: %s" %
                                 write_result['response']['err']['msg'])

    # log out of the session nicely and exit
    axapi_call(module, session_url + '&method=session.close')
    module.exit_json(changed=changed, content=result)


if __name__ == '__main__':
    main()
gpl-3.0
r3d83ard/assemblyline_daily_sample
test/test_apis.py
1
1947
import unittest import hashlib from utilities import logger_util from api import malshr_api class TestMalshrAPI(unittest.TestCase): malshare = '' md5 = '' hash_md5 = '' def setUp(self): self.malshare = malshr_api.Malshare_API() self.md5 = 'b63bff90e6a55c4a404a8a48d076de45' self.hash_md5 = hashlib.md5() def tearDown(self): del self.malshare self.malshare = None def test_m_api_daily_md5_string(self): self.setUp() self.assertIsNotNone(self.malshare.m_api_daily_md5_string()) self.tearDown() def test_m_api_daily_md5_list(self): self.setUp() self.assertIsNotNone(self.malshare.m_api_daily_md5_list()) self.tearDown() def test_m_api_daily_sources_string(self): self.setUp() self.assertIsNotNone(self.malshare.m_api_daily_sources_string()) self.tearDown() def test_m_api_daily_sources_list(self): self.setUp() self.assertIsNotNone(self.malshare.m_api_daily_sources_list()) self.tearDown() def test_m_api_raw_sample(self): self.setUp() response = self.malshare.m_api_raw_sample(self.md5) self.assertTrue(response) self.hash_md5.update(response.read()) self.assertEqual(self.hash_md5.hexdigest(),self.md5) self.tearDown() def test_m_api_file_details(self): self.setUp() self.assertIsNotNone(self.malshare.m_api_file_details(self.md5)) self.tearDown() def test_m_api_daily_md5_file_type(self): self.setUp() self.assertIsNotNone(self.malshare.m_api_daily_md5_file_type('PE32')) self.tearDown() class TestVTIAPI(unittest.TestCase): def setUp(self): pass def tearDown(self): pass class TestOTXAPI(unittest.TestCase): def setUp(self): pass def tearDown(self): pass if __name__ == '__main__': unittest.main()
mit
winry-linux/augur
augur/parser/parseRepo.py
1
2661
#!/usr/bin/env python

# This file is part of Augur - <http://github.com/winry-linux/augur>
#
# Copyright 2017, Joshua Strot <joshua@winrylinux.org>
#
# Augur is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Augur is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Augur. If not, see <http://www.gnu.org/licenses/>.

from urllib import request
from tarfile import open as tarOpen

# Local path the pacman database is downloaded to.
DATABASE_PATH = "/tmp/winry-testing.db"


def downloadDatabase(mirror):
    """
    Download the database of packages and versions from the specified mirror.

    Performs no parsing, just downloads it to ``/tmp/winry-testing.db``. In the
    future this should be expanded to perform more error handling on the
    Downloading of the database.

    Parameters
    ----------
    mirror : str
        URL of the mirror to download the database from. URL must contain a
        valid web protocol such as ``http://`` or ``https://``
    """

    print("=> Downloading database from mirror")
    url = "%s/winry-testing/winry-testing.db" % mirror
    request.urlretrieve(url, DATABASE_PATH)


def parsePackages(mirror):
    """
    Parse a tarred pacman database and find the packages and versions.

    Opens the database and then parses all of the top level directory names to
    generate a dictionary of all the packages and versions. This is essentially
    a more advanced wrapper of ``downloadDatabase()``

    Parameters
    ----------
    mirror : str
        URL of the mirror to download the database from. URL must contain a
        valid web protocol such as ``http://`` or ``https://``

    Returns
    -------
    dict
        Dictionary of all the packages and versions from the mirrors database.
        All the package names will be keys, with their versions as values.
    """

    # Download an updated database into the /tmp directory
    downloadDatabase(mirror)

    # Fixed: the original never closed the tarfile handle (resource leak);
    # the context manager guarantees it is closed once the top-level entry
    # names (package directories, i.e. names without "/") are collected.
    with tarOpen(DATABASE_PATH) as archive:
        packagesRaw = [entry for entry in archive.getnames() if "/" not in entry]

    # Entries are "<name>-<version>-<release>"; split from the right so
    # package names containing dashes stay intact.
    packages = {}
    for entry in packagesRaw:
        parts = entry.rsplit("-", 2)
        packages[parts[0]] = "-".join(parts[1:])

    return packages
gpl-3.0
Jalle19/xbmc
tools/EventClients/lib/python/ps3/sixpair.py
208
2903
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Pair a USB-connected "sixaxis" controller with a Bluetooth host by
# rewriting the controller's stored host (HCI) address over USB.
# NOTE: this is Python 2 code (print statements) using the legacy PyUSB
# 0.x API (usb.busses / dev.open / controlMsg).

import sys
import usb

# USB vendor/product IDs matched by find_sixaxes(); 0x054c is presumably
# Sony's vendor ID for the sixaxis controller — TODO confirm.
vendor = 0x054c
product = 0x0268
# Control-transfer timeout in milliseconds.
timeout = 5000
# wValue used for both the get and set control transfers; presumably the
# feature-report ID for the pairing report — verify against the HID spec.
passed_value = 0x03f5

def find_sixaxes():
    """Return all connected USB devices matching the sixaxis vendor/product IDs."""
    res = []
    for bus in usb.busses():
        for dev in bus.devices:
            if dev.idVendor == vendor and dev.idProduct == product:
                res.append(dev)
    return res

def find_interface(dev):
    """Return the first interface of dev with interfaceClass 3 (HID)."""
    for cfg in dev.configurations:
        for itf in cfg.interfaces:
            for alt in itf:
                if alt.interfaceClass == 3:
                    return alt
    raise Exception("Unable to find interface")

def mac_to_string(mac):
    """Format a 6-element address sequence as colon-separated hex."""
    return "%02x:%02x:%02x:%02x:%02x:%02x" % (mac[0], mac[1], mac[2],
                                              mac[3], mac[4], mac[5])

def set_pair_filename(dirname, filename, mac):
    """Re-pair the device identified by USB bus dirname / device filename.

    Raises if the addressed device exists but is not a sixaxis, or if no
    matching device is found at all.
    """
    for bus in usb.busses():
        if int(bus.dirname) == int(dirname):
            for dev in bus.devices:
                if int(dev.filename) == int(filename):
                    if dev.idVendor == vendor and dev.idProduct == product:
                        update_pair(dev, mac)
                        return
                    else:
                        raise Exception("Device is not a sixaxis")
    raise Exception("Device not found")

def set_pair(dev, mac):
    """Write a new host address to the controller via a class OUT transfer."""
    itf = find_interface(dev)
    handle = dev.open()

    # Payload: two presumed header bytes followed by the 6-byte address.
    msg = (0x01, 0x00) + mac;

    # The kernel HID driver may own the interface; detaching can fail
    # harmlessly if nothing is attached.
    try:
        handle.detachKernelDriver(itf.interfaceNumber)
    except usb.USBError:
        pass

    handle.claimInterface(itf.interfaceNumber)

    try:
        handle.controlMsg(usb.ENDPOINT_OUT | usb.TYPE_CLASS | usb.RECIP_INTERFACE,
                          usb.REQ_SET_CONFIGURATION, msg, passed_value,
                          itf.interfaceNumber, timeout)
    finally:
        # Always release the interface, even if the transfer failed.
        handle.releaseInterface()

def get_pair(dev):
    """Read the controller's currently paired host address (6 bytes)."""
    itf = find_interface(dev)
    handle = dev.open()

    try:
        handle.detachKernelDriver(itf.interfaceNumber)
    except usb.USBError:
        pass

    handle.claimInterface(itf.interfaceNumber)

    try:
        # 8-byte IN transfer; the first two bytes are a header, the
        # remaining six are the stored address (sliced off below).
        msg = handle.controlMsg(usb.ENDPOINT_IN | usb.TYPE_CLASS | usb.RECIP_INTERFACE,
                                usb.REQ_CLEAR_FEATURE, 8, passed_value,
                                itf.interfaceNumber, timeout)
    finally:
        handle.releaseInterface()

    return msg[2:8]

def set_pair_all(mac):
    """Re-pair every connected sixaxis to mac (skipping already-paired ones)."""
    devs = find_sixaxes()
    for dev in devs:
        update_pair(dev, mac)

def update_pair(dev, mac):
    """Write mac to dev only if it differs from the stored address."""
    old = get_pair(dev)
    if old != mac:
        print "Reparing sixaxis from:" + mac_to_string(old) + " to:" + mac_to_string(mac)
        set_pair(dev, mac)

if __name__=="__main__":
    devs = find_sixaxes()

    # Optional argv[1]: target HCI address as colon-separated hex bytes.
    # With no (valid) address given, just report current pairings.
    mac = None
    if len(sys.argv) > 1:
        try:
            mac = sys.argv[1].split(':')
            mac = tuple([int(x, 16) for x in mac])
            if len(mac) != 6:
                print "Invalid length of HCI address, should be 6 parts"
                mac = None
        except:
            print "Failed to parse HCI address"
            mac = None

    for dev in devs:
        if mac:
            update_pair(dev, mac)
        else:
            print "Found sixaxis paired to: " + mac_to_string(get_pair(dev))
gpl-2.0
ARMmbed/yotta_osx_installer
workspace/lib/python2.7/site-packages/github/PaginatedList.py
21
7671
# -*- coding: utf-8 -*-

# ########################## Copyrights and license ############################
#
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net>
# Copyright 2012 Zearin <zearin@gonk.net>
# Copyright 2013 AKFish <akfish@gmail.com>
# Copyright 2013 Bill Mill <bill.mill@gmail.com>
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net>
# Copyright 2013 davidbrai <davidbrai@gmail.com>
#
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/
#
# PyGithub is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>.
#
# ##############################################################################

# NOTE: this is Python 2 code (it tests against the `long` type below).

import github.GithubObject


class PaginatedListBase:
    # Lazily-growing list: elements fetched so far live in __elements;
    # subclasses supply _couldGrow() / _fetchNextPage() to pull more.

    def __init__(self):
        self.__elements = list()

    def __getitem__(self, index):
        assert isinstance(index, (int, slice))
        if isinstance(index, (int, long)):
            # Integer index: fetch pages until the index is reachable.
            self.__fetchToIndex(index)
            return self.__elements[index]
        else:
            # Slice: return a lazy view that fetches as it is iterated.
            return self._Slice(self, index)

    def __iter__(self):
        # Yield what is cached first, then keep fetching pages until the
        # server has no more.
        for element in self.__elements:
            yield element
        while self._couldGrow():
            newElements = self._grow()
            for element in newElements:
                yield element

    def _isBiggerThan(self, index):
        # True if index is already cached or more data can still be fetched.
        return len(self.__elements) > index or self._couldGrow()

    def __fetchToIndex(self, index):
        while len(self.__elements) <= index and self._couldGrow():
            self._grow()

    def _grow(self):
        # Fetch one page, append it to the cache, and return just that page.
        newElements = self._fetchNextPage()
        self.__elements += newElements
        return newElements

    class _Slice:
        # Lazy slice over a PaginatedListBase; supports start/stop/step
        # with the usual defaults (start 0, step 1, open-ended stop).

        def __init__(self, theList, theSlice):
            self.__list = theList
            self.__start = theSlice.start or 0
            self.__stop = theSlice.stop
            self.__step = theSlice.step or 1

        def __iter__(self):
            index = self.__start
            while not self.__finished(index):
                if self.__list._isBiggerThan(index):
                    # Indexing the parent triggers fetching as needed.
                    yield self.__list[index]
                    index += self.__step
                else:
                    # Underlying list exhausted before reaching stop.
                    return

        def __finished(self, index):
            return self.__stop is not None and index >= self.__stop


class PaginatedList(PaginatedListBase):
    """
    This class abstracts the `pagination of the API <http://developer.github.com/v3/#pagination>`_.

    You can simply enumerate through instances of this class::

        for repo in user.get_repos():
            print repo.name

    You can also index them or take slices::

        second_repo = user.get_repos()[1]
        first_repos = user.get_repos()[:10]

    If you want to iterate in reversed order, just do::

        for repo in user.get_repos().reversed:
            print repo.name

    And if you really need it, you can explicitely access a specific page::

        some_repos = user.get_repos().get_page(0)
        some_other_repos = user.get_repos().get_page(3)
    """

    def __init__(self, contentClass, requester, firstUrl, firstParams):
        PaginatedListBase.__init__(self)
        self.__requester = requester
        self.__contentClass = contentClass
        self.__firstUrl = firstUrl
        self.__firstParams = firstParams or ()
        # __nextUrl/__nextParams track the page that will be fetched next.
        self.__nextUrl = firstUrl
        self.__nextParams = firstParams or {}
        if self.__requester.per_page != 30:
            self.__nextParams["per_page"] = self.__requester.per_page
        self._reversed = False
        self.__totalCount = None

    @property
    def totalCount(self):
        # Only available once at least one search-style response (with
        # 'total_count') has been fetched; force a fetch if needed.
        if not self.__totalCount:
            self._grow()
        return self.__totalCount

    def _getLastPageUrl(self):
        # Ask for the first page and read the rel="last" Link header.
        headers, data = self.__requester.requestJsonAndCheck(
            "GET",
            self.__firstUrl,
            parameters=self.__nextParams
        )
        links = self.__parseLinkHeader(headers)
        lastUrl = links.get("last")
        return lastUrl

    @property
    def reversed(self):
        # Return a new list that walks pages from last to first (and
        # reverses each page's items in _fetchNextPage).
        r = PaginatedList(self.__contentClass, self.__requester, self.__firstUrl, self.__firstParams)
        r.__reverse()
        return r

    def __reverse(self):
        self._reversed = True
        lastUrl = self._getLastPageUrl()
        if lastUrl:
            self.__nextUrl = lastUrl

    def _couldGrow(self):
        # __nextUrl is cleared when the server reports no further page.
        return self.__nextUrl is not None

    def _fetchNextPage(self):
        headers, data = self.__requester.requestJsonAndCheck(
            "GET",
            self.__nextUrl,
            parameters=self.__nextParams
        )

        # Follow "prev" when reversed, "next" otherwise; no link means
        # this was the final page.
        self.__nextUrl = None
        if len(data) > 0:
            links = self.__parseLinkHeader(headers)
            if self._reversed:
                if "prev" in links:
                    self.__nextUrl = links["prev"]
            elif "next" in links:
                self.__nextUrl = links["next"]
        # Params are baked into the followed URL from here on.
        self.__nextParams = None

        # Search endpoints wrap results in {'total_count': .., 'items': [..]}.
        if 'items' in data:
            self.__totalCount = data['total_count']
            data = data["items"]

        content = [
            self.__contentClass(self.__requester, headers, element, completed=False)
            for element in data if element is not None
        ]
        if self._reversed:
            return content[::-1]
        return content

    def __parseLinkHeader(self, headers):
        # Parse 'Link: <url>; rel="name", ...' into {name: url}.
        links = {}
        if "link" in headers:
            linkHeaders = headers["link"].split(", ")
            for linkHeader in linkHeaders:
                (url, rel) = linkHeader.split("; ")
                url = url[1:-1]  # strip the surrounding angle brackets
                rel = rel[5:-1]  # strip leading 'rel="' and trailing '"'
                links[rel] = url
        return links

    def get_page(self, page):
        # Fetch one specific zero-based page directly (API pages are 1-based,
        # hence page + 1; page 0 uses the API default and sends no param).
        params = dict(self.__firstParams)
        if page != 0:
            params["page"] = page + 1
        if self.__requester.per_page != 30:
            params["per_page"] = self.__requester.per_page
        headers, data = self.__requester.requestJsonAndCheck(
            "GET",
            self.__firstUrl,
            parameters=params
        )

        if 'items' in data:
            self.__totalCount = data['total_count']
            data = data["items"]

        return [
            self.__contentClass(self.__requester, headers, element, completed=False)
            for element in data
        ]
apache-2.0
siosio/intellij-community
plugins/hg4idea/testData/bin/mercurial/hook.py
93
7881
# hook.py - hook support for mercurial
#
# Copyright 2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

# NOTE: this is Python 2 code (`except E, e`, sys.exc_type, iteritems).

from i18n import _
import os, sys, time, types
import extensions, util, demandimport

def _pythonhook(ui, repo, name, hname, funcname, args, throw):
    '''call python hook. hook is callable object, looked up as
    name in python module. if callable returns "true", hook
    fails, else passes. if hook raises exception, treated as
    hook failure. exception propagates if throw is "true".

    reason for "true" meaning "hook failed" is so that
    unmodified commands (e.g. mercurial.commands.update) can
    be run as hooks without wrappers to convert return values.'''

    ui.note(_("calling hook %s: %s\n") % (hname, funcname))
    starttime = time.time()

    obj = funcname
    if not util.safehasattr(obj, '__call__'):
        # funcname is a "module.attr" string, not a callable: import the
        # module and walk the attribute path to resolve the hook function.
        d = funcname.rfind('.')
        if d == -1:
            raise util.Abort(_('%s hook is invalid ("%s" not in '
                               'a module)') % (hname, funcname))
        modname = funcname[:d]
        oldpaths = sys.path
        if util.mainfrozen():
            # binary installs require sys.path manipulation
            modpath, modfile = os.path.split(modname)
            if modpath and modfile:
                sys.path = sys.path[:] + [modpath]
                modname = modfile
        try:
            # Demand-import must be off so import errors surface here
            # rather than at first attribute access.
            demandimport.disable()
            obj = __import__(modname)
            demandimport.enable()
        except ImportError:
            e1 = sys.exc_type, sys.exc_value, sys.exc_traceback
            try:
                # extensions are loaded with hgext_ prefix
                obj = __import__("hgext_%s" % modname)
                demandimport.enable()
            except ImportError:
                demandimport.enable()
                e2 = sys.exc_type, sys.exc_value, sys.exc_traceback
                # Report both failed import attempts before aborting.
                if ui.tracebackflag:
                    ui.warn(_('exception from first failed import attempt:\n'))
                ui.traceback(e1)
                if ui.tracebackflag:
                    ui.warn(_('exception from second failed import attempt:\n'))
                ui.traceback(e2)
                raise util.Abort(_('%s hook is invalid '
                                   '(import of "%s" failed)') %
                                 (hname, modname))
        sys.path = oldpaths
        try:
            # Resolve nested attributes, e.g. "pkg.mod.func" -> mod.func.
            for p in funcname.split('.')[1:]:
                obj = getattr(obj, p)
        except AttributeError:
            raise util.Abort(_('%s hook is invalid '
                               '("%s" is not defined)') %
                             (hname, funcname))
        if not util.safehasattr(obj, '__call__'):
            raise util.Abort(_('%s hook is invalid '
                               '("%s" is not callable)') %
                             (hname, funcname))
    try:
        try:
            # redirect IO descriptors to the ui descriptors so hooks
            # that write directly to these don't mess up the command
            # protocol when running through the command server
            old = sys.stdout, sys.stderr, sys.stdin
            sys.stdout, sys.stderr, sys.stdin = ui.fout, ui.ferr, ui.fin

            r = obj(ui=ui, repo=repo, hooktype=name, **args)
        except KeyboardInterrupt:
            raise
        except Exception, exc:
            if isinstance(exc, util.Abort):
                ui.warn(_('error: %s hook failed: %s\n') %
                        (hname, exc.args[0]))
            else:
                ui.warn(_('error: %s hook raised an exception: '
                          '%s\n') % (hname, exc))
            if throw:
                raise
            ui.traceback()
            # A raising hook counts as a failed hook (truthy return).
            return True
    finally:
        # Restore stdio and log the hook's duration regardless of outcome.
        sys.stdout, sys.stderr, sys.stdin = old
        duration = time.time() - starttime
        readablefunc = funcname
        if isinstance(funcname, types.FunctionType):
            readablefunc = funcname.__module__ + "." + funcname.__name__
        ui.log('pythonhook', 'pythonhook-%s: %s finished in %0.2f seconds\n',
               name, readablefunc, duration)
    if r:
        if throw:
            raise util.Abort(_('%s hook failed') % hname)
        ui.warn(_('warning: %s hook failed\n') % hname)
    return r

def _exthook(ui, repo, name, cmd, args, throw):
    # Run a shell-command hook; hook args are exported as HG_* environment
    # variables. Returns the (possibly rewritten) exit status.
    ui.note(_("running hook %s: %s\n") % (name, cmd))

    starttime = time.time()
    env = {}
    for k, v in args.iteritems():
        # Callable args are evaluated lazily, just before the hook runs.
        if util.safehasattr(v, '__call__'):
            v = v()
        if isinstance(v, dict):
            # make the dictionary element order stable across Python
            # implementations
            v = ('{' +
                 ', '.join('%r: %r' % i for i in sorted(v.iteritems())) +
                 '}')
        env['HG_' + k.upper()] = v

    if repo:
        cwd = repo.root
    else:
        cwd = os.getcwd()
    # NOTE(review): for remote http sessions the hook output is routed
    # through the ui object itself rather than its output stream —
    # presumably to keep the wire protocol clean; confirm against util.system.
    if 'HG_URL' in env and env['HG_URL'].startswith('remote:http'):
        r = util.system(cmd, environ=env, cwd=cwd, out=ui)
    else:
        r = util.system(cmd, environ=env, cwd=cwd, out=ui.fout)

    duration = time.time() - starttime
    ui.log('exthook', 'exthook-%s: %s finished in %0.2f seconds\n',
           name, cmd, duration)
    if r:
        desc, r = util.explainexit(r)
        if throw:
            raise util.Abort(_('%s hook %s') % (name, desc))
        ui.warn(_('warning: %s hook %s\n') % (name, desc))
    return r

def _allhooks(ui):
    # Collect configured hooks sorted by descending 'priority.<name>'
    # (config order breaks ties via the insertion index).
    hooks = []
    for name, cmd in ui.configitems('hooks'):
        if not name.startswith('priority'):
            priority = ui.configint('hooks', 'priority.%s' % name, 0)
            hooks.append((-priority, len(hooks), name, cmd))
    return [(k, v) for p, o, k, v in sorted(hooks)]

# When set via redirect(), hook stdout is rerouted to stderr while
# hooks run (used to protect stdout-based wire protocols).
_redirect = False
def redirect(state):
    global _redirect
    _redirect = state

def hook(ui, repo, name, throw=False, **args):
    '''Run all configured hooks matching `name`; returns the combined
    truthy failure status of the executed hooks.'''
    if not ui.callhooks:
        return False

    r = False
    oldstdout = -1

    try:
        for hname, cmd in _allhooks(ui):
            # 'name' matches 'name' itself and any 'name.suffix' entries.
            if hname.split('.')[0] != name or not cmd:
                continue

            # Duplicate the stdout fd once, lazily, before the first hook.
            if oldstdout == -1 and _redirect:
                try:
                    stdoutno = sys.__stdout__.fileno()
                    stderrno = sys.__stderr__.fileno()
                    # temporarily redirect stdout to stderr, if possible
                    if stdoutno >= 0 and stderrno >= 0:
                        sys.__stdout__.flush()
                        oldstdout = os.dup(stdoutno)
                        os.dup2(stderrno, stdoutno)
                except (OSError, AttributeError):
                    # files seem to be bogus, give up on redirecting (WSGI, etc)
                    pass

            # Dispatch on hook type: in-process callable, "python:" spec,
            # or external shell command.
            if util.safehasattr(cmd, '__call__'):
                r = _pythonhook(ui, repo, name, hname, cmd, args, throw) or r
            elif cmd.startswith('python:'):
                if cmd.count(':') >= 2:
                    # "python:/path/to/file.py:funcname" — load from a file.
                    path, cmd = cmd[7:].rsplit(':', 1)
                    path = util.expandpath(path)
                    if repo:
                        path = os.path.join(repo.root, path)
                    try:
                        mod = extensions.loadpath(path, 'hghook.%s' % hname)
                    except Exception:
                        ui.write(_("loading %s hook failed:\n") % hname)
                        raise
                    hookfn = getattr(mod, cmd)
                else:
                    # "python:module.func" — resolved inside _pythonhook.
                    hookfn = cmd[7:].strip()
                r = _pythonhook(ui, repo, name, hname, hookfn, args,
                                throw) or r
            else:
                r = _exthook(ui, repo, hname, cmd, args, throw) or r
    finally:
        # stdoutno is only bound when oldstdout was set above, so this
        # restore is safe under the oldstdout >= 0 guard.
        if _redirect and oldstdout >= 0:
            os.dup2(oldstdout, stdoutno)
            os.close(oldstdout)

    return r
apache-2.0
openstack/tempest
tempest/lib/api_schema/response/compute/v2_55/flavors.py
3
4245
# Copyright 2018 NEC Corporation.  All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import copy

from tempest.lib.api_schema.response.compute.v2_1 import flavors as flavorsv21
from tempest.lib.api_schema.response.compute.v2_1 import parameter_types

# Note(gmann): This is schema for microversion 2.55 which includes the
# following changes:
# Add new PUT API
# Adds a ``description`` field to the following APIs response:
# - ``GET /flavors``
# - ``GET /flavors/detail``
# - ``GET /flavors/{flavor_id}``
# - ``POST /flavors``

# JSON-Schema fragment reused for every 'description' property below;
# nullable because the API may return null when no description is set.
flavor_description = {
    'type': ['string', 'null'],
    'minLength': 0,
    'maxLength': 65535
}

# Response schema for ``GET /flavors`` (the brief listing: id/name/links
# plus the 2.55 'description' field).
list_flavors = {
    'status_code': [200],
    'response_body': {
        'type': 'object',
        'properties': {
            'flavors': {
                'type': 'array',
                'items': {
                    'type': 'object',
                    'properties': {
                        'name': {'type': 'string'},
                        'links': parameter_types.links,
                        'id': {'type': 'string'},
                        'description': flavor_description
                    },
                    'additionalProperties': False,
                    'required': ['name', 'links', 'id', 'description']
                }
            },
            'flavors_links': parameter_types.links
        },
        'additionalProperties': False,
        # NOTE(gmann): flavors_links attribute is not necessary
        # to be present always So it is not 'required'.
        'required': ['flavors']
    }
}

# Full flavor representation shared by the detail-list and single-flavor
# response schemas below.
common_flavor_info = {
    'type': 'object',
    'properties': {
        'name': {'type': 'string'},
        'links': parameter_types.links,
        'ram': {'type': 'integer'},
        'vcpus': {'type': 'integer'},
        # 'swap' attributes comes as integer value but if it is empty
        # it comes as "". So defining type of as string and integer.
        'swap': {'type': ['integer', 'string']},
        'disk': {'type': 'integer'},
        'id': {'type': 'string'},
        'OS-FLV-DISABLED:disabled': {'type': 'boolean'},
        'os-flavor-access:is_public': {'type': 'boolean'},
        'rxtx_factor': {'type': 'number'},
        'OS-FLV-EXT-DATA:ephemeral': {'type': 'integer'},
        'description': flavor_description
    },
    'additionalProperties': False,
    # 'OS-FLV-DISABLED', 'os-flavor-access', 'rxtx_factor' and
    # 'OS-FLV-EXT-DATA' are API extensions. So they are not 'required'.
    'required': ['name', 'links', 'ram', 'vcpus', 'swap', 'disk', 'id',
                 'description']
}

# Response schema for ``GET /flavors/detail``.
list_flavors_details = {
    'status_code': [200],
    'response_body': {
        'type': 'object',
        'properties': {
            'flavors': {
                'type': 'array',
                'items': common_flavor_info
            },
            # NOTE(gmann): flavors_links attribute is not necessary
            # to be present always So it is not 'required'.
            'flavors_links': parameter_types.links
        },
        'additionalProperties': False,
        'required': ['flavors']
    }
}

# Shared response schema for POST/PUT /flavors and GET /flavors/{id}.
create_update_get_flavor_details = {
    'status_code': [200],
    'response_body': {
        'type': 'object',
        'properties': {
            'flavor': common_flavor_info
        },
        'additionalProperties': False,
        'required': ['flavor']
    }
}

# Note(zhufl): Below are the unchanged schema in this microversion. We need
# to keep this schema in this file to have the generic way to select the
# right schema based on self.schema_versions_info mapping in service client.
# ****** Schemas unchanged since microversion 2.1 ***
delete_flavor = copy.deepcopy(flavorsv21.delete_flavor)
apache-2.0
trondeau/gnuradio-old
gr-blocks/python/blocks/qa_skiphead.py
57
3568
#!/usr/bin/env python
#
# Copyright 2007,2010,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING.  If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#

from gnuradio import gr, gr_unittest, blocks


class test_skiphead(gr_unittest.TestCase):
    """QA for blocks.skiphead: exactly the first N items must be dropped."""

    def setUp(self):
        self.tb = gr.top_block()
        self.src_data = [int(x) for x in range(65536)]

    def tearDown(self):
        self.tb = None

    def _run_skip_test(self, skip_cnt):
        # Shared driver for every case below: build
        # source -> skiphead(skip_cnt) -> sink, run the flowgraph and
        # check the sink received everything except the first skip_cnt
        # items.  The test methods were previously byte-for-byte copies
        # of each other differing only in skip_cnt.
        expected_result = tuple(self.src_data[skip_cnt:])
        src1 = blocks.vector_source_i(self.src_data)
        op = blocks.skiphead(gr.sizeof_int, skip_cnt)
        dst1 = blocks.vector_sink_i()
        self.tb.connect(src1, op, dst1)
        self.tb.run()
        self.assertEqual(expected_result, dst1.data())

    def test_skip_0(self):
        # Degenerate case: skipping nothing passes the stream unchanged.
        self._run_skip_test(0)

    def test_skip_1(self):
        self._run_skip_test(1)

    def test_skip_1023(self):
        self._run_skip_test(1023)

    def test_skip_6339(self):
        self._run_skip_test(6339)

    def test_skip_12678(self):
        self._run_skip_test(12678)

    def test_skip_all(self):
        # Skipping the whole stream must produce empty output.
        self._run_skip_test(len(self.src_data))


if __name__ == '__main__':
    gr_unittest.run(test_skiphead, "test_skiphead.xml")
gpl-3.0
thiriel/maps
django/core/management/commands/runserver.py
158
5632
from optparse import make_option import os import re import sys import socket from django.core.management.base import BaseCommand, CommandError from django.core.handlers.wsgi import WSGIHandler from django.core.servers.basehttp import AdminMediaHandler, run, WSGIServerException from django.utils import autoreload naiveip_re = re.compile(r"""^(?: (?P<addr> (?P<ipv4>\d{1,3}(?:\.\d{1,3}){3}) | # IPv4 address (?P<ipv6>\[[a-fA-F0-9:]+\]) | # IPv6 address (?P<fqdn>[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*) # FQDN ):)?(?P<port>\d+)$""", re.X) DEFAULT_PORT = "8000" class BaseRunserverCommand(BaseCommand): option_list = BaseCommand.option_list + ( make_option('--ipv6', '-6', action='store_true', dest='use_ipv6', default=False, help='Tells Django to use a IPv6 address.'), make_option('--noreload', action='store_false', dest='use_reloader', default=True, help='Tells Django to NOT use the auto-reloader.'), ) help = "Starts a lightweight Web server for development." args = '[optional port number, or ipaddr:port]' # Validation is called explicitly each time the server is reloaded. requires_model_validation = False def get_handler(self, *args, **options): """ Returns the default WSGI handler for the runner. """ return WSGIHandler() def handle(self, addrport='', *args, **options): self.use_ipv6 = options.get('use_ipv6') if self.use_ipv6 and not socket.has_ipv6: raise CommandError('Your Python does not support IPv6.') if args: raise CommandError('Usage is runserver %s' % self.args) self._raw_ipv6 = False if not addrport: self.addr = '' self.port = DEFAULT_PORT else: m = re.match(naiveip_re, addrport) if m is None: raise CommandError('"%s" is not a valid port number ' 'or address:port pair.' % addrport) self.addr, _ipv4, _ipv6, _fqdn, self.port = m.groups() if not self.port.isdigit(): raise CommandError("%r is not a valid port number." 
% self.port) if self.addr: if _ipv6: self.addr = self.addr[1:-1] self.use_ipv6 = True self._raw_ipv6 = True elif self.use_ipv6 and not _fqdn: raise CommandError('"%s" is not a valid IPv6 address.' % self.addr) if not self.addr: self.addr = self.use_ipv6 and '::1' or '127.0.0.1' self._raw_ipv6 = bool(self.use_ipv6) self.run(*args, **options) def run(self, *args, **options): """ Runs the server, using the autoreloader if needed """ use_reloader = options.get('use_reloader', True) if use_reloader: autoreload.main(self.inner_run, args, options) else: self.inner_run(*args, **options) def inner_run(self, *args, **options): from django.conf import settings from django.utils import translation shutdown_message = options.get('shutdown_message', '') quit_command = (sys.platform == 'win32') and 'CTRL-BREAK' or 'CONTROL-C' self.stdout.write("Validating models...\n\n") self.validate(display_num_errors=True) self.stdout.write(( "Django version %(version)s, using settings %(settings)r\n" "Development server is running at http://%(addr)s:%(port)s/\n" "Quit the server with %(quit_command)s.\n" ) % { "version": self.get_version(), "settings": settings.SETTINGS_MODULE, "addr": self._raw_ipv6 and '[%s]' % self.addr or self.addr, "port": self.port, "quit_command": quit_command, }) # django.core.management.base forces the locale to en-us. We should # set it up correctly for the first request (particularly important # in the "--noreload" case). translation.activate(settings.LANGUAGE_CODE) try: handler = self.get_handler(*args, **options) run(self.addr, int(self.port), handler, ipv6=self.use_ipv6) except WSGIServerException, e: # Use helpful error messages instead of ugly tracebacks. 
ERRORS = { 13: "You don't have permission to access that port.", 98: "That port is already in use.", 99: "That IP address can't be assigned-to.", } try: error_text = ERRORS[e.args[0].args[0]] except (AttributeError, KeyError): error_text = str(e) sys.stderr.write(self.style.ERROR("Error: %s" % error_text) + '\n') # Need to use an OS exit because sys.exit doesn't work in a thread os._exit(1) except KeyboardInterrupt: if shutdown_message: self.stdout.write("%s\n" % shutdown_message) sys.exit(0) class Command(BaseRunserverCommand): option_list = BaseRunserverCommand.option_list + ( make_option('--adminmedia', dest='admin_media_path', default='', help='Specifies the directory from which to serve admin media.'), ) def get_handler(self, *args, **options): """ Serves admin media like old-school (deprecation pending). """ handler = super(Command, self).get_handler(*args, **options) return AdminMediaHandler(handler, options.get('admin_media_path', ''))
bsd-3-clause
endorphinl/horizon-fork
openstack_dashboard/dashboards/admin/hypervisors/compute/views.py
54
3871
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _

from horizon import exceptions
from horizon import forms

from openstack_dashboard import api
from openstack_dashboard.dashboards.admin.hypervisors.compute \
    import forms as project_forms


class EvacuateHostView(forms.ModalFormView):
    """Modal dialog for evacuating all instances off a compute host."""

    form_class = project_forms.EvacuateHostForm
    template_name = 'admin/hypervisors/compute/evacuate_host.html'
    context_object_name = 'compute_host'
    success_url = reverse_lazy("horizon:admin:hypervisors:index")
    page_title = _("Evacuate Host")

    def get_context_data(self, **kwargs):
        # Expose the host (taken from the URL kwargs) to the template.
        context = super(EvacuateHostView, self).get_context_data(**kwargs)
        context["compute_host"] = self.kwargs['compute_host']
        return context

    def get_active_compute_hosts_names(self, *args, **kwargs):
        """Return the host names of all 'up' nova-compute services.

        On API failure the error is reported via exceptions.handle with a
        redirect back to the hypervisors index; on that path this method
        returns None.
        """
        try:
            services = api.nova.service_list(self.request,
                                             binary='nova-compute')
            return [service.host for service in services
                    if service.state == 'up']
        except Exception:
            redirect = reverse("horizon:admin:hypervisors:index")
            msg = _('Unable to retrieve compute host information.')
            exceptions.handle(self.request, msg, redirect=redirect)

    def get_initial(self):
        # Pre-populate the form with the source host and the list of
        # candidate target hosts.
        initial = super(EvacuateHostView, self).get_initial()
        hosts = self.get_active_compute_hosts_names()
        current_host = self.kwargs['compute_host']
        initial.update({'current_host': current_host,
                        'hosts': hosts})
        return initial


class DisableServiceView(forms.ModalFormView):
    """Modal dialog for disabling the compute service on a host."""

    form_class = project_forms.DisableServiceForm
    template_name = 'admin/hypervisors/compute/disable_service.html'
    context_object_name = 'compute_host'
    success_url = reverse_lazy("horizon:admin:hypervisors:index")
    page_title = _("Disable Service")

    def get_context_data(self, **kwargs):
        # Expose the host (taken from the URL kwargs) to the template.
        context = super(DisableServiceView, self).get_context_data(**kwargs)
        context["compute_host"] = self.kwargs['compute_host']
        return context

    def get_initial(self):
        initial = super(DisableServiceView, self).get_initial()
        initial.update({'host': self.kwargs['compute_host']})
        return initial


class MigrateHostView(forms.ModalFormView):
    """Modal dialog for migrating all instances off a compute host."""

    form_class = project_forms.MigrateHostForm
    template_name = 'admin/hypervisors/compute/migrate_host.html'
    context_object_name = 'compute_host'
    success_url = reverse_lazy("horizon:admin:hypervisors:index")
    page_title = _("Migrate Host")

    def get_context_data(self, **kwargs):
        # Expose the host (taken from the URL kwargs) to the template.
        context = super(MigrateHostView, self).get_context_data(**kwargs)
        context["compute_host"] = self.kwargs['compute_host']
        return context

    def get_initial(self):
        # Default to a live migration without block migration or disk
        # over-commit; the form lets the operator override these.
        initial = super(MigrateHostView, self).get_initial()
        current_host = self.kwargs['compute_host']
        initial.update({
            'current_host': current_host,
            'live_migrate': True,
            'block_migration': False,
            'disk_over_commit': False
        })
        return initial
apache-2.0
patrioticcow/MessagesForSkype
packages/win32/bundle/MessagesForSkype/modules/python/1.3.1-beta/Lib/idlelib/CallTipWindow.py
96
5924
"""A CallTip window class for Tkinter/IDLE. After ToolTip.py, which uses ideas gleaned from PySol Used by the CallTips IDLE extension. """ from Tkinter import * HIDE_VIRTUAL_EVENT_NAME = "<<calltipwindow-hide>>" HIDE_SEQUENCES = ("<Key-Escape>", "<FocusOut>") CHECKHIDE_VIRTUAL_EVENT_NAME = "<<calltipwindow-checkhide>>" CHECKHIDE_SEQUENCES = ("<KeyRelease>", "<ButtonRelease>") CHECKHIDE_TIME = 100 # miliseconds MARK_RIGHT = "calltipwindowregion_right" class CallTip: def __init__(self, widget): self.widget = widget self.tipwindow = self.label = None self.parenline = self.parencol = None self.lastline = None self.hideid = self.checkhideid = None def position_window(self): """Check if needs to reposition the window, and if so - do it.""" curline = int(self.widget.index("insert").split('.')[0]) if curline == self.lastline: return self.lastline = curline self.widget.see("insert") if curline == self.parenline: box = self.widget.bbox("%d.%d" % (self.parenline, self.parencol)) else: box = self.widget.bbox("%d.0" % curline) if not box: box = list(self.widget.bbox("insert")) # align to left of window box[0] = 0 box[2] = 0 x = box[0] + self.widget.winfo_rootx() + 2 y = box[1] + box[3] + self.widget.winfo_rooty() self.tipwindow.wm_geometry("+%d+%d" % (x, y)) def showtip(self, text, parenleft, parenright): """Show the calltip, bind events which will close it and reposition it. """ # truncate overly long calltip if len(text) >= 79: textlines = text.splitlines() for i, line in enumerate(textlines): if len(line) > 79: textlines[i] = line[:75] + ' ...' 
text = '\n'.join(textlines) self.text = text if self.tipwindow or not self.text: return self.widget.mark_set(MARK_RIGHT, parenright) self.parenline, self.parencol = map( int, self.widget.index(parenleft).split(".")) self.tipwindow = tw = Toplevel(self.widget) self.position_window() # remove border on calltip window tw.wm_overrideredirect(1) try: # This command is only needed and available on Tk >= 8.4.0 for OSX # Without it, call tips intrude on the typing process by grabbing # the focus. tw.tk.call("::tk::unsupported::MacWindowStyle", "style", tw._w, "help", "noActivates") except TclError: pass self.label = Label(tw, text=self.text, justify=LEFT, background="#ffffe0", relief=SOLID, borderwidth=1, font = self.widget['font']) self.label.pack() self.checkhideid = self.widget.bind(CHECKHIDE_VIRTUAL_EVENT_NAME, self.checkhide_event) for seq in CHECKHIDE_SEQUENCES: self.widget.event_add(CHECKHIDE_VIRTUAL_EVENT_NAME, seq) self.widget.after(CHECKHIDE_TIME, self.checkhide_event) self.hideid = self.widget.bind(HIDE_VIRTUAL_EVENT_NAME, self.hide_event) for seq in HIDE_SEQUENCES: self.widget.event_add(HIDE_VIRTUAL_EVENT_NAME, seq) def checkhide_event(self, event=None): if not self.tipwindow: # If the event was triggered by the same event that unbinded # this function, the function will be called nevertheless, # so do nothing in this case. return curline, curcol = map(int, self.widget.index("insert").split('.')) if curline < self.parenline or \ (curline == self.parenline and curcol <= self.parencol) or \ self.widget.compare("insert", ">", MARK_RIGHT): self.hidetip() else: self.position_window() self.widget.after(CHECKHIDE_TIME, self.checkhide_event) def hide_event(self, event): if not self.tipwindow: # See the explanation in checkhide_event. 
return self.hidetip() def hidetip(self): if not self.tipwindow: return for seq in CHECKHIDE_SEQUENCES: self.widget.event_delete(CHECKHIDE_VIRTUAL_EVENT_NAME, seq) self.widget.unbind(CHECKHIDE_VIRTUAL_EVENT_NAME, self.checkhideid) self.checkhideid = None for seq in HIDE_SEQUENCES: self.widget.event_delete(HIDE_VIRTUAL_EVENT_NAME, seq) self.widget.unbind(HIDE_VIRTUAL_EVENT_NAME, self.hideid) self.hideid = None self.label.destroy() self.label = None self.tipwindow.destroy() self.tipwindow = None self.widget.mark_unset(MARK_RIGHT) self.parenline = self.parencol = self.lastline = None def is_active(self): return bool(self.tipwindow) ############################### # # Test Code # class container: # Conceptually an editor_window def __init__(self): root = Tk() text = self.text = Text(root) text.pack(side=LEFT, fill=BOTH, expand=1) text.insert("insert", "string.split") root.update() self.calltip = CallTip(text) text.event_add("<<calltip-show>>", "(") text.event_add("<<calltip-hide>>", ")") text.bind("<<calltip-show>>", self.calltip_show) text.bind("<<calltip-hide>>", self.calltip_hide) text.focus_set() root.mainloop() def calltip_show(self, event): self.calltip.showtip("Hello world") def calltip_hide(self, event): self.calltip.hidetip() def main(): # Test code c=container() if __name__=='__main__': main()
mit
Kismon/kismon
kismon/widgets/networklist.py
1
15685
from gi.repository import Gtk from gi.repository import Gdk from gi.repository import GObject from gi.repository import GLib import kismon.utils as utils class NetworkList: def __init__(self, networks, locate_network_on_map, on_signal_graph, config): self.network_lines = {} self.network_iter = {} self.network_selected = None self.locate_network_on_map = locate_network_on_map self.on_signal_graph = on_signal_graph self.networks = networks self.config = config self.value_cache = {} for key in ('time', 'crypt', 'server', 'type', 'channel', 'signal', 'ssid'): self.value_cache[key] = {} self.networks.notify_add_list["network_list"] = self.add_network self.networks.notify_remove_list["network_list"] = self.remove_network self.networks.disable_refresh_functions.append(self.pause) self.networks.resume_refresh_functions.append(self.resume) self.treeview = Gtk.TreeView() num = 0 self.enabled_columns = {} self.columns = ("BSSID", "Type", "SSID", "Ch", "Crypt", "First Seen", "Last Seen", "Latitude", "Longitude", "Signal dbm", "Comment", "Servers") self.available_columns = {} if len(self.config['network_list_columns']) == 0: self.config['network_list_columns'] = list(self.columns) for column in self.columns: renderer = Gtk.CellRendererText() if column == "Comment": renderer.set_property('editable', True) renderer.connect("editing-started", self.on_comment_editing_started) elif column == "Signal dbm": renderer = Gtk.CellRendererProgress() tvcolumn = Gtk.TreeViewColumn(column, renderer, text=num) self.available_columns[column] = tvcolumn cell = Gtk.CellRendererText() tvcolumn.pack_start(cell, True) tvcolumn.set_sort_column_id(num) tvcolumn.set_clickable(True) tvcolumn.set_resizable(True) tvcolumn.set_reorderable(True) if column == "Signal dbm": tvcolumn.add_attribute(renderer, "value", 12) num += 1 self.treeview.insert_column(tvcolumn, 0) # the button only gets created when the column is inserted tvcolumbutton = tvcolumn.get_button() tvcolumbutton.connect('button-press-event', 
self.on_column_clicked, num) self.treeview.remove_column(tvcolumn) # the columns get added again in the right order # read the column list from the config to preserve their order for column in self.config['network_list_columns']: self.add_column(column) self.treeview.connect("button-press-event", self.on_treeview_clicked) # has to be done after TreeViewColumn's self.treeview.connect("columns-changed", self.on_columns_changed) self.treeview.show() self.store = Gtk.ListStore( GObject.TYPE_STRING, # mac GObject.TYPE_STRING, # type GObject.TYPE_STRING, # ssid GObject.TYPE_INT, # channel GObject.TYPE_STRING, # cryptset GObject.TYPE_STRING, # firsttime GObject.TYPE_STRING, # lasttime GObject.TYPE_FLOAT, # lat GObject.TYPE_FLOAT, # lon GObject.TYPE_INT, # signal dbm GObject.TYPE_STRING, # comment GObject.TYPE_STRING, # servers GObject.TYPE_INT, # signal dbm + 100 (progressbar) ) self.treeview.set_model(self.store) scrolled = Gtk.ScrolledWindow() scrolled.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC) scrolled.add(self.treeview) frame = Gtk.Frame() frame.set_label("Networks") frame.add(scrolled) self.scrolled_window = scrolled self.widget = frame self.store.set_sort_column_id(6, Gtk.SortType.DESCENDING) network_popup = Gtk.Menu() locate_item = Gtk.MenuItem.new_with_label('Copy field') network_popup.append(locate_item) locate_item.connect("activate", self.on_copy_field) locate_item = Gtk.MenuItem.new_with_label('Copy network') network_popup.append(locate_item) locate_item.connect("activate", self.on_copy_network) locate_item = Gtk.MenuItem.new_with_label('Locate on map') network_popup.append(locate_item) locate_item.connect("activate", self.on_locate_marker) signal_item = Gtk.MenuItem.new_with_label('Signal graph') network_popup.append(signal_item) signal_item.connect("activate", self.on_signal_graph) network_popup.show_all() self.network_popup = network_popup self.clipboard = Gtk.Clipboard.get(Gdk.SELECTION_CLIPBOARD) self.treeview_click_event = None def 
add_column(self, column): if column not in self.config["network_list_columns"]: # Position the column next to its original neighbor column as defined in self.columns. # If that column is not enabled, go further to the left. x = 1 while True: left_column_position = self.columns.index(column) - x if self.columns[left_column_position] in self.config["network_list_columns"]: break if x < 0: break x += 1 column_position = left_column_position + 1 self.config["network_list_columns"].insert(column_position, column) else: column_position = self.config["network_list_columns"].index(column) self.treeview.insert_column(self.available_columns[column], column_position) self.enabled_columns[column] = self.available_columns[column] def remove_column(self, column): self.treeview.remove_column(self.enabled_columns[column]) del self.enabled_columns[column] self.config["network_list_columns"].remove(column) def on_column_clicked(self, widget, event, num=None): self.treeview_click_event = event if event.button == 1: # left click self.treeview.set_search_column(num) elif event.button == 3: # right click self.open_column_popup(event) def on_columns_changed(self, widget): columns = self.treeview.get_columns() if len(columns) != len(self.enabled_columns): # when the widget gets destroyed, the event is triggered after each column was removed return new_list = [] for column in columns: new_list.append(column.get_title()) self.config["network_list_columns"] = new_list def open_column_popup(self, event): column_popup = Gtk.Menu() for column in self.available_columns: item = Gtk.CheckMenuItem.new_with_label(column) column_popup.append(item) if column in self.enabled_columns: item.activate() item.connect("activate", self.on_column_activated, column) column_popup.show_all() column_popup.popup_at_pointer(event) def on_column_activated(self, widget, column): active = widget.get_active() if active: self.add_column(column) else: self.remove_column(column) def on_comment_editing_started(self, widget, 
editable, path): editable.connect("editing-done", self.on_comment_editing_done) def on_comment_editing_done(self, widget): network = self.networks.get_network(self.network_selected) network['comment'] = widget.get_text() self.add_network(self.network_selected) def prepare_network_servers(self, value): if len(value) == 0 or value is None: servers = None else: servers = [] for server in value: if server.endswith(':2501'): # remove the default port server = server.rsplit(':', 1)[0] servers.append(server) servers_str = ", ".join(sorted(servers)) try: servers = self.value_cache['server'][servers_str] except KeyError: servers = GObject.Value(GObject.TYPE_STRING, servers_str) self.value_cache['server'][servers_str] = servers return servers def prepare_network_time(self, value): try: result = self.value_cache['time'][value] except KeyError: result = GObject.Value(GObject.TYPE_STRING, utils.format_timestamp(value)) self.value_cache['time'][value] = result return result def prepare_network_crypt(self, value): try: crypt = self.value_cache['crypt'][value] except KeyError: crypt = GObject.Value(GObject.TYPE_STRING, value) self.value_cache['crypt'][value] = crypt return crypt def prepare_network_channel(self, value): try: channel = self.value_cache['channel'][value] except KeyError: channel = GObject.Value(GObject.TYPE_INT, value) self.value_cache['channel'][value] = channel return channel def prepare_network_type(self, value): try: network_type = self.value_cache['type'][value] except KeyError: network_type = GObject.Value(GObject.TYPE_STRING, value) self.value_cache['type'][value] = network_type return network_type def prepare_network_signal(self, value): try: return self.value_cache['signal'][value] except KeyError: pass """ Wifi cards report different ranges for the signal, some use -1xx to 0 and others 0 to 100. The CellRendererProgress needs a percentage value between 0 and 100, so we convert the value if necessary. 
""" if -100 <= value <= 0: signal_strength = value + 100 elif value < -100: signal_strength = 0 elif 1 <= value <= 100: signal_strength = value else: signal_strength = 0 signal = GObject.Value(GObject.TYPE_INT, value) signal_strength = GObject.Value(GObject.TYPE_INT, signal_strength) self.value_cache['signal'][value] = (signal, signal_strength) return signal, signal_strength def prepare_network_ssid(self, value): if value == "": ssid_str = "<no ssid>" else: ssid_str = value try: ssid = self.value_cache['ssid'][ssid_str] except KeyError: ssid = GObject.Value(GObject.TYPE_STRING, ssid_str) self.value_cache['ssid'][ssid_str] = ssid return ssid @staticmethod def prepare_network_coordinate(value): if value == 0.0: return None else: return value def add_network(self, mac): network = self.networks.get_network(mac) """ The Gtk.ListStore will convert every Python-type value to its GObject equivalent. Most of the prepare_network_* functions cache and return the value as a GObject, this speed things up as we have a lot of duplicate values. Furthermore a None value is faster then an zero size string, so we replace it where possible. 
""" if "signal_dbm" not in network or len(network["signal_dbm"]) != 3: signal = 0 else: signal = network["signal_dbm"]["last"] signal, signal_strength = self.prepare_network_signal(signal) if network['comment'] == '': comment = None else: comment = network['comment'] line = [mac, self.prepare_network_type(network["type"]), self.prepare_network_ssid(network["ssid"]), self.prepare_network_channel(network["channel"]), self.prepare_network_crypt(network["crypt"]), self.prepare_network_time(network["firsttime"]), self.prepare_network_time(network["lasttime"]), self.prepare_network_coordinate(network["lat"]), self.prepare_network_coordinate(network["lon"]), signal, comment, self.prepare_network_servers(network["servers"]), signal_strength ] try: old_line = self.network_lines[mac] except: old_line = None self.network_lines[mac] = line if mac in self.network_iter: network_iter = self.network_iter[mac] num = 0 for value in line: if old_line is not None and old_line.pop(0) == value: num += 1 continue self.store.set_value(network_iter, num, value) num += 1 else: self.network_iter[mac] = self.store.append(line) # stick to the top of the table after adding a new row adj = self.scrolled_window.get_vadjustment() self.scroll_value = int(adj.get_value()) if self.scroll_value == 0: GLib.idle_add(self.treeview.scroll_to_point, -1, 0) def remove_network(self, mac): try: network_iter = self.network_iter[mac] except KeyError: return self.store.remove(network_iter) del (self.network_iter[mac]) def pause(self): self.treeview.freeze_child_notify() self.treeview.set_model(None) def resume(self): self.treeview.set_model(self.store) self.treeview.thaw_child_notify() def on_treeview_clicked(self, treeview, event): if self.treeview_click_event == event: return x = int(event.x) y = int(event.y) pthinfo = treeview.get_path_at_pos(x, y) if pthinfo is None: return path, col, cellx, celly = pthinfo treeview.grab_focus() treeview.set_cursor(path, col, 0) network_iter = self.store.get_iter(path) mac = 
self.store.get_value(network_iter, 0) self.network_selected = mac self.column_selected = self.columns.index(col.get_title()) if event.type == Gdk.EventType.DOUBLE_BUTTON_PRESS: # double click self.on_locate_marker(None) elif event.button == 3: # right click self.network_popup.popup(None, None, None, 0, event.button, event.time, ) def on_locate_marker(self, widget): if self.locate_network_on_map is not None: self.locate_network_on_map(self.network_selected) def on_copy_field(self, widget): selected_text = self.get_value_from_cell(self.network_selected, self.column_selected) self.set_clipboard(selected_text) def on_copy_network(self, widget): text = [] num = 0 for column in self.available_columns: value = self.get_value_from_cell(self.network_selected, num) text.append("%s: %s" % (column, value)) num += 1 self.set_clipboard('\n'.join(text)) def set_clipboard(self, text): self.clipboard.set_text("%s" % text, -1) self.clipboard.store() def get_value_from_cell(self, mac, column): value = self.network_lines[mac][column] try: value = value.get_value() except AttributeError: pass return value
bsd-3-clause
bloyl/mne-python
mne/decoding/mixin.py
14
2851
class TransformerMixin(object):
    """Mixin class for all transformers in scikit-learn."""

    def fit_transform(self, X, y=None, **fit_params):
        """Fit to data, then transform it.

        Fits transformer to X and y with optional parameters fit_params
        and returns a transformed version of X.

        Parameters
        ----------
        X : array, shape (n_samples, n_features)
            Training set.
        y : array, shape (n_samples,)
            Target values.
        **fit_params : dict
            Additional fitting parameters passed to ``self.fit``.

        Returns
        -------
        X_new : array, shape (n_samples, n_features_new)
            Transformed array.
        """
        # Non-optimized default implementation; override when a better
        # method is possible for a given clustering algorithm.
        if y is None:
            # fit method of arity 1 (unsupervised transformation)
            return self.fit(X, **fit_params).transform(X)
        # fit method of arity 2 (supervised transformation)
        return self.fit(X, y, **fit_params).transform(X)


class EstimatorMixin(object):
    """Mixin class for estimators."""

    def get_params(self, deep=True):
        """Get the estimator params.

        Parameters
        ----------
        deep : bool
            Deep.

        Notes
        -----
        The base implementation returns None; subclasses are expected to
        override this and return a dict of parameter names to values.
        """
        return

    def set_params(self, **params):
        """Set parameters (mimics sklearn API).

        Supports nested parameters with the ``<component>__<param>``
        syntax, delegating to the sub-object's own ``set_params``.

        Parameters
        ----------
        **params : dict
            Extra parameters.

        Returns
        -------
        inst : object
            The instance.

        Raises
        ------
        ValueError
            If a (possibly nested) parameter name is not present in
            ``self.get_params(deep=True)``.
        """
        if not params:
            return self
        valid_params = self.get_params(deep=True)
        for key, value in params.items():
            # ``partition`` mirrors ``key.split('__', 1)``: a non-empty
            # separator means a nested "<name>__<sub_name>" key.
            name, sep, sub_name = key.partition('__')
            if sep:  # nested objects case
                if name not in valid_params:
                    raise ValueError('Invalid parameter %s for estimator %s. '
                                     'Check the list of available parameters '
                                     'with `estimator.get_params().keys()`.' %
                                     (name, self.__class__.__name__))
                sub_object = valid_params[name]
                sub_object.set_params(**{sub_name: value})
            else:  # simple objects case
                if key not in valid_params:
                    raise ValueError('Invalid parameter %s for estimator %s. '
                                     'Check the list of available parameters '
                                     'with `estimator.get_params().keys()`.' %
                                     (key, self.__class__.__name__))
                setattr(self, key, value)
        return self
bsd-3-clause
nitirohilla/upm
examples/python/hwxpxx.py
7
2921
#!/usr/bin/python
# Author: Jon Trulson <jtrulson@ics.com>
# Copyright (c) 2016 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_hwxpxx as sensorObj


def main():
    """Poll an HWXPXX transducer over MODBUS and print its readings forever.

    The serial device defaults to /dev/ttyUSB0 and can be overridden by
    the first command-line argument.  Loops until interrupted (Ctrl-C).
    """
    ## Exit handlers ##
    # This function stops python from printing a stacktrace when you hit control-C
    def SIGINTHandler(signum, frame):
        raise SystemExit

    # This function lets you run code on exit
    def exitHandler():
        print("Exiting...")
        sys.exit(0)

    # Register exit handlers
    atexit.register(exitHandler)
    signal.signal(signal.SIGINT, SIGINTHandler)

    defaultDev = "/dev/ttyUSB0"

    # if an argument was specified, use it as the device instead
    if (len(sys.argv) > 1):
        defaultDev = sys.argv[1]

    print("Using device", defaultDev)
    print("Initializing...")

    # Instantiate an HWXPXX instance, using MODBUS slave address 3, and
    # default comm parameters (19200, 8, N, 2)
    sensor = sensorObj.HWXPXX(defaultDev, 3)

    # output the serial number and firmware revision
    print("Slave ID:", sensor.getSlaveID())

    # stored temperature and humidity offsets
    print("Temperature Offset:", sensor.getTemperatureOffset())
    print("Humidity Offset:", sensor.getHumidityOffset())

    print()

    # update and print available values every second
    while (1):
        # update our values from the sensor
        sensor.update()

        # we show both C and F for temperature
        print("Temperature:", sensor.getTemperature(), "C /", end=' ')
        print(sensor.getTemperature(True), "F")

        print("Humidity:", sensor.getHumidity(), "%")
        print("Slider:", sensor.getSlider(), "%")
        print("Override Switch Status:", sensor.getOverrideSwitchStatus())

        print()
        time.sleep(1)

if __name__ == '__main__':
    main()
mit
tima/ansible
lib/ansible/plugins/lookup/fileglob.py
54
1880
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com> # (c) 2017 Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type DOCUMENTATION = """ lookup: fileglob author: Michael DeHaan <michael.dehaan@gmail.com> version_added: "1.4" short_description: list files matching a pattern description: - Matches all files in a single directory, non-recursively, that match a pattern. It calls Python's "glob" library. options: _terms: description: path(s) of files to read required: True notes: - Patterns ore only supported on files, not directory/paths. """ EXAMPLES = """ - name: display content of all .txt files in dir debug: msg={{lookup('fileglob', '/my/path/*.txt')}} - name: Copy each file over that matches the given pattern copy: src: "{{ item }}" dest: "/etc/fooapp/" owner: "root" mode: 0600 with_fileglob: - "/playbooks/files/fooapp/*" """ RETURN = """ _raw: description: - content of file(s) """ import os import glob from ansible.plugins.lookup import LookupBase from ansible.errors import AnsibleFileNotFound from ansible.module_utils._text import to_bytes, to_text class LookupModule(LookupBase): def run(self, terms, variables=None, **kwargs): ret = [] for term in terms: term_file = os.path.basename(term) dwimmed_path = self.find_file_in_search_path(variables, 'files', os.path.dirname(term)) if dwimmed_path: globbed = glob.glob(to_bytes(os.path.join(dwimmed_path, term_file), errors='surrogate_or_strict')) ret.extend(to_text(g, errors='surrogate_or_strict') for g in globbed if os.path.isfile(g)) return ret
gpl-3.0
Warrior-Meditation/warriormeditation
bootstrap/test-infra/s3_cache.py
2166
5734
#!/usr/bin/env python2.7 # pylint: disable=C0301 from __future__ import absolute_import, unicode_literals, print_function, division from sys import argv from os import environ, stat, chdir, remove as _delete_file from os.path import dirname, basename, abspath, realpath, expandvars from hashlib import sha256 from subprocess import check_call as run from json import load, dump as save from contextlib import contextmanager from datetime import datetime from boto.s3.connection import S3Connection from boto.s3.key import Key from boto.exception import S3ResponseError CONFIG_FILE = './S3Cachefile.json' UPLOAD_TODO_FILE = './S3CacheTodo.json' BYTES_PER_MB = 1024 * 1024 @contextmanager def timer(): start = datetime.utcnow() yield end = datetime.utcnow() elapsed = end - start print("\tDone. Took", int(elapsed.total_seconds()), "second(s).") @contextmanager def todo_file(writeback=True): try: with open(UPLOAD_TODO_FILE, 'rt') as json_file: todo = load(json_file) except (IOError, OSError, ValueError): todo = {} yield todo if writeback: try: with open(UPLOAD_TODO_FILE, 'wt') as json_file: save(todo, json_file) except (OSError, IOError) as save_err: print("Error saving {}:".format(UPLOAD_TODO_FILE), save_err) def _sha256_of_file(filename): hasher = sha256() with open(filename, 'rb') as input_file: hasher.update(input_file.read()) file_hash = hasher.hexdigest() print('sha256({}) = {}'.format(filename, file_hash)) return file_hash def _delete_file_quietly(filename): try: _delete_file(filename) except (OSError, IOError): pass def mark_needs_uploading(cache_name): with todo_file() as todo: todo[cache_name] = True def mark_uploaded(cache_name): with todo_file() as todo: todo.pop(cache_name, None) def need_to_upload(cache_name): with todo_file(writeback=False) as todo: return todo.get(cache_name, False) def _tarball_size(directory): kib = stat(_tarball_filename_for(directory)).st_size // BYTES_PER_MB return "{} MiB".format(kib) def _tarball_filename_for(directory): return 
abspath('./{}.tar.gz'.format(basename(directory))) def _create_tarball(directory): print("Creating tarball of {}...".format(directory)) with timer(): run(['tar', '-czf', _tarball_filename_for(directory), '-C', dirname(directory), basename(directory)]) def _extract_tarball(directory): print("Extracting tarball of {}...".format(directory)) with timer(): run(['tar', '-xzf', _tarball_filename_for(directory), '-C', dirname(directory)]) def download(directory): mark_uploaded(cache_name) # reset try: print("Downloading {} tarball from S3...".format(cache_name)) with timer(): key.get_contents_to_filename(_tarball_filename_for(directory)) except S3ResponseError as err: mark_needs_uploading(cache_name) raise SystemExit("Cached {} download failed!".format(cache_name)) print("Downloaded {}.".format(_tarball_size(directory))) _extract_tarball(directory) print("{} successfully installed from cache.".format(cache_name)) def upload(directory): _create_tarball(directory) print("Uploading {} tarball to S3... 
({})".format(cache_name, _tarball_size(directory))) with timer(): key.set_contents_from_filename(_tarball_filename_for(directory)) print("{} cache successfully updated.".format(cache_name)) mark_uploaded(cache_name) if __name__ == '__main__': # Uses environment variables: # AWS_ACCESS_KEY_ID -- AWS Access Key ID # AWS_SECRET_ACCESS_KEY -- AWS Secret Access Key argv.pop(0) if len(argv) != 2: raise SystemExit("USAGE: s3_cache.py <download | upload> <cache name>") mode, cache_name = argv script_dir = dirname(realpath(__file__)) chdir(script_dir) try: with open(CONFIG_FILE, 'rt') as config_file: config = load(config_file) except (IOError, OSError, ValueError) as config_err: print(config_err) raise SystemExit("Error when trying to load config from JSON file!") try: cache_info = config[cache_name] key_file = expandvars(cache_info["key"]) fallback_cmd = cache_info["generate"] directory = expandvars(cache_info["cache"]) except (TypeError, KeyError) as load_err: print(load_err) raise SystemExit("Config for cache named {!r} is missing or malformed!".format(cache_name)) try: try: BUCKET_NAME = environ['TWBS_S3_BUCKET'] except KeyError: raise SystemExit("TWBS_S3_BUCKET environment variable not set!") conn = S3Connection() bucket = conn.lookup(BUCKET_NAME) if bucket is None: raise SystemExit("Could not access bucket!") key_file_hash = _sha256_of_file(key_file) key = Key(bucket, key_file_hash) key.storage_class = 'REDUCED_REDUNDANCY' if mode == 'download': download(directory) elif mode == 'upload': if need_to_upload(cache_name): upload(directory) else: print("No need to upload anything.") else: raise SystemExit("Unrecognized mode {!r}".format(mode)) except BaseException as exc: if mode != 'download': raise print("Error!:", exc) print("Unable to download from cache.") print("Running fallback command to generate cache directory {!r}: {}".format(directory, fallback_cmd)) with timer(): run(fallback_cmd, shell=True)
mit
kampanita/pelisalacarta
python/main-classic/lib/mechanize/_msiecookiejar.py
134
14694
"""Microsoft Internet Explorer cookie loading on Windows. Copyright 2002-2003 Johnny Lee <typo_pl@hotmail.com> (MSIE Perl code) Copyright 2002-2006 John J Lee <jjl@pobox.com> (The Python port) This code is free software; you can redistribute it and/or modify it under the terms of the BSD or ZPL 2.1 licenses (see the file COPYING.txt included with the distribution). """ # XXX names and comments are not great here import os, re, time, struct, logging if os.name == "nt": import _winreg from _clientcookie import FileCookieJar, CookieJar, Cookie, \ MISSING_FILENAME_TEXT, LoadError debug = logging.getLogger("mechanize").debug def regload(path, leaf): key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, path, 0, _winreg.KEY_ALL_ACCESS) try: value = _winreg.QueryValueEx(key, leaf)[0] except WindowsError: value = None return value WIN32_EPOCH = 0x019db1ded53e8000L # 1970 Jan 01 00:00:00 in Win32 FILETIME def epoch_time_offset_from_win32_filetime(filetime): """Convert from win32 filetime to seconds-since-epoch value. MSIE stores create and expire times as Win32 FILETIME, which is 64 bits of 100 nanosecond intervals since Jan 01 1601. mechanize expects time in 32-bit value expressed in seconds since the epoch (Jan 01 1970). """ if filetime < WIN32_EPOCH: raise ValueError("filetime (%d) is before epoch (%d)" % (filetime, WIN32_EPOCH)) return divmod((filetime - WIN32_EPOCH), 10000000L)[0] def binary_to_char(c): return "%02X" % ord(c) def binary_to_str(d): return "".join(map(binary_to_char, list(d))) class MSIEBase: magic_re = re.compile(r"Client UrlCache MMF Ver \d\.\d.*") padding = "\x0d\xf0\xad\x0b" msie_domain_re = re.compile(r"^([^/]+)(/.*)$") cookie_re = re.compile("Cookie\:.+\@([\x21-\xFF]+).*?" 
"(.+\@[\x21-\xFF]+\.txt)") # path under HKEY_CURRENT_USER from which to get location of index.dat reg_path = r"software\microsoft\windows" \ r"\currentversion\explorer\shell folders" reg_key = "Cookies" def __init__(self): self._delayload_domains = {} def _delayload_domain(self, domain): # if necessary, lazily load cookies for this domain delayload_info = self._delayload_domains.get(domain) if delayload_info is not None: cookie_file, ignore_discard, ignore_expires = delayload_info try: self.load_cookie_data(cookie_file, ignore_discard, ignore_expires) except (LoadError, IOError): debug("error reading cookie file, skipping: %s", cookie_file) else: del self._delayload_domains[domain] def _load_cookies_from_file(self, filename): debug("Loading MSIE cookies file: %s", filename) cookies = [] cookies_fh = open(filename) try: while 1: key = cookies_fh.readline() if key == "": break rl = cookies_fh.readline def getlong(rl=rl): return long(rl().rstrip()) def getstr(rl=rl): return rl().rstrip() key = key.rstrip() value = getstr() domain_path = getstr() flags = getlong() # 0x2000 bit is for secure I think lo_expire = getlong() hi_expire = getlong() lo_create = getlong() hi_create = getlong() sep = getstr() if "" in (key, value, domain_path, flags, hi_expire, lo_expire, hi_create, lo_create, sep) or (sep != "*"): break m = self.msie_domain_re.search(domain_path) if m: domain = m.group(1) path = m.group(2) cookies.append({"KEY": key, "VALUE": value, "DOMAIN": domain, "PATH": path, "FLAGS": flags, "HIXP": hi_expire, "LOXP": lo_expire, "HICREATE": hi_create, "LOCREATE": lo_create}) finally: cookies_fh.close() return cookies def load_cookie_data(self, filename, ignore_discard=False, ignore_expires=False): """Load cookies from file containing actual cookie data. Old cookies are kept unless overwritten by newly loaded ones. You should not call this method if the delayload attribute is set. I think each of these files contain all cookies for one user, domain, and path. 
filename: file containing cookies -- usually found in a file like C:\WINNT\Profiles\joe\Cookies\joe@blah[1].txt """ now = int(time.time()) cookie_data = self._load_cookies_from_file(filename) for cookie in cookie_data: flags = cookie["FLAGS"] secure = ((flags & 0x2000) != 0) filetime = (cookie["HIXP"] << 32) + cookie["LOXP"] expires = epoch_time_offset_from_win32_filetime(filetime) if expires < now: discard = True else: discard = False domain = cookie["DOMAIN"] initial_dot = domain.startswith(".") if initial_dot: domain_specified = True else: # MSIE 5 does not record whether the domain cookie-attribute # was specified. # Assuming it wasn't is conservative, because with strict # domain matching this will match less frequently; with regular # Netscape tail-matching, this will match at exactly the same # times that domain_specified = True would. It also means we # don't have to prepend a dot to achieve consistency with our # own & Mozilla's domain-munging scheme. domain_specified = False # assume path_specified is false # XXX is there other stuff in here? -- e.g. comment, commentURL? 
c = Cookie(0, cookie["KEY"], cookie["VALUE"], None, False, domain, domain_specified, initial_dot, cookie["PATH"], False, secure, expires, discard, None, None, {"flags": flags}) if not ignore_discard and c.discard: continue if not ignore_expires and c.is_expired(now): continue CookieJar.set_cookie(self, c) def load_from_registry(self, ignore_discard=False, ignore_expires=False, username=None): """ username: only required on win9x """ cookies_dir = regload(self.reg_path, self.reg_key) filename = os.path.normpath(os.path.join(cookies_dir, "INDEX.DAT")) self.load(filename, ignore_discard, ignore_expires, username) def _really_load(self, index, filename, ignore_discard, ignore_expires, username): now = int(time.time()) if username is None: username = os.environ['USERNAME'].lower() cookie_dir = os.path.dirname(filename) data = index.read(256) if len(data) != 256: raise LoadError("%s file is too short" % filename) # Cookies' index.dat file starts with 32 bytes of signature # followed by an offset to the first record, stored as a little- # endian DWORD. sig, size, data = data[:32], data[32:36], data[36:] size = struct.unpack("<L", size)[0] # check that sig is valid if not self.magic_re.match(sig) or size != 0x4000: raise LoadError("%s ['%s' %s] does not seem to contain cookies" % (str(filename), sig, size)) # skip to start of first record index.seek(size, 0) sector = 128 # size of sector in bytes while 1: data = "" # Cookies are usually in two contiguous sectors, so read in two # sectors and adjust if not a Cookie. to_read = 2 * sector d = index.read(to_read) if len(d) != to_read: break data = data + d # Each record starts with a 4-byte signature and a count # (little-endian DWORD) of sectors for the record. 
sig, size, data = data[:4], data[4:8], data[8:] size = struct.unpack("<L", size)[0] to_read = (size - 2) * sector ## from urllib import quote ## print "data", quote(data) ## print "sig", quote(sig) ## print "size in sectors", size ## print "size in bytes", size*sector ## print "size in units of 16 bytes", (size*sector) / 16 ## print "size to read in bytes", to_read ## print if sig != "URL ": assert sig in ("HASH", "LEAK", \ self.padding, "\x00\x00\x00\x00"), \ "unrecognized MSIE index.dat record: %s" % \ binary_to_str(sig) if sig == "\x00\x00\x00\x00": # assume we've got all the cookies, and stop break if sig == self.padding: continue # skip the rest of this record assert to_read >= 0 if size != 2: assert to_read != 0 index.seek(to_read, 1) continue # read in rest of record if necessary if size > 2: more_data = index.read(to_read) if len(more_data) != to_read: break data = data + more_data cookie_re = ("Cookie\:%s\@([\x21-\xFF]+).*?" % username + "(%s\@[\x21-\xFF]+\.txt)" % username) m = re.search(cookie_re, data, re.I) if m: cookie_file = os.path.join(cookie_dir, m.group(2)) if not self.delayload: try: self.load_cookie_data(cookie_file, ignore_discard, ignore_expires) except (LoadError, IOError): debug("error reading cookie file, skipping: %s", cookie_file) else: domain = m.group(1) i = domain.find("/") if i != -1: domain = domain[:i] self._delayload_domains[domain] = ( cookie_file, ignore_discard, ignore_expires) class MSIECookieJar(MSIEBase, FileCookieJar): """FileCookieJar that reads from the Windows MSIE cookies database. MSIECookieJar can read the cookie files of Microsoft Internet Explorer (MSIE) for Windows version 5 on Windows NT and version 6 on Windows XP and Windows 98. Other configurations may also work, but are untested. Saving cookies in MSIE format is NOT supported. If you save cookies, they'll be in the usual Set-Cookie3 format, which you can read back in using an instance of the plain old CookieJar class. 
Don't save using the same filename that you loaded cookies from, because you may succeed in clobbering your MSIE cookies index file! You should be able to have LWP share Internet Explorer's cookies like this (note you need to supply a username to load_from_registry if you're on Windows 9x or Windows ME): cj = MSIECookieJar(delayload=1) # find cookies index file in registry and load cookies from it cj.load_from_registry() opener = mechanize.build_opener(mechanize.HTTPCookieProcessor(cj)) response = opener.open("http://example.com/") Iterating over a delayloaded MSIECookieJar instance will not cause any cookies to be read from disk. To force reading of all cookies from disk, call read_all_cookies. Note that the following methods iterate over self: clear_temporary_cookies, clear_expired_cookies, __len__, __repr__, __str__ and as_string. Additional methods: load_from_registry(ignore_discard=False, ignore_expires=False, username=None) load_cookie_data(filename, ignore_discard=False, ignore_expires=False) read_all_cookies() """ def __init__(self, filename=None, delayload=False, policy=None): MSIEBase.__init__(self) FileCookieJar.__init__(self, filename, delayload, policy) def set_cookie(self, cookie): if self.delayload: self._delayload_domain(cookie.domain) CookieJar.set_cookie(self, cookie) def _cookies_for_request(self, request): """Return a list of cookies to be returned to server.""" domains = self._cookies.copy() domains.update(self._delayload_domains) domains = domains.keys() cookies = [] for domain in domains: cookies.extend(self._cookies_for_domain(domain, request)) return cookies def _cookies_for_domain(self, domain, request): if not self._policy.domain_return_ok(domain, request): return [] debug("Checking %s for cookies to return", domain) if self.delayload: self._delayload_domain(domain) return CookieJar._cookies_for_domain(self, domain, request) def read_all_cookies(self): """Eagerly read in all cookies.""" if self.delayload: for domain in 
self._delayload_domains.keys(): self._delayload_domain(domain) def load(self, filename, ignore_discard=False, ignore_expires=False, username=None): """Load cookies from an MSIE 'index.dat' cookies index file. filename: full path to cookie index file username: only required on win9x """ if filename is None: if self.filename is not None: filename = self.filename else: raise ValueError(MISSING_FILENAME_TEXT) index = open(filename, "rb") try: self._really_load(index, filename, ignore_discard, ignore_expires, username) finally: index.close()
gpl-3.0
maestrano/odoo
addons/auth_signup/res_config.py
445
2860
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2012-today OpenERP SA (<http://www.openerp.com>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/> # ############################################################################## from openerp.osv import osv, fields from openerp.tools.safe_eval import safe_eval class base_config_settings(osv.TransientModel): _inherit = 'base.config.settings' _columns = { 'auth_signup_reset_password': fields.boolean('Enable password reset from Login page', help="This allows users to trigger a password reset from the Login page."), 'auth_signup_uninvited': fields.boolean('Allow external users to sign up', help="If unchecked, only invited users may sign up."), 'auth_signup_template_user_id': fields.many2one('res.users', string='Template user for new users created through signup'), } def get_default_auth_signup_template_user_id(self, cr, uid, fields, context=None): icp = self.pool.get('ir.config_parameter') # we use safe_eval on the result, since the value of the parameter is a nonempty string return { 'auth_signup_reset_password': safe_eval(icp.get_param(cr, uid, 'auth_signup.reset_password', 'False')), 'auth_signup_uninvited': safe_eval(icp.get_param(cr, uid, 'auth_signup.allow_uninvited', 'False')), 'auth_signup_template_user_id': 
safe_eval(icp.get_param(cr, uid, 'auth_signup.template_user_id', 'False')), } def set_auth_signup_template_user_id(self, cr, uid, ids, context=None): config = self.browse(cr, uid, ids[0], context=context) icp = self.pool.get('ir.config_parameter') # we store the repr of the values, since the value of the parameter is a required string icp.set_param(cr, uid, 'auth_signup.reset_password', repr(config.auth_signup_reset_password)) icp.set_param(cr, uid, 'auth_signup.allow_uninvited', repr(config.auth_signup_uninvited)) icp.set_param(cr, uid, 'auth_signup.template_user_id', repr(config.auth_signup_template_user_id.id))
agpl-3.0
cosmiclattes/TPBviz
torrent/lib/python2.7/site-packages/django/contrib/gis/geos/prototypes/io.py
113
9365
import threading from ctypes import byref, c_char_p, c_int, c_char, c_size_t, Structure, POINTER from django.contrib.gis import memoryview from django.contrib.gis.geos.base import GEOSBase from django.contrib.gis.geos.libgeos import GEOM_PTR from django.contrib.gis.geos.prototypes.errcheck import check_geom, check_string, check_sized_string from django.contrib.gis.geos.prototypes.geom import c_uchar_p, geos_char_p from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc from django.utils import six from django.utils.encoding import force_bytes ### The WKB/WKT Reader/Writer structures and pointers ### class WKTReader_st(Structure): pass class WKTWriter_st(Structure): pass class WKBReader_st(Structure): pass class WKBWriter_st(Structure): pass WKT_READ_PTR = POINTER(WKTReader_st) WKT_WRITE_PTR = POINTER(WKTWriter_st) WKB_READ_PTR = POINTER(WKBReader_st) WKB_WRITE_PTR = POINTER(WKBReader_st) ### WKTReader routines ### wkt_reader_create = GEOSFunc('GEOSWKTReader_create') wkt_reader_create.restype = WKT_READ_PTR wkt_reader_destroy = GEOSFunc('GEOSWKTReader_destroy') wkt_reader_destroy.argtypes = [WKT_READ_PTR] wkt_reader_read = GEOSFunc('GEOSWKTReader_read') wkt_reader_read.argtypes = [WKT_READ_PTR, c_char_p] wkt_reader_read.restype = GEOM_PTR wkt_reader_read.errcheck = check_geom ### WKTWriter routines ### wkt_writer_create = GEOSFunc('GEOSWKTWriter_create') wkt_writer_create.restype = WKT_WRITE_PTR wkt_writer_destroy = GEOSFunc('GEOSWKTWriter_destroy') wkt_writer_destroy.argtypes = [WKT_WRITE_PTR] wkt_writer_write = GEOSFunc('GEOSWKTWriter_write') wkt_writer_write.argtypes = [WKT_WRITE_PTR, GEOM_PTR] wkt_writer_write.restype = geos_char_p wkt_writer_write.errcheck = check_string try: wkt_writer_get_outdim = GEOSFunc('GEOSWKTWriter_getOutputDimension') wkt_writer_get_outdim.argtypes = [WKT_WRITE_PTR] wkt_writer_get_outdim.restype = c_int wkt_writer_set_outdim = GEOSFunc('GEOSWKTWriter_setOutputDimension') wkt_writer_set_outdim.argtypes = [WKT_WRITE_PTR, 
c_int] except AttributeError: # GEOSWKTWriter_get/setOutputDimension has been introduced in GEOS 3.3.0 # Always return 2 if not available wkt_writer_get_outdim = lambda ptr: 2 wkt_writer_set_outdim = lambda ptr, dim: None ### WKBReader routines ### wkb_reader_create = GEOSFunc('GEOSWKBReader_create') wkb_reader_create.restype = WKB_READ_PTR wkb_reader_destroy = GEOSFunc('GEOSWKBReader_destroy') wkb_reader_destroy.argtypes = [WKB_READ_PTR] def wkb_read_func(func): # Although the function definitions take `const unsigned char *` # as their parameter, we use c_char_p here so the function may # take Python strings directly as parameters. Inside Python there # is not a difference between signed and unsigned characters, so # it is not a problem. func.argtypes = [WKB_READ_PTR, c_char_p, c_size_t] func.restype = GEOM_PTR func.errcheck = check_geom return func wkb_reader_read = wkb_read_func(GEOSFunc('GEOSWKBReader_read')) wkb_reader_read_hex = wkb_read_func(GEOSFunc('GEOSWKBReader_readHEX')) ### WKBWriter routines ### wkb_writer_create = GEOSFunc('GEOSWKBWriter_create') wkb_writer_create.restype = WKB_WRITE_PTR wkb_writer_destroy = GEOSFunc('GEOSWKBWriter_destroy') wkb_writer_destroy.argtypes = [WKB_WRITE_PTR] # WKB Writing prototypes. def wkb_write_func(func): func.argtypes = [WKB_WRITE_PTR, GEOM_PTR, POINTER(c_size_t)] func.restype = c_uchar_p func.errcheck = check_sized_string return func wkb_writer_write = wkb_write_func(GEOSFunc('GEOSWKBWriter_write')) wkb_writer_write_hex = wkb_write_func(GEOSFunc('GEOSWKBWriter_writeHEX')) # WKBWriter property getter/setter prototypes. 
def wkb_writer_get(func, restype=c_int):
    """Prototype a GEOSWKBWriter getter: takes a writer pointer, returns restype."""
    func.argtypes = [WKB_WRITE_PTR]
    func.restype = restype
    return func

def wkb_writer_set(func, argtype=c_int):
    """Prototype a GEOSWKBWriter setter: takes a writer pointer and one argtype value."""
    func.argtypes = [WKB_WRITE_PTR, argtype]
    return func

# ctypes prototypes for the GEOS C-API WKB writer accessors.
wkb_writer_get_byteorder = wkb_writer_get(GEOSFunc('GEOSWKBWriter_getByteOrder'))
wkb_writer_set_byteorder = wkb_writer_set(GEOSFunc('GEOSWKBWriter_setByteOrder'))
wkb_writer_get_outdim = wkb_writer_get(GEOSFunc('GEOSWKBWriter_getOutputDimension'))
wkb_writer_set_outdim = wkb_writer_set(GEOSFunc('GEOSWKBWriter_setOutputDimension'))
# The include-SRID flag is passed/returned as a single byte (c_char), not an int.
wkb_writer_get_include_srid = wkb_writer_get(GEOSFunc('GEOSWKBWriter_getIncludeSRID'), restype=c_char)
wkb_writer_set_include_srid = wkb_writer_set(GEOSFunc('GEOSWKBWriter_setIncludeSRID'), argtype=c_char)

### Base I/O Class ###
class IOBase(GEOSBase):
    "Base class for GEOS I/O objects."
    def __init__(self):
        # Getting the pointer with the constructor.
        # `_constructor`/`_destructor`/`ptr_type` are supplied by subclasses.
        self.ptr = self._constructor()

    def __del__(self):
        # Cleaning up with the appropriate destructor.
        if self._ptr:
            self._destructor(self._ptr)

### Base WKB/WKT Reading and Writing objects ###

# Non-public WKB/WKT reader classes for internal use because
# their `read` methods return _pointers_ instead of GEOSGeometry
# objects.
class _WKTReader(IOBase):
    _constructor = wkt_reader_create
    _destructor = wkt_reader_destroy
    ptr_type = WKT_READ_PTR

    def read(self, wkt):
        """Return a pointer to a C GEOS geometry parsed from a WKT string/bytes."""
        if not isinstance(wkt, (bytes, six.string_types)):
            raise TypeError
        return wkt_reader_read(self.ptr, force_bytes(wkt))

class _WKBReader(IOBase):
    _constructor = wkb_reader_create
    _destructor = wkb_reader_destroy
    ptr_type = WKB_READ_PTR

    def read(self, wkb):
        "Returns a _pointer_ to C GEOS Geometry object from the given WKB."
        if isinstance(wkb, memoryview):
            # Binary WKB: materialize the buffer and hand its length to GEOS.
            wkb_s = bytes(wkb)
            return wkb_reader_read(self.ptr, wkb_s, len(wkb_s))
        elif isinstance(wkb, (bytes, six.string_types)):
            # String input is treated as a HEX-encoded WKB representation.
            return wkb_reader_read_hex(self.ptr, wkb, len(wkb))
        else:
            raise TypeError

### WKB/WKT Writer Classes ###
class WKTWriter(IOBase):
    _constructor = wkt_writer_create
    _destructor = wkt_writer_destroy
    ptr_type = WKT_WRITE_PTR

    def write(self, geom):
        "Returns the WKT representation of the given geometry."
        return wkt_writer_write(self.ptr, geom.ptr)

    @property
    def outdim(self):
        # Output coordinate dimension used when serializing (2 or 3).
        return wkt_writer_get_outdim(self.ptr)

    @outdim.setter
    def outdim(self, new_dim):
        if not new_dim in (2, 3):
            raise ValueError('WKT output dimension must be 2 or 3')
        wkt_writer_set_outdim(self.ptr, new_dim)

class WKBWriter(IOBase):
    _constructor = wkb_writer_create
    _destructor = wkb_writer_destroy
    ptr_type = WKB_WRITE_PTR

    def write(self, geom):
        "Returns the WKB representation of the given geometry."
        # byref(c_size_t()): GEOS writes the output length here; the value is
        # discarded because the returned buffer carries its own size.
        return memoryview(wkb_writer_write(self.ptr, geom.ptr, byref(c_size_t())))

    def write_hex(self, geom):
        "Returns the HEXEWKB representation of the given geometry."
        return wkb_writer_write_hex(self.ptr, geom.ptr, byref(c_size_t()))

    ### WKBWriter Properties ###

    # Property for getting/setting the byteorder.
    def _get_byteorder(self):
        return wkb_writer_get_byteorder(self.ptr)

    def _set_byteorder(self, order):
        if not order in (0, 1):
            raise ValueError('Byte order parameter must be 0 (Big Endian) or 1 (Little Endian).')
        wkb_writer_set_byteorder(self.ptr, order)

    byteorder = property(_get_byteorder, _set_byteorder)

    # Property for getting/setting the output dimension.
    def _get_outdim(self):
        return wkb_writer_get_outdim(self.ptr)

    def _set_outdim(self, new_dim):
        if not new_dim in (2, 3):
            raise ValueError('WKB output dimension must be 2 or 3')
        wkb_writer_set_outdim(self.ptr, new_dim)

    outdim = property(_get_outdim, _set_outdim)

    # Property for getting/setting the include srid flag.
    def _get_include_srid(self):
        # GEOS returns a single byte; ord() turns it into 0/1 before bool().
        return bool(ord(wkb_writer_get_include_srid(self.ptr)))

    def _set_include_srid(self, include):
        # GEOS expects the flag as a one-byte char, not a Python bool/int.
        if bool(include):
            flag = b'\x01'
        else:
            flag = b'\x00'
        wkb_writer_set_include_srid(self.ptr, flag)

    srid = property(_get_include_srid, _set_include_srid)

# `ThreadLocalIO` object holds instances of the WKT and WKB reader/writer
# objects that are local to the thread.  The `GEOSGeometry` internals
# access these instances by calling the module-level functions, defined
# below.
class ThreadLocalIO(threading.local):
    # One lazily-created reader/writer of each kind per thread.
    wkt_r = None
    wkt_w = None
    wkb_r = None
    wkb_w = None
    ewkb_w = None

thread_context = ThreadLocalIO()

# These module-level routines return the I/O object that is local to the
# thread. If the I/O object does not exist yet it will be initialized.
def wkt_r():
    if not thread_context.wkt_r:
        thread_context.wkt_r = _WKTReader()
    return thread_context.wkt_r

def wkt_w(dim=2):
    if not thread_context.wkt_w:
        thread_context.wkt_w = WKTWriter()
    # The dimension is (re)applied on every call, not only at creation.
    thread_context.wkt_w.outdim = dim
    return thread_context.wkt_w

def wkb_r():
    if not thread_context.wkb_r:
        thread_context.wkb_r = _WKBReader()
    return thread_context.wkb_r

def wkb_w(dim=2):
    if not thread_context.wkb_w:
        thread_context.wkb_w = WKBWriter()
    thread_context.wkb_w.outdim = dim
    return thread_context.wkb_w

def ewkb_w(dim=2):
    # EWKB writer: same as the WKB writer but with SRID embedding enabled once.
    if not thread_context.ewkb_w:
        thread_context.ewkb_w = WKBWriter()
        thread_context.ewkb_w.srid = True
    thread_context.ewkb_w.outdim = dim
    return thread_context.ewkb_w
gpl-3.0
shadyueh/pyranking
env/lib/python2.7/site-packages/pip/_vendor/progress/__init__.py
916
3023
# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

from __future__ import division

from collections import deque
from datetime import timedelta
from math import ceil
from sys import stderr
from time import time


__version__ = '1.2'


class Infinite(object):
    """Open-ended progress tracker: counts steps without a known maximum."""

    file = stderr       # stream that rendering subclasses write to
    sma_window = 10     # number of per-step durations kept for the moving average

    def __init__(self, *args, **kwargs):
        self.index = 0
        self.start_ts = time()
        self._ts = self.start_ts
        self._dt = deque(maxlen=self.sma_window)
        # Any keyword argument becomes an attribute (e.g. max=, message=).
        for attr, value in kwargs.items():
            setattr(self, attr, value)

    def __getitem__(self, key):
        """Dict-style read access to public attributes; private keys give None."""
        return None if key.startswith('_') else getattr(self, key, None)

    @property
    def avg(self):
        """Simple moving average of seconds per step (0 before the first step)."""
        if not self._dt:
            return 0
        return sum(self._dt) / len(self._dt)

    @property
    def elapsed(self):
        """Whole seconds since this tracker was created."""
        return int(time() - self.start_ts)

    @property
    def elapsed_td(self):
        """`elapsed` as a datetime.timedelta."""
        return timedelta(seconds=self.elapsed)

    def update(self):
        """Hook: redraw the indicator.  Rendering subclasses override this."""
        pass

    def start(self):
        pass

    def finish(self):
        pass

    def next(self, n=1):
        """Advance the counter by *n*, recording the amortized step duration."""
        if n > 0:
            timestamp = time()
            self._dt.append((timestamp - self._ts) / n)
            self._ts = timestamp
        self.index = self.index + n
        self.update()

    def iter(self, it):
        """Yield every item of *it*, advancing once per item; finish at the end."""
        for item in it:
            yield item
            self.next()
        self.finish()


class Progress(Infinite):
    """Bounded progress tracker with a known maximum (default 100)."""

    def __init__(self, *args, **kwargs):
        super(Progress, self).__init__(*args, **kwargs)
        self.max = kwargs.get('max', 100)

    @property
    def eta(self):
        """Estimated whole seconds left, from the moving average of step times."""
        return int(ceil(self.avg * self.remaining))

    @property
    def eta_td(self):
        """`eta` as a datetime.timedelta."""
        return timedelta(seconds=self.eta)

    @property
    def percent(self):
        return self.progress * 100

    @property
    def progress(self):
        """Completed fraction, clamped into [0, 1]."""
        return min(1, self.index / self.max)

    @property
    def remaining(self):
        """Steps still to go (never negative)."""
        return max(self.max - self.index, 0)

    def start(self):
        self.update()

    def goto(self, index):
        """Jump straight to *index* by advancing the difference."""
        self.next(index - self.index)

    def iter(self, it):
        """Like Infinite.iter, but adopt len(it) as the maximum when sized."""
        try:
            self.max = len(it)
        except TypeError:
            pass
        for item in it:
            yield item
            self.next()
        self.finish()
mit
coolsvap/dox
dox/tests/config/test_dox_yaml.py
1
2320
# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import dox.config.base as cfg_base from dox.config import dox_yaml from dox.tests import base class TestDoxYaml(base.TestCase): def setUp(self): super(TestDoxYaml, self).setUp() self.doxyaml = dox_yaml.DoxYaml({}) self.doxyaml.dox_file = os.path.join(base.SAMPLEDIR, 'dox.yaml') def test_base_class(self): self.assertIsInstance(self.doxyaml, cfg_base.ConfigBase) def test_dox_yaml_old_parsing(self): self.doxyaml = dox_yaml.DoxYaml({}) self.doxyaml.dox_file = os.path.join(base.SAMPLEDIR, 'dox-old.yaml') for key in self.doxyaml.default_keys_of_section: self.assertIn( key, self.doxyaml._open_dox_yaml().keys()) def test_dox_yaml_not_finding_section(self): self.doxyaml = dox_yaml.DoxYaml({'section': 'foobar'}) self.doxyaml.dox_file = os.path.join(base.SAMPLEDIR, 'dox.yaml') self.assertRaises( dox_yaml.DoxYamlSectionNotFound, self.doxyaml._open_dox_yaml) def test_dox_yaml_with_default_session(self): self.doxyaml = dox_yaml.DoxYaml({}) self.doxyaml.dox_file = os.path.join(base.SAMPLEDIR, 'dox.yaml') for key in self.doxyaml.default_keys_of_section: self.assertIn( key, self.doxyaml._open_dox_yaml().keys()) def test_dox_yaml_new_parsing(self): for key in self.doxyaml.default_keys_of_section: self.assertIn( key, self.doxyaml._open_dox_yaml().keys()) # TOOD(chmou): Finish tests of dox_yaml.py
apache-2.0
ivanamihalek/blender
pymove/09_texture_uv_mapping.py
1
3188
#!bpy
"""
Name: 'mat1.py'
Blender: 2.71
Group: 'Sample'
Tooltip: 'Put one image on each site of a cube'
"""
import bpy
import os


#####################################################
def delete_old_stuff():
    """Reset the scene so repeated runs do not accumulate duplicate data."""
    # escape edit mode
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')

    # delete all mesh objects
    bpy.ops.object.select_by_type(type='MESH')
    bpy.ops.object.delete()

    # delete all materials
    for i in bpy.data.materials.values():
        bpy.data.materials.remove(i)

    # delete all textures
    for i in bpy.data.textures.values():
        bpy.data.textures.remove(i)

    # delete all images
    for i in bpy.data.images.values():
        # delete image path, this is only possible without a user
        i.user_clear()
        # delete all, except »Render Result«
        if i.name != "Render Result":
            bpy.data.images.remove(i)


#####################################################
def make_materials():
    """Create one cube-mapped, textured material per sample image.

    Returns the newly created materials as a list.
    NOTE(review): indentation reconstructed from a collapsed source — the whole
    creation body is read as guarded by the 'material already exists' check;
    since delete_old_stuff() clears all materials first, the guard always
    passes on a normal run.  Confirm against the original file.
    """
    materials = []
    images = ["blume.jpg", "boot.jpg", "jueterbog.jpg", "kopf.jpg",
              "moster.jpg", "telefon.jpg", "warnung.jpg"]
    for image in images:
        # new material
        matname = "mat" + image[:-4]
        if not matname in bpy.data.materials:
            material = bpy.data.materials.new(matname)
            material.diffuse_color = (0, .5, .4)
            # new texture
            texname = "tex" + image[0:-4]
            texUV = bpy.data.textures.new(texname, type="IMAGE")
            # expanduser() is a no-op on a relative path without '~' —
            # presumably kept for portability; the path is relative to the CWD.
            image_path = os.path.expanduser("exercises/blender/pymove/uv_mapping_img/" + image)
            print ("image_path: " + image_path)
            texUV.image = bpy.data.images.load(image_path)
            # bind material and texture: generated (ORCO) coords, cube mapping
            material.texture_slots.add()
            material.active_texture = texUV
            material.texture_slots[0].texture_coords = "ORCO"
            material.texture_slots[0].mapping = "CUBE"
            materials.append(material)
    return materials


#####################################################
def same_image_to_all_sides(obj, material):
    """Assign a single material to the whole mesh (all faces share slot 0)."""
    # it looks like we are using the last texture in the array
    obj.data.materials.append(material)


#####################################################
def different_image_to_each_side (obj, materials):
    """Give each polygon its own material slot (face i -> material slot i)."""
    # it looks like we are using the last texture in the array
    for material in materials:
        obj.data.materials.append(material)
    for face in obj.data.polygons:
        face.material_index = face.index


################################################
if __name__ == "__main__":
    # housekeeping
    delete_old_stuff()
    # otherwise we can't see jack squat
    # NOTE(review): this assigns on the *class* bpy.types.WorldLighting, not on
    # an instance (e.g. bpy.context.scene.world.light_settings) — verify this
    # actually enables environment light in Blender 2.71.
    bpy.types.WorldLighting.use_environment_light = True
    materials = make_materials()

    # Cube 1: one material shared by every face.
    bpy.ops.mesh.primitive_cube_add(location=(0, 0, 0))
    obj = bpy.context.scene.objects.active
    obj.name = 'uvmapped_cube_1'
    same_image_to_all_sides(obj, materials[0])

    # Cube 2: a distinct material on each face.
    bpy.ops.mesh.primitive_cube_add(location=(6, 0, 0))
    obj = bpy.context.scene.objects.active
    obj.name = 'uvmapped_cube_2'
    different_image_to_each_side (obj, materials)
gpl-2.0
pipermerriam/ethereum-abi-utils
eth_abi/exceptions.py
1
2911
"""Exception hierarchy used by eth-abi for encoding, decoding and ABI type
string parsing.  Encoding errors derive from ``EncodingError``, decoding
errors from ``DecodingError``; parsing/registry errors stand alone."""
import parsimonious


class EncodingError(Exception):
    """
    Base exception for any error that occurs during encoding.
    """
    pass


class EncodingTypeError(EncodingError):
    """
    Raised when trying to encode a python value whose type is not supported for
    the output ABI type.
    """
    pass


class IllegalValue(EncodingError):
    """
    Raised when trying to encode a python value with the correct type but with
    a value that is not considered legal for the output ABI type.

    Example:

    .. code-block:: python

        fixed128x19_encoder(Decimal('NaN'))  # cannot encode NaN
    """
    pass


class ValueOutOfBounds(IllegalValue):
    """
    Raised when trying to encode a python value with the correct type but with
    a value that appears outside the range of valid values for the output ABI
    type.

    Example:

    .. code-block:: python

        ufixed8x1_encoder(Decimal('25.6'))  # out of bounds
    """
    pass


class DecodingError(Exception):
    """
    Base exception for any error that occurs during decoding.
    """
    pass


class InsufficientDataBytes(DecodingError):
    """
    Raised when there are insufficient data to decode a value for a given ABI
    type.
    """
    pass


class NonEmptyPaddingBytes(DecodingError):
    """
    Raised when the padding bytes of an ABI value are malformed.
    """
    pass


class ParseError(parsimonious.ParseError):
    """
    Raised when an ABI type string cannot be parsed.
    """
    def __str__(self):
        # Show at most five characters of context at the failure position.
        return "Parse error at '{}' (column {}) in type string '{}'".format(
            self.text[self.pos:self.pos + 5],
            self.column(),
            self.text,
        )


class ABITypeError(ValueError):
    """
    Raised when a parsed ABI type has inconsistent properties; for example,
    when trying to parse the type string ``'uint7'`` (which has a bit-width
    that is not congruent with zero modulo eight).
    """
    pass


class PredicateMappingError(Exception):
    """
    Raised when an error occurs in a registry's internal mapping.
    """
    pass


class NoEntriesFound(ValueError, PredicateMappingError):
    """
    Raised when no registration is found for a type string in a registry's
    internal mapping.

    .. warning::

        In a future version of ``eth-abi``, this error class will no longer
        inherit from ``ValueError``.
    """
    pass


class MultipleEntriesFound(ValueError, PredicateMappingError):
    """
    Raised when multiple registrations are found for a type string in a
    registry's internal mapping.  This error is non-recoverable and indicates
    that a registry was configured incorrectly.  Registrations are expected to
    cover completely distinct ranges of type strings.

    .. warning::

        In a future version of ``eth-abi``, this error class will no longer
        inherit from ``ValueError``.
    """
    pass
mit
boshnivolo/TIY-Assignments
node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
1812
9537
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Utility functions shared amongst the Windows generators."""

import copy
import os


# A dictionary mapping supported target types to extensions.
TARGET_TYPE_EXT = {
  'executable': 'exe',
  'loadable_module': 'dll',
  'shared_library': 'dll',
  'static_library': 'lib',
}


def _GetLargePdbShimCcPath():
  """Returns the path of the large_pdb_shim.cc file."""
  # The shim source ships with gyp itself, two directories above this module.
  this_dir = os.path.abspath(os.path.dirname(__file__))
  src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
  win_data_dir = os.path.join(src_dir, 'data', 'win')
  large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
  return large_pdb_shim_cc


def _DeepCopySomeKeys(in_dict, keys):
  """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.

  Arguments:
    in_dict: The dictionary to copy.
    keys: The keys to be copied. If a key is in this list and doesn't exist in
        |in_dict| this is not an error.
  Returns:
    The partially deep-copied dictionary.
  """
  d = {}
  for key in keys:
    if key not in in_dict:
      continue
    d[key] = copy.deepcopy(in_dict[key])
  return d


def _SuffixName(name, suffix):
  """Add a suffix to the end of a target.

  Arguments:
    name: name of the target (foo#target)
    suffix: the suffix to be added
  Returns:
    Target name with suffix added (foo_suffix#target)
  """
  # Only the part before the trailing '#toolset' qualifier is modified.
  parts = name.rsplit('#', 1)
  parts[0] = '%s_%s' % (parts[0], suffix)
  return '#'.join(parts)


def _ShardName(name, number):
  """Add a shard number to the end of a target.

  Arguments:
    name: name of the target (foo#target)
    number: shard number
  Returns:
    Target name with shard added (foo_1#target)
  """
  return _SuffixName(name, str(number))


def ShardTargets(target_list, target_dicts):
  """Shard some targets apart to work around the linkers limits.

  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
  Returns:
    Tuple of the new sharded versions of the inputs.
  """
  # Gather the targets to shard, and how many pieces.
  targets_to_shard = {}
  for t in target_dicts:
    shards = int(target_dicts[t].get('msvs_shard', 0))
    if shards:
      targets_to_shard[t] = shards
  # Shard target_list.
  new_target_list = []
  for t in target_list:
    if t in targets_to_shard:
      for i in range(targets_to_shard[t]):
        new_target_list.append(_ShardName(t, i))
    else:
      new_target_list.append(t)
  # Shard target_dict.
  new_target_dicts = {}
  for t in target_dicts:
    if t in targets_to_shard:
      for i in range(targets_to_shard[t]):
        name = _ShardName(t, i)
        # Shallow copy: shard i keeps every key of the original, then gets
        # its own target_name and a round-robin slice of the sources.
        new_target_dicts[name] = copy.copy(target_dicts[t])
        new_target_dicts[name]['target_name'] = _ShardName(
            new_target_dicts[name]['target_name'], i)
        sources = new_target_dicts[name].get('sources', [])
        new_sources = []
        for pos in range(i, len(sources), targets_to_shard[t]):
          new_sources.append(sources[pos])
        new_target_dicts[name]['sources'] = new_sources
    else:
      new_target_dicts[t] = target_dicts[t]
  # Shard dependencies.
  # Any dependency on a sharded target is fanned out to all of its shards.
  for t in new_target_dicts:
    for deptype in ('dependencies', 'dependencies_original'):
      dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
      new_dependencies = []
      for d in dependencies:
        if d in targets_to_shard:
          for i in range(targets_to_shard[d]):
            new_dependencies.append(_ShardName(d, i))
        else:
          new_dependencies.append(d)
      new_target_dicts[t][deptype] = new_dependencies

  return (new_target_list, new_target_dicts)


def _GetPdbPath(target_dict, config_name, vars):
  """Returns the path to the PDB file that will be generated by a given
  configuration.

  The lookup proceeds as follows:
    - Look for an explicit path in the VCLinkerTool configuration block.
    - Look for an 'msvs_large_pdb_path' variable.
    - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
      specified.
    - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.

  Arguments:
    target_dict: The target dictionary to be searched.
    config_name: The name of the configuration of interest.
    vars: A dictionary of common GYP variables with generator-specific values.
  Returns:
    The path of the corresponding PDB file.
  """
  config = target_dict['configurations'][config_name]
  msvs = config.setdefault('msvs_settings', {})

  linker = msvs.get('VCLinkerTool', {})

  pdb_path = linker.get('ProgramDatabaseFile')
  if pdb_path:
    return pdb_path

  variables = target_dict.get('variables', {})
  pdb_path = variables.get('msvs_large_pdb_path', None)
  if pdb_path:
    return pdb_path

  pdb_base = target_dict.get('product_name', target_dict['target_name'])
  pdb_base = '%s.%s.pdb' % (pdb_base, TARGET_TYPE_EXT[target_dict['type']])
  pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base

  return pdb_path


def InsertLargePdbShims(target_list, target_dicts, vars):
  """Insert a shim target that forces the linker to use 4KB pagesize PDBs.

  This is a workaround for targets with PDBs greater than 1GB in size, the
  limit for the 1KB pagesize PDBs created by the linker by default.

  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
    vars: A dictionary of common GYP variables with generator-specific values.
  Returns:
    Tuple of the shimmed version of the inputs.
  """
  # Determine which targets need shimming.
  targets_to_shim = []
  for t in target_dicts:
    target_dict = target_dicts[t]

    # We only want to shim targets that have msvs_large_pdb enabled.
    if not int(target_dict.get('msvs_large_pdb', 0)):
      continue
    # This is intended for executable, shared_library and loadable_module
    # targets where every configuration is set up to produce a PDB output.
    # If any of these conditions is not true then the shim logic will fail
    # below.
    targets_to_shim.append(t)

  large_pdb_shim_cc = _GetLargePdbShimCcPath()

  for t in targets_to_shim:
    target_dict = target_dicts[t]
    target_name = target_dict.get('target_name')

    base_dict = _DeepCopySomeKeys(target_dict,
          ['configurations', 'default_configuration', 'toolset'])

    # This is the dict for copying the source file (part of the GYP tree)
    # to the intermediate directory of the project. This is necessary because
    # we can't always build a relative path to the shim source file (on Windows
    # GYP and the project may be on different drives), and Ninja hates absolute
    # paths (it ends up generating the .obj and .obj.d alongside the source
    # file, polluting GYPs tree).
    copy_suffix = 'large_pdb_copy'
    copy_target_name = target_name + '_' + copy_suffix
    full_copy_target_name = _SuffixName(t, copy_suffix)
    shim_cc_basename = os.path.basename(large_pdb_shim_cc)
    shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
    shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
    copy_dict = copy.deepcopy(base_dict)
    copy_dict['target_name'] = copy_target_name
    copy_dict['type'] = 'none'
    copy_dict['sources'] = [ large_pdb_shim_cc ]
    copy_dict['copies'] = [{
      'destination': shim_cc_dir,
      'files': [ large_pdb_shim_cc ]
    }]

    # This is the dict for the PDB generating shim target. It depends on the
    # copy target.
    shim_suffix = 'large_pdb_shim'
    shim_target_name = target_name + '_' + shim_suffix
    full_shim_target_name = _SuffixName(t, shim_suffix)
    shim_dict = copy.deepcopy(base_dict)
    shim_dict['target_name'] = shim_target_name
    shim_dict['type'] = 'static_library'
    shim_dict['sources'] = [ shim_cc_path ]
    shim_dict['dependencies'] = [ full_copy_target_name ]

    # Set up the shim to output its PDB to the same location as the final linker
    # target.
    # NOTE(review): .iteritems() is Python 2 only (gyp runs under Python 2);
    # this would need .items() under Python 3.
    for config_name, config in shim_dict.get('configurations').iteritems():
      pdb_path = _GetPdbPath(target_dict, config_name, vars)

      # A few keys that we don't want to propagate.
      for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']:
        config.pop(key, None)

      msvs = config.setdefault('msvs_settings', {})

      # Update the compiler directives in the shim target.
      compiler = msvs.setdefault('VCCLCompilerTool', {})
      compiler['DebugInformationFormat'] = '3'
      compiler['ProgramDataBaseFileName'] = pdb_path

      # Set the explicit PDB path in the appropriate configuration of the
      # original target.
      config = target_dict['configurations'][config_name]
      msvs = config.setdefault('msvs_settings', {})
      linker = msvs.setdefault('VCLinkerTool', {})
      linker['GenerateDebugInformation'] = 'true'
      linker['ProgramDatabaseFile'] = pdb_path

    # Add the new targets. They must go to the beginning of the list so that
    # the dependency generation works as expected in ninja.
    target_list.insert(0, full_copy_target_name)
    target_list.insert(0, full_shim_target_name)
    target_dicts[full_copy_target_name] = copy_dict
    target_dicts[full_shim_target_name] = shim_dict

    # Update the original target to depend on the shim target.
    target_dict.setdefault('dependencies', []).append(full_shim_target_name)

  return (target_list, target_dicts)
cc0-1.0
blueraster/poly-intersect
gunicorn.py
2
2055
import os
import multiprocessing  # NOTE(review): imported but unused here — confirm before removing

# --- Socket ---
bind = '0.0.0.0:5700'   # listen on all interfaces, port 5700
backlog = 2048          # max queued (not yet accepted) connections

# --- Worker processes ---
worker_class = 'gevent' # cooperative greenlet-based workers
workers = 4
threads = 1
worker_connections = 500  # max simultaneous clients per gevent worker
timeout = 60            # seconds of silence before a worker is killed/restarted
keepalive = 2           # seconds to hold an HTTP Keep-Alive connection

# Recycle workers after ~1000-1500 requests; the jitter staggers restarts so
# all workers don't recycle at once.
max_requests = 1000
max_requests_jitter = 500
graceful_timeout = 60   # seconds to let in-flight requests finish on restart

spew = False            # if True, trace every line of executed code (very noisy)

# --- Server mechanics ---
daemon = False
pidfile = None
# NOTE(review): this is *decimal* 666 (== 0o1232), not octal file-mode 0666 —
# confirm the intent; 0o666 (or a conventional 0o22) is probably what's meant.
umask = 666
user = os.getenv('USER')
group = os.getenv('USER')
tmp_upload_dir = None

# --- Logging ---
errorlog = '-'          # '-' routes the error log to stderr
loglevel = 'info'
accesslog = '-'         # '-' routes the access log to stdout
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'

proc_name = None        # keep gunicorn's default process title

#
# Server hooks
#
#   post_fork - Called just after a worker has been forked.
#
#       A callable that takes a server and worker instance
#       as arguments.
#
#   pre_fork - Called just prior to forking the worker subprocess.
#
#       A callable that accepts the same arguments as after_fork
#
#   pre_exec - Called just prior to forking off a secondary
#       master process during things like config reloading.
#
#       A callable that takes a server instance as the sole argument.
#

def post_fork(server, worker):
    server.log.info("Worker spawned (pid: %s)", worker.pid)

def pre_fork(server, worker):
    pass

def pre_exec(server):
    server.log.info("Forked child, re-executing.")

def when_ready(server):
    server.log.info("Server is ready. Spawning workers")

def worker_int(worker):
    """On INT/QUIT, dump a stack trace of every thread to the debug log."""
    worker.log.info("worker received INT or QUIT signal")

    ## get traceback info
    import threading, sys, traceback
    id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
    code = []
    # sys._current_frames() maps thread id -> current stack frame (CPython).
    for threadId, stack in sys._current_frames().items():
        code.append("\n# Thread: %s(%d)" % (id2name.get(threadId,""),
            threadId))
        for filename, lineno, name, line in traceback.extract_stack(stack):
            code.append('File: "%s", line %d, in %s' % (filename,
                lineno, name))
            if line:
                code.append("  %s" % (line.strip()))
    worker.log.debug("\n".join(code))

def worker_abort(worker):
    worker.log.info("worker received SIGABRT signal")
mit
GoogleChrome/big-rig
app/src/thirdparty/telemetry/internal/forwarders/cros_forwarder.py
24
2058
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import logging import subprocess from telemetry.core import util from telemetry.internal import forwarders from telemetry.internal.forwarders import do_nothing_forwarder class CrOsForwarderFactory(forwarders.ForwarderFactory): def __init__(self, cri): super(CrOsForwarderFactory, self).__init__() self._cri = cri # pylint: disable=arguments-differ def Create(self, port_pairs, use_remote_port_forwarding=True): if self._cri.local: return do_nothing_forwarder.DoNothingForwarder(port_pairs) return CrOsSshForwarder(self._cri, use_remote_port_forwarding, port_pairs) class CrOsSshForwarder(forwarders.Forwarder): def __init__(self, cri, use_remote_port_forwarding, port_pairs): super(CrOsSshForwarder, self).__init__(port_pairs) self._cri = cri self._proc = None forwarding_args = self._ForwardingArgs( use_remote_port_forwarding, self.host_ip, port_pairs) self._proc = subprocess.Popen( self._cri.FormSSHCommandLine(['sleep', '999999999'], forwarding_args), stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, shell=False) util.WaitFor( lambda: self._cri.IsHTTPServerRunningOnPort(self.host_port), 60) logging.debug('Server started on %s:%d', self.host_ip, self.host_port) # pylint: disable=unused-argument @staticmethod def _ForwardingArgs(use_remote_port_forwarding, host_ip, port_pairs): if use_remote_port_forwarding: arg_format = '-R{pp.remote_port}:{host_ip}:{pp.local_port}' else: arg_format = '-L{pp.local_port}:{host_ip}:{pp.remote_port}' return [arg_format.format(**locals()) for pp in port_pairs if pp] @property def host_port(self): return self._port_pairs.http.remote_port def Close(self): if self._proc: self._proc.kill() self._proc = None super(CrOsSshForwarder, self).Close()
apache-2.0
impowski/servo
tests/wpt/web-platform-tests/tools/py/testing/path/test_svnwc.py
162
19775
import py import os, sys import pytest from py._path.svnwc import InfoSvnWCCommand, XMLWCStatus, parse_wcinfotime from py._path import svnwc as svncommon from svntestbase import CommonSvnTests def test_make_repo(path1, tmpdir): repo = tmpdir.join("repo") py.process.cmdexec('svnadmin create %s' % repo) if sys.platform == 'win32': repo = '/' + str(repo).replace('\\', '/') repo = py.path.svnurl("file://%s" % repo) wc = py.path.svnwc(tmpdir.join("wc")) wc.checkout(repo) assert wc.rev == 0 assert len(wc.listdir()) == 0 p = wc.join("a_file") p.write("test file") p.add() rev = wc.commit("some test") assert p.info().rev == 1 assert rev == 1 rev = wc.commit() assert rev is None def pytest_funcarg__path1(request): repo, repourl, wc = request.getfuncargvalue("repowc1") return wc class TestWCSvnCommandPath(CommonSvnTests): def test_status_attributes_simple(self, path1): def assert_nochange(p): s = p.status() assert not s.modified assert not s.prop_modified assert not s.added assert not s.deleted assert not s.replaced dpath = path1.join('sampledir') assert_nochange(path1.join('sampledir')) assert_nochange(path1.join('samplefile')) def test_status_added(self, path1): nf = path1.join('newfile') nf.write('hello') nf.add() try: s = nf.status() assert s.added assert not s.modified assert not s.prop_modified assert not s.replaced finally: nf.revert() def test_status_change(self, path1): nf = path1.join('samplefile') try: nf.write(nf.read() + 'change') s = nf.status() assert not s.added assert s.modified assert not s.prop_modified assert not s.replaced finally: nf.revert() def test_status_added_ondirectory(self, path1): sampledir = path1.join('sampledir') try: t2 = sampledir.mkdir('t2') t1 = t2.join('t1') t1.write('test') t1.add() s = sampledir.status(rec=1) # Comparing just the file names, because paths are unpredictable # on Windows. (long vs. 
8.3 paths) assert t1.basename in [item.basename for item in s.added] assert t2.basename in [item.basename for item in s.added] finally: t2.revert(rec=1) t2.localpath.remove(rec=1) def test_status_unknown(self, path1): t1 = path1.join('un1') try: t1.write('test') s = path1.status() # Comparing just the file names, because paths are unpredictable # on Windows. (long vs. 8.3 paths) assert t1.basename in [item.basename for item in s.unknown] finally: t1.localpath.remove() def test_status_unchanged(self, path1): r = path1 s = path1.status(rec=1) # Comparing just the file names, because paths are unpredictable # on Windows. (long vs. 8.3 paths) assert r.join('samplefile').basename in [item.basename for item in s.unchanged] assert r.join('sampledir').basename in [item.basename for item in s.unchanged] assert r.join('sampledir/otherfile').basename in [item.basename for item in s.unchanged] @pytest.mark.xfail(reason="svn-1.7 has buggy 'status --xml' output") def test_status_update(self, path1): r = path1 try: r.update(rev=1) s = r.status(updates=1, rec=1) # Comparing just the file names, because paths are unpredictable # on Windows. (long vs. 
8.3 paths) py.std.pprint.pprint(s.allpath()) assert r.join('anotherfile').basename in [item.basename for item in s.update_available] #assert len(s.update_available) == 1 finally: r.update() def test_status_replaced(self, path1): p = path1.join("samplefile") p.remove() p.ensure(dir=0) try: s = path1.status() assert p.basename in [item.basename for item in s.replaced] finally: path1.revert(rec=1) def test_status_ignored(self, path1): try: d = path1.join('sampledir') p = py.path.local(d).join('ignoredfile') p.ensure(file=True) s = d.status() assert [x.basename for x in s.unknown] == ['ignoredfile'] assert [x.basename for x in s.ignored] == [] d.propset('svn:ignore', 'ignoredfile') s = d.status() assert [x.basename for x in s.unknown] == [] assert [x.basename for x in s.ignored] == ['ignoredfile'] finally: path1.revert(rec=1) def test_status_conflict(self, path1, tmpdir): wc = path1 wccopy = py.path.svnwc(tmpdir.join("conflict_copy")) wccopy.checkout(wc.url) p = wc.ensure('conflictsamplefile', file=1) p.write('foo') wc.commit('added conflictsamplefile') wccopy.update() assert wccopy.join('conflictsamplefile').check() p.write('bar') wc.commit('wrote some data') wccopy.join('conflictsamplefile').write('baz') wccopy.update(interactive=False) s = wccopy.status() assert [x.basename for x in s.conflict] == ['conflictsamplefile'] def test_status_external(self, path1, repowc2): otherrepo, otherrepourl, otherwc = repowc2 d = path1.ensure('sampledir', dir=1) try: d.update() d.propset('svn:externals', 'otherwc %s' % (otherwc.url,)) d.update() s = d.status() assert [x.basename for x in s.external] == ['otherwc'] assert 'otherwc' not in [x.basename for x in s.unchanged] s = d.status(rec=1) assert [x.basename for x in s.external] == ['otherwc'] assert 'otherwc' in [x.basename for x in s.unchanged] finally: path1.revert(rec=1) def test_status_deleted(self, path1): d = path1.ensure('sampledir', dir=1) d.remove() d.ensure(dir=1) path1.commit() d.ensure('deletefile', dir=0) d.commit() s 
= d.status() assert 'deletefile' in [x.basename for x in s.unchanged] assert not s.deleted p = d.join('deletefile') p.remove() s = d.status() assert 'deletefile' not in s.unchanged assert [x.basename for x in s.deleted] == ['deletefile'] def test_status_noauthor(self, path1): # testing for XML without author - this used to raise an exception xml = '''\ <entry path="/tmp/pytest-23/wc"> <wc-status item="normal" props="none" revision="0"> <commit revision="0"> <date>2008-08-19T16:50:53.400198Z</date> </commit> </wc-status> </entry> ''' XMLWCStatus.fromstring(xml, path1) def test_status_wrong_xml(self, path1): # testing for XML without author - this used to raise an exception xml = '<entry path="/home/jean/zope/venv/projectdb/parts/development-products/DataGridField">\n<wc-status item="incomplete" props="none" revision="784">\n</wc-status>\n</entry>' st = XMLWCStatus.fromstring(xml, path1) assert len(st.incomplete) == 1 def test_diff(self, path1): p = path1 / 'anotherfile' out = p.diff(rev=2) assert out.find('hello') != -1 def test_blame(self, path1): p = path1.join('samplepickle') lines = p.blame() assert sum([l[0] for l in lines]) == len(lines) for l1, l2 in zip(p.readlines(), [l[2] for l in lines]): assert l1 == l2 assert [l[1] for l in lines] == ['hpk'] * len(lines) p = path1.join('samplefile') lines = p.blame() assert sum([l[0] for l in lines]) == len(lines) for l1, l2 in zip(p.readlines(), [l[2] for l in lines]): assert l1 == l2 assert [l[1] for l in lines] == ['hpk'] * len(lines) def test_join_abs(self, path1): s = str(path1.localpath) n = path1.join(s, abs=1) assert path1 == n def test_join_abs2(self, path1): assert path1.join('samplefile', abs=1) == path1.join('samplefile') def test_str_gives_localpath(self, path1): assert str(path1) == str(path1.localpath) def test_versioned(self, path1): assert path1.check(versioned=1) # TODO: Why does my copy of svn think .svn is versioned? 
#assert path1.join('.svn').check(versioned=0) assert path1.join('samplefile').check(versioned=1) assert not path1.join('notexisting').check(versioned=1) notexisting = path1.join('hello').localpath try: notexisting.write("") assert path1.join('hello').check(versioned=0) finally: notexisting.remove() def test_listdir_versioned(self, path1): assert path1.check(versioned=1) p = path1.localpath.ensure("not_a_versioned_file") l = [x.localpath for x in path1.listdir(lambda x: x.check(versioned=True))] assert p not in l def test_nonversioned_remove(self, path1): assert path1.check(versioned=1) somefile = path1.join('nonversioned/somefile') nonwc = py.path.local(somefile) nonwc.ensure() assert somefile.check() assert not somefile.check(versioned=True) somefile.remove() # this used to fail because it tried to 'svn rm' def test_properties(self, path1): try: path1.propset('gaga', 'this') assert path1.propget('gaga') == 'this' # Comparing just the file names, because paths are unpredictable # on Windows. (long vs. 8.3 paths) assert path1.basename in [item.basename for item in path1.status().prop_modified] assert 'gaga' in path1.proplist() assert path1.proplist()['gaga'] == 'this' finally: path1.propdel('gaga') def test_proplist_recursive(self, path1): s = path1.join('samplefile') s.propset('gugu', 'that') try: p = path1.proplist(rec=1) # Comparing just the file names, because paths are unpredictable # on Windows. (long vs. 
8.3 paths) assert (path1 / 'samplefile').basename in [item.basename for item in p] finally: s.propdel('gugu') def test_long_properties(self, path1): value = """ vadm:posix : root root 0100755 Properties on 'chroot/dns/var/bind/db.net.xots': """ try: path1.propset('gaga', value) backvalue = path1.propget('gaga') assert backvalue == value #assert len(backvalue.split('\n')) == 1 finally: path1.propdel('gaga') def test_ensure(self, path1): newpath = path1.ensure('a', 'b', 'c') try: assert newpath.check(exists=1, versioned=1) newpath.write("hello") newpath.ensure() assert newpath.read() == "hello" finally: path1.join('a').remove(force=1) def test_not_versioned(self, path1): p = path1.localpath.mkdir('whatever') f = path1.localpath.ensure('testcreatedfile') try: assert path1.join('whatever').check(versioned=0) assert path1.join('testcreatedfile').check(versioned=0) assert not path1.join('testcreatedfile').check(versioned=1) finally: p.remove(rec=1) f.remove() def test_lock_unlock(self, path1): root = path1 somefile = root.join('somefile') somefile.ensure(file=True) # not yet added to repo py.test.raises(Exception, 'somefile.lock()') somefile.write('foo') somefile.commit('test') assert somefile.check(versioned=True) somefile.lock() try: locked = root.status().locked assert len(locked) == 1 assert locked[0].basename == somefile.basename assert locked[0].dirpath().basename == somefile.dirpath().basename #assert somefile.locked() py.test.raises(Exception, 'somefile.lock()') finally: somefile.unlock() #assert not somefile.locked() locked = root.status().locked assert locked == [] py.test.raises(Exception, 'somefile,unlock()') somefile.remove() def test_commit_nonrecursive(self, path1): somedir = path1.join('sampledir') somedir.mkdir("subsubdir") somedir.propset('foo', 'bar') status = somedir.status() assert len(status.prop_modified) == 1 assert len(status.added) == 1 somedir.commit('non-recursive commit', rec=0) status = somedir.status() assert len(status.prop_modified) == 0 
assert len(status.added) == 1 somedir.commit('recursive commit') status = somedir.status() assert len(status.prop_modified) == 0 assert len(status.added) == 0 def test_commit_return_value(self, path1): testfile = path1.join('test.txt').ensure(file=True) testfile.write('test') rev = path1.commit('testing') assert type(rev) == int anotherfile = path1.join('another.txt').ensure(file=True) anotherfile.write('test') rev2 = path1.commit('testing more') assert type(rev2) == int assert rev2 == rev + 1 #def test_log(self, path1): # l = path1.log() # assert len(l) == 3 # might need to be upped if more tests are added class XTestWCSvnCommandPathSpecial: rooturl = 'http://codespeak.net/svn/py.path/trunk/dist/py.path/test/data' #def test_update_none_rev(self, path1): # path = tmpdir.join('checkouttest') # wcpath = newpath(xsvnwc=str(path), url=path1url) # try: # wcpath.checkout(rev=2100) # wcpath.update() # assert wcpath.info().rev > 2100 # finally: # wcpath.localpath.remove(rec=1) def test_parse_wcinfotime(): assert (parse_wcinfotime('2006-05-30 20:45:26 +0200 (Tue, 30 May 2006)') == 1149021926) assert (parse_wcinfotime('2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)') == 1067287394) class TestInfoSvnWCCommand: def test_svn_1_2(self, path1): output = """ Path: test_svnwc.py Name: test_svnwc.py URL: http://codespeak.net/svn/py/dist/py/path/svn/wccommand.py Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada Revision: 28137 Node Kind: file Schedule: normal Last Changed Author: jan Last Changed Rev: 27939 Last Changed Date: 2006-05-30 20:45:26 +0200 (Tue, 30 May 2006) Text Last Updated: 2006-06-01 00:42:53 +0200 (Thu, 01 Jun 2006) Properties Last Updated: 2006-05-23 11:54:59 +0200 (Tue, 23 May 2006) Checksum: 357e44880e5d80157cc5fbc3ce9822e3 """ path = py.path.local(__file__).dirpath().chdir() try: info = InfoSvnWCCommand(output) finally: path.chdir() assert info.last_author == 'jan' assert info.kind == 'file' assert info.mtime == 1149021926.0 assert info.url == 
'http://codespeak.net/svn/py/dist/py/path/svn/wccommand.py' assert info.time == 1149021926000000.0 assert info.rev == 28137 def test_svn_1_3(self, path1): output = """ Path: test_svnwc.py Name: test_svnwc.py URL: http://codespeak.net/svn/py/dist/py/path/svn/wccommand.py Repository Root: http://codespeak.net/svn Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada Revision: 28124 Node Kind: file Schedule: normal Last Changed Author: jan Last Changed Rev: 27939 Last Changed Date: 2006-05-30 20:45:26 +0200 (Tue, 30 May 2006) Text Last Updated: 2006-06-02 23:46:11 +0200 (Fri, 02 Jun 2006) Properties Last Updated: 2006-06-02 23:45:28 +0200 (Fri, 02 Jun 2006) Checksum: 357e44880e5d80157cc5fbc3ce9822e3 """ path = py.path.local(__file__).dirpath().chdir() try: info = InfoSvnWCCommand(output) finally: path.chdir() assert info.last_author == 'jan' assert info.kind == 'file' assert info.mtime == 1149021926.0 assert info.url == 'http://codespeak.net/svn/py/dist/py/path/svn/wccommand.py' assert info.rev == 28124 assert info.time == 1149021926000000.0 def test_characters_at(): py.test.raises(ValueError, "py.path.svnwc('/tmp/@@@:')") def test_characters_tilde(): py.path.svnwc('/tmp/test~') class TestRepo: def test_trailing_slash_is_stripped(self, path1): # XXX we need to test more normalizing properties url = path1.join("/") assert path1 == url #def test_different_revs_compare_unequal(self, path1): # newpath = path1.new(rev=1199) # assert newpath != path1 def test_exists_svn_root(self, path1): assert path1.check() #def test_not_exists_rev(self, path1): # url = path1.__class__(path1url, rev=500) # assert url.check(exists=0) #def test_nonexisting_listdir_rev(self, path1): # url = path1.__class__(path1url, rev=500) # raises(py.error.ENOENT, url.listdir) #def test_newrev(self, path1): # url = path1.new(rev=None) # assert url.rev == None # assert url.strpath == path1.strpath # url = path1.new(rev=10) # assert url.rev == 10 #def test_info_rev(self, path1): # url = 
path1.__class__(path1url, rev=1155) # url = url.join("samplefile") # res = url.info() # assert res.size > len("samplefile") and res.created_rev == 1155 # the following tests are easier if we have a path class def test_repocache_simple(self, path1): repocache = svncommon.RepoCache() repocache.put(path1.strpath, 42) url, rev = repocache.get(path1.join('test').strpath) assert rev == 42 assert url == path1.strpath def test_repocache_notimeout(self, path1): repocache = svncommon.RepoCache() repocache.timeout = 0 repocache.put(path1.strpath, path1.rev) url, rev = repocache.get(path1.strpath) assert rev == -1 assert url == path1.strpath def test_repocache_outdated(self, path1): repocache = svncommon.RepoCache() repocache.put(path1.strpath, 42, timestamp=0) url, rev = repocache.get(path1.join('test').strpath) assert rev == -1 assert url == path1.strpath def _test_getreporev(self): """ this test runs so slow it's usually disabled """ old = svncommon.repositories.repos try: _repocache.clear() root = path1.new(rev=-1) url, rev = cache.repocache.get(root.strpath) assert rev>=0 assert url == svnrepourl finally: repositories.repos = old
mpl-2.0
jobiols/odoomrp-wip
procurement_plan_mrp_sale_forecast/tests/test_procurement_plan_mrp_sale_forecast.py
10
2410
# -*- coding: utf-8 -*- # (c) 2016 Alfredo de la Fuente - AvanzOSC # License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html import openerp.tests.common as common class TestProcurementPlanMrpSaleForecast(common.TransactionCase): def setUp(self): super(TestProcurementPlanMrpSaleForecast, self).setUp() self.sale_forecast_model = self.env['procurement.sale.forecast'] self.wiz_model = self.env['make.procurement'] forecast_vals = { 'name': 'Procurement plan mrp sale forecast test', 'date_from': '2025-01-01', 'date_to': '2025-01-31', 'warehouse_id': self.ref('stock.stock_warehouse_shop0'), 'forecast_lines': [ (0, 0, {'partner_id': self.ref('base.res_partner_address_2'), 'date': '2025-01-15', 'product_id': self.ref('product.product_product_4b'), 'qty': 1, 'unit_price': 5}), (0, 0, {'partner_id': self.ref('base.res_partner_address_2'), 'date': '2025-01-20', 'product_id': self.ref('product.product_product_4c'), 'qty': 4, 'unit_price': 12})]} self.sale_forecast = self.sale_forecast_model.create(forecast_vals) def test_procurement_plan_mrp_sale_forecast(self): line = self.sale_forecast.forecast_lines[0] wiz_vals = {'warehouse_id': self.ref('stock.stock_warehouse_shop0'), 'product_id': self.ref('product.product_product_4b'), 'uom_id': self.env.ref('product.product_product_4b').uom_id.id, 'qty': 1, 'date_planned': '2025-01-15'} wiz = self.wiz_model.create(wiz_vals) wiz.with_context({'active_model': 'procurement.sale.forecast.line', 'active_ids': [line.id], 'active_id': line.id}).make_procurement() self.assertNotEqual( self.sale_forecast.forecast_lines[0].procurement_id, False, 'Line without procurement') self.sale_forecast.create_procurements() for line in self.sale_forecast.forecast_lines: self.assertNotEqual(line.procurement_id, False, 'Case 2, line without procurement')
agpl-3.0
Noviat/odoo
addons/auth_signup/__init__.py
446
1039
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2012-today OpenERP SA (<http://www.openerp.com>) # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/> # ############################################################################## import controllers import res_config import res_users
agpl-3.0
low-sky/spectral-cube
spectral_cube/tests/test_moments.py
4
7235
"""Tests for SpectralCube moment maps (moment 0/1/2, linewidths).

Expected results for every (order, axis) pair were computed analytically
("the back of the book") for a tiny 3x3x3 cube with known WCS increments,
so each strategy ('cube', 'slice', 'ray', 'auto') can be checked against
exact reference arrays and against the other strategies.

NOTE(review): the test functions take a ``use_dask`` argument that is not
defined in this file — presumably a pytest fixture from conftest; confirm.
"""
from __future__ import print_function, absolute_import, division

import warnings
from distutils.version import LooseVersion  # NOTE(review): distutils is deprecated (PEP 632) — TODO migrate

import pytest
import numpy as np

import astropy
from astropy.wcs import WCS
from astropy import units as u
from astropy.io import fits

from ..spectral_cube import SpectralCube, VarianceWarning
from .helpers import assert_allclose

# the back of the book
# Per-axis coordinate increments of the synthetic cube built below.
dv = 3e-2 * u.Unit('m/s')
dy = 2e-5 * u.Unit('deg')
dx = 1e-5 * u.Unit('deg')
data_unit = u.K

# Hand-computed reference maps: m<order><axis> for axes v (spectral), y, x.
m0v = np.array([[27, 30, 33],
                [36, 39, 42],
                [45, 48, 51]]) * data_unit * dv
m0y = np.array([[9, 12, 15],
                [36, 39, 42],
                [63, 66, 69]]) * data_unit * dy
m0x = np.array([[3, 12, 21],
                [30, 39, 48],
                [57, 66, 75]]) * data_unit * dx

# M1V is a special case, where we return the actual coordinate
m1v = np.array([[1.66666667, 1.6, 1.54545455],
                [1.5, 1.46153846, 1.42857143],
                [1.4, 1.375, 1.35294118]]) * dv + 2 * u.Unit('m/s')
m1y = np.array([[1.66666667, 1.5, 1.4],
                [1.16666667, 1.15384615, 1.14285714],
                [1.0952381, 1.09090909, 1.08695652]]) * dy
m1x = np.array([[1.66666667, 1.16666667, 1.0952381],
                [1.06666667, 1.05128205, 1.04166667],
                [1.03508772, 1.03030303, 1.02666667]]) * dx

m2v = np.array([[0.22222222, 0.30666667, 0.36914601],
                [0.41666667, 0.45364892, 0.4829932],
                [0.50666667, 0.52604167, 0.54209919]]) * dv ** 2
m2y = np.array([[0.22222222, 0.41666667, 0.50666667],
                [0.63888889, 0.64299803, 0.6462585],
                [0.65759637, 0.6584022, 0.65910523]]) * dy ** 2
m2x = np.array([[0.22222222, 0.63888889, 0.65759637],
                [0.66222222, 0.66403682, 0.66493056],
                [0.66543552, 0.66574839, 0.66595556]]) * dx ** 2

# Indexed as MOMENTS[order][axis].
MOMENTS = [[m0v, m0y, m0x], [m1v, m1y, m1x], [m2v, m2y, m2x]]
# In issue 184, the cubes were corrected such that they all have valid units
# Therefore, no separate tests are needed for moments-with-units and those
# without
MOMENTSu = MOMENTS


def moment_cube():
    """Return a 3x3x3 FITS HDU (values 0..26, BUNIT=K) with a known WCS."""
    # NOTE(review): np.float was removed in NumPy >= 1.24; use float — TODO.
    cube = np.arange(27).reshape([3, 3, 3]).astype(np.float)
    wcs = WCS(naxis=3)
    wcs.wcs.ctype = ['RA---TAN', 'DEC--TAN', 'VELO']
    # choose values to minimize spherical distortions
    wcs.wcs.cdelt = np.array([-1, 2, 3], dtype='float32') / 1e5
    wcs.wcs.crpix = np.array([1, 1, 1], dtype='float32')
    wcs.wcs.crval = np.array([0, 1e-3, 2e-3], dtype='float32')
    wcs.wcs.cunit = ['deg', 'deg', 'km/s']

    header = wcs.to_header()
    header['BUNIT'] = 'K'

    hdu = fits.PrimaryHDU(data=cube, header=header)
    return hdu


# Parametrize over every (axis, order) combination.
axis_order = pytest.mark.parametrize(('axis', 'order'),
                                     ((0, 0), (0, 1), (0, 2),
                                      (1, 0), (1, 1), (1, 2),
                                      (2, 0), (2, 1), (2, 2)))


if LooseVersion(astropy.__version__[:3]) >= LooseVersion('1.0'):
    # The relative error is slightly larger on astropy-dev
    # There is no obvious reason for this.
    rtol = 2e-7
    atol = 1e-30
else:
    rtol = 1e-7
    atol = 0.0


@axis_order
def test_strategies_consistent(axis, order, use_dask):
    """'cube', 'slice' and 'ray' strategies must agree with each other."""
    mc_hdu = moment_cube()
    sc = SpectralCube.read(mc_hdu, use_dask=use_dask)

    cwise = sc.moment(axis=axis, order=order, how='cube')
    swise = sc.moment(axis=axis, order=order, how='slice')
    rwise = sc.moment(axis=axis, order=order, how='ray')
    assert_allclose(cwise, swise, rtol=rtol, atol=atol)
    assert_allclose(cwise, rwise, rtol=rtol, atol=atol)


@pytest.mark.parametrize(('order', 'axis', 'how'),
                         [(o, a, h)
                          for o in [0, 1, 2]
                          for a in [0, 1, 2]
                          for h in ['cube', 'slice', 'auto', 'ray']])
def test_reference(order, axis, how, use_dask):
    """Every strategy must reproduce the precomputed reference maps."""
    mc_hdu = moment_cube()
    sc = SpectralCube.read(mc_hdu, use_dask=use_dask)
    mom_sc = sc.moment(order=order, axis=axis, how=how)
    assert_allclose(mom_sc, MOMENTS[order][axis])


@axis_order
def test_consistent_mask_handling(axis, order, use_dask):
    """All strategies must apply a value mask (> 4 K) identically."""
    mc_hdu = moment_cube()
    sc = SpectralCube.read(mc_hdu, use_dask=use_dask)
    sc._mask = sc > 4*u.K

    cwise = sc.moment(axis=axis, order=order, how='cube')
    swise = sc.moment(axis=axis, order=order, how='slice')
    rwise = sc.moment(axis=axis, order=order, how='ray')
    assert_allclose(cwise, swise, rtol=rtol, atol=atol)
    assert_allclose(cwise, rwise, rtol=rtol, atol=atol)


def test_convenience_methods(use_dask):
    """moment0/moment1/moment2 shortcuts must match sc.moment(order=...)."""
    mc_hdu = moment_cube()
    sc = SpectralCube.read(mc_hdu, use_dask=use_dask)

    assert_allclose(sc.moment0(axis=0), MOMENTS[0][0])
    assert_allclose(sc.moment1(axis=2), MOMENTS[1][2])
    assert_allclose(sc.moment2(axis=1), MOMENTS[2][1])


def test_linewidth(use_dask):
    """moment2 warns that it is a variance; linewidth_* do not."""
    mc_hdu = moment_cube()
    sc = SpectralCube.read(mc_hdu, use_dask=use_dask)

    with warnings.catch_warnings(record=True) as w:
        assert_allclose(sc.moment2(), MOMENTS[2][0])

    # Exactly one VarianceWarning with the documented wording.
    assert len(w) == 1
    assert w[0].category == VarianceWarning
    assert str(w[0].message) == ("Note that the second moment returned will be a "
                                 "variance map. To get a linewidth map, use the "
                                 "SpectralCube.linewidth_fwhm() or "
                                 "SpectralCube.linewidth_sigma() methods instead.")

    with warnings.catch_warnings(record=True) as w:
        # sigma = sqrt(variance); FWHM = sigma * 2*sqrt(2*ln 2).
        assert_allclose(sc.linewidth_sigma(), MOMENTS[2][0] ** 0.5)
        assert_allclose(sc.linewidth_fwhm(),
                        MOMENTS[2][0] ** 0.5 * 2.3548200450309493)

    assert len(w) == 0


def test_preserve_unit(use_dask):
    """Converting spectral units before the moment must convert the result."""
    mc_hdu = moment_cube()
    sc = SpectralCube.read(mc_hdu, use_dask=use_dask)
    sc_kms = sc.with_spectral_unit(u.km/u.s)
    m0 = sc_kms.moment0(axis=0)
    m1 = sc_kms.moment1(axis=0)

    assert_allclose(m0, MOMENTS[0][0].to(u.K*u.km/u.s))
    assert_allclose(m1, MOMENTS[1][0].to(u.km/u.s))


def test_with_flux_unit(use_dask):
    """
    As of Issue 184, redundant with test_reference
    """
    mc_hdu = moment_cube()
    sc = SpectralCube.read(mc_hdu, use_dask=use_dask)
    sc._unit = u.K
    sc_kms = sc.with_spectral_unit(u.km/u.s)
    m0 = sc_kms.moment0(axis=0)
    m1 = sc_kms.moment1(axis=0)

    assert sc.unit == u.K
    assert sc.filled_data[:].unit == u.K

    assert_allclose(m0, MOMENTS[0][0].to(u.K*u.km/u.s))
    assert_allclose(m1, MOMENTS[1][0].to(u.km/u.s))


@pytest.mark.parametrize(('order', 'axis', 'how'),
                         [(o, a, h)
                          for o in [0, 1, 2]
                          for a in [0, 1, 2]
                          for h in ['cube', 'slice', 'auto', 'ray']])
def test_how_withfluxunit(order, axis, how, use_dask):
    """
    Regression test for issue 180
    As of issue 184, this is mostly redundant with test_reference except
    that it (kind of) checks that units are set
    """
    mc_hdu = moment_cube()
    sc = SpectralCube.read(mc_hdu, use_dask=use_dask)
    sc._unit = u.K
    mom_sc = sc.moment(order=order, axis=axis, how=how)

    assert sc.unit == u.K
    assert sc.filled_data[:].unit == u.K

    assert_allclose(mom_sc, MOMENTSu[order][axis])
bsd-3-clause
andrewyoung1991/scons
test/CacheDir/multi-targets.py
5
2184
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#

__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"

"""
Test that multiple target files get retrieved from a CacheDir correctly.
"""

import TestSCons

test = TestSCons.TestSCons()

test.subdir('cache', 'multiple')

# 'cache' is interpolated into the SConstruct below via % locals(),
# so its name must not change.
cache = test.workpath('cache')
multiple_bar = test.workpath('multiple', 'bar')
multiple_foo = test.workpath('multiple', 'foo')

# A single builder action ('touch') that produces BOTH targets, so a
# cache retrieval must restore both files together.
test.write(['multiple', 'SConstruct'], """\
def touch(env, source, target):
    open('foo', 'w').write("")
    open('bar', 'w').write("")
CacheDir(r'%(cache)s')
env = Environment()
env.Command(['foo', 'bar'], ['input'], touch)
""" % locals())

test.write(['multiple', 'input'], "multiple/input\n")

# Phase 1: build normally; both targets exist and the cache is populated.
test.run(chdir = 'multiple')
test.must_exist(multiple_foo)
test.must_exist(multiple_bar)

# Phase 2: clean (-c) removes both targets.
test.run(chdir = 'multiple', arguments = '-c')
test.must_not_exist(multiple_foo)
test.must_not_exist(multiple_bar)

# Phase 3: rebuild; both targets must come back (from the cache).
test.run(chdir = 'multiple')
test.must_exist(multiple_foo)
test.must_exist(multiple_bar)

test.pass_test()

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
mit
xiaom/zenodo
tests/unit/jsonschemas/test_utils.py
8
1915
# -*- coding: utf-8 -*- # # This file is part of Zenodo. # Copyright (C) 2016 CERN. # # Zenodo is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Zenodo is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Zenodo. If not, see <http://www.gnu.org/licenses/>. # # In applying this licence, CERN does not waive the privileges and immunities # granted to it by virtue of its status as an Intergovernmental Organization # or submit itself to any jurisdiction. """Unit tests Zenodo JSON schemas utils.""" from __future__ import absolute_import, print_function from zenodo.modules.jsonschemas.utils import merge_dicts def test_merge_dicts(): """Test jsonschema merging util.""" a1 = { 'd': { 'k1': 1, 'k2': 'v2', 'd2': { 'k3': 'v3', }, }, 'l': [1, 2, 3, ], } b1 = { 'd': { 'k1': 10, # Updated value in nested 'k2': 'v2', 'k3': 'v3', # New key in nested 'd2': { 'k4': 'v4', }, }, 'l': [4, 5, 6, ], # Updated list 'v': 'value', # New key at root } exp1 = { 'd': { 'k1': 10, 'k2': 'v2', 'k3': 'v3', 'd2': { 'k3': 'v3', 'k4': 'v4', }, }, 'l': [4, 5, 6, ], 'v': 'value', } ab1 = merge_dicts(a1, b1) assert ab1 == exp1
gpl-2.0
Vavius/moai-ide
editor/watchdog/observers/winapi.py
4
9300
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# winapi.py: Windows API-Python interface (removes dependency on pywin32)
#
# Copyright (C) 2007 Thomas Heller <theller@ctypes.org>
# Copyright (C) 2010 Will McGugan <will@willmcgugan.com>
# Copyright (C) 2010 Ryan Kelly <ryan@rfk.id.au>
# Copyright (C) 2010 Yesudeep Mangalapilly <yesudeep@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and / or other materials provided with the distribution.
# * Neither the name of the organization nor the names of its contributors may
#   be used to endorse or promote products derived from this software without
#   specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Portions of this code were taken from pyfilesystem, which uses the above
# new BSD license.

"""ctypes bindings for the Win32 directory-change-notification API.

Declares the kernel32 foreign functions (ReadDirectoryChangesW, I/O
completion ports, events) with proper restype/argtypes/errcheck hooks,
plus helpers to parse FILE_NOTIFY_INFORMATION event buffers. Importing
this module on a non-Windows platform raises ImportError immediately.
"""

from __future__ import with_statement
from watchdog.utils import platform

# Guard: everything below touches ctypes.windll, which only exists on Windows.
if not platform.is_windows():
    raise ImportError

import ctypes.wintypes
import struct

try:
    LPVOID = ctypes.wintypes.LPVOID
except AttributeError:
    # LPVOID wasn't defined in Py2.5, guess it was introduced in Py2.6
    LPVOID = ctypes.c_void_p

# Invalid handle value.
INVALID_HANDLE_VALUE = 0xFFFFFFFF  # -1

# File notification contants.
FILE_NOTIFY_CHANGE_FILE_NAME = 0x01
FILE_NOTIFY_CHANGE_DIR_NAME = 0x02
FILE_NOTIFY_CHANGE_ATTRIBUTES = 0x04
FILE_NOTIFY_CHANGE_SIZE = 0x08
FILE_NOTIFY_CHANGE_LAST_WRITE = 0x010
FILE_NOTIFY_CHANGE_LAST_ACCESS = 0x020
FILE_NOTIFY_CHANGE_CREATION = 0x040
FILE_NOTIFY_CHANGE_SECURITY = 0x0100

FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
FILE_FLAG_OVERLAPPED = 0x40000000
FILE_LIST_DIRECTORY = 0x01
FILE_SHARE_READ = 0x01
FILE_SHARE_WRITE = 0x02
FILE_SHARE_DELETE = 0x04
OPEN_EXISTING = 3

# File action constants.
FILE_ACTION_CREATED = 1
FILE_ACTION_DELETED = 2
FILE_ACTION_MODIFIED = 3
FILE_ACTION_RENAMED_OLD_NAME = 4
FILE_ACTION_RENAMED_NEW_NAME = 5
FILE_ACTION_OVERFLOW = 0xFFFF

# Aliases
FILE_ACTION_ADDED = FILE_ACTION_CREATED
FILE_ACTION_REMOVED = FILE_ACTION_DELETED

THREAD_TERMINATE = 0x0001

# IO waiting constants.
WAIT_ABANDONED = 0x00000080
WAIT_IO_COMPLETION = 0x000000C0
WAIT_OBJECT_0 = 0x00000000
WAIT_TIMEOUT = 0x00000102


class OVERLAPPED(ctypes.Structure):
    # Mirrors the Win32 OVERLAPPED struct used for asynchronous I/O calls.
    _fields_ = [('Internal', LPVOID),
                ('InternalHigh', LPVOID),
                ('Offset', ctypes.wintypes.DWORD),
                ('OffsetHigh', ctypes.wintypes.DWORD),
                ('Pointer', LPVOID),
                ('hEvent', ctypes.wintypes.HANDLE),
                ]


# ctypes errcheck hooks: each inspects a foreign call's raw return value and
# raises via ctypes.WinError() (which reads GetLastError) on failure.

def _errcheck_bool(value, func, args):
    """Raise WinError if a BOOL-returning Win32 call returned FALSE."""
    if not value:
        raise ctypes.WinError()
    return args


def _errcheck_handle(value, func, args):
    """Raise WinError if a HANDLE-returning call gave NULL or INVALID_HANDLE_VALUE."""
    if not value:
        raise ctypes.WinError()
    if value == INVALID_HANDLE_VALUE:
        raise ctypes.WinError()
    return args


def _errcheck_dword(value, func, args):
    """Raise WinError if a DWORD-returning call gave 0xFFFFFFFF (failure)."""
    if value == 0xFFFFFFFF:
        raise ctypes.WinError()
    return args


# ReadDirectoryChangesW is the core watch primitive; without it (e.g. on
# Win9x-era systems) this backend cannot work at all.
try:
    ReadDirectoryChangesW = ctypes.windll.kernel32.ReadDirectoryChangesW
except AttributeError:
    raise ImportError("ReadDirectoryChangesW is not available")
ReadDirectoryChangesW.restype = ctypes.wintypes.BOOL
ReadDirectoryChangesW.errcheck = _errcheck_bool
ReadDirectoryChangesW.argtypes = (
    ctypes.wintypes.HANDLE,  # hDirectory
    LPVOID,  # lpBuffer
    ctypes.wintypes.DWORD,  # nBufferLength
    ctypes.wintypes.BOOL,  # bWatchSubtree
    ctypes.wintypes.DWORD,  # dwNotifyFilter
    ctypes.POINTER(ctypes.wintypes.DWORD),  # lpBytesReturned
    ctypes.POINTER(OVERLAPPED),  # lpOverlapped
    LPVOID  # FileIOCompletionRoutine # lpCompletionRoutine
)

CreateFileW = ctypes.windll.kernel32.CreateFileW
CreateFileW.restype = ctypes.wintypes.HANDLE
CreateFileW.errcheck = _errcheck_handle
CreateFileW.argtypes = (
    ctypes.wintypes.LPCWSTR,  # lpFileName
    ctypes.wintypes.DWORD,  # dwDesiredAccess
    ctypes.wintypes.DWORD,  # dwShareMode
    LPVOID,  # lpSecurityAttributes
    ctypes.wintypes.DWORD,  # dwCreationDisposition
    ctypes.wintypes.DWORD,  # dwFlagsAndAttributes
    ctypes.wintypes.HANDLE  # hTemplateFile
)

CloseHandle = ctypes.windll.kernel32.CloseHandle
CloseHandle.restype = ctypes.wintypes.BOOL
CloseHandle.argtypes = (
    ctypes.wintypes.HANDLE,  # hObject
)

CancelIoEx = ctypes.windll.kernel32.CancelIoEx
CancelIoEx.restype = ctypes.wintypes.BOOL
CancelIoEx.errcheck = _errcheck_bool
CancelIoEx.argtypes = (
    ctypes.wintypes.HANDLE,  # hObject
    ctypes.POINTER(OVERLAPPED)  # lpOverlapped
)

CreateEvent = ctypes.windll.kernel32.CreateEventW
CreateEvent.restype = ctypes.wintypes.HANDLE
CreateEvent.errcheck = _errcheck_handle
CreateEvent.argtypes = (
    LPVOID,  # lpEventAttributes
    ctypes.wintypes.BOOL,  # bManualReset
    ctypes.wintypes.BOOL,  # bInitialState
    ctypes.wintypes.LPCWSTR,  # lpName
)

SetEvent = ctypes.windll.kernel32.SetEvent
SetEvent.restype = ctypes.wintypes.BOOL
SetEvent.errcheck = _errcheck_bool
SetEvent.argtypes = (
    ctypes.wintypes.HANDLE,  # hEvent
)

WaitForSingleObjectEx = ctypes.windll.kernel32.WaitForSingleObjectEx
WaitForSingleObjectEx.restype = ctypes.wintypes.DWORD
WaitForSingleObjectEx.errcheck = _errcheck_dword
WaitForSingleObjectEx.argtypes = (
    ctypes.wintypes.HANDLE,  # hObject
    ctypes.wintypes.DWORD,  # dwMilliseconds
    ctypes.wintypes.BOOL,  # bAlertable
)

CreateIoCompletionPort = ctypes.windll.kernel32.CreateIoCompletionPort
CreateIoCompletionPort.restype = ctypes.wintypes.HANDLE
CreateIoCompletionPort.errcheck = _errcheck_handle
CreateIoCompletionPort.argtypes = (
    ctypes.wintypes.HANDLE,  # FileHandle
    ctypes.wintypes.HANDLE,  # ExistingCompletionPort
    LPVOID,  # CompletionKey
    ctypes.wintypes.DWORD,  # NumberOfConcurrentThreads
)

GetQueuedCompletionStatus = ctypes.windll.kernel32.GetQueuedCompletionStatus
GetQueuedCompletionStatus.restype = ctypes.wintypes.BOOL
GetQueuedCompletionStatus.errcheck = _errcheck_bool
GetQueuedCompletionStatus.argtypes = (
    ctypes.wintypes.HANDLE,  # CompletionPort
    LPVOID,  # lpNumberOfBytesTransferred
    LPVOID,  # lpCompletionKey
    ctypes.POINTER(OVERLAPPED),  # lpOverlapped
    ctypes.wintypes.DWORD,  # dwMilliseconds
)

PostQueuedCompletionStatus = ctypes.windll.kernel32.PostQueuedCompletionStatus
PostQueuedCompletionStatus.restype = ctypes.wintypes.BOOL
PostQueuedCompletionStatus.errcheck = _errcheck_bool
PostQueuedCompletionStatus.argtypes = (
    ctypes.wintypes.HANDLE,  # CompletionPort
    ctypes.wintypes.DWORD,  # lpNumberOfBytesTransferred
    ctypes.wintypes.DWORD,  # lpCompletionKey
    ctypes.POINTER(OVERLAPPED),  # lpOverlapped
)


class FILE_NOTIFY_INFORMATION(ctypes.Structure):
    # Variable-length record in the ReadDirectoryChangesW output buffer.
    # FileName is declared 1 byte long; the real (variable-length) name is
    # read out with string_at using FileNameLength (see helpers below).
    _fields_ = [("NextEntryOffset", ctypes.wintypes.DWORD),
                ("Action", ctypes.wintypes.DWORD),
                ("FileNameLength", ctypes.wintypes.DWORD),
                #("FileName", (ctypes.wintypes.WCHAR * 1))]
                ("FileName", (ctypes.c_char * 1))]

LPFNI = ctypes.POINTER(FILE_NOTIFY_INFORMATION)


def get_FILE_NOTIFY_INFORMATION(readBuffer, nBytes):
    """Parse an event buffer into a list of (Action, filename) tuples.

    The name bytes are read raw (c_char) and decoded as UTF-16, matching
    the wide-character layout ReadDirectoryChangesW writes.

    NOTE(review): this returns (Action, name) while
    get_FILE_NOTIFY_INFORMATION_alt yields (name, action) — the two are
    NOT interchangeable; confirm which order callers expect.
    """
    results = []
    while nBytes > 0:
        fni = ctypes.cast(readBuffer, LPFNI)[0]
        ptr = ctypes.addressof(fni) + FILE_NOTIFY_INFORMATION.FileName.offset
        #filename = ctypes.wstring_at(ptr, fni.FileNameLength)
        filename = ctypes.string_at(ptr, fni.FileNameLength)
        results.append((fni.Action, filename.decode('utf-16')))
        numToSkip = fni.NextEntryOffset
        # NextEntryOffset == 0 marks the last record in the buffer.
        if numToSkip <= 0:
            break
        readBuffer = readBuffer[numToSkip:]
        nBytes -= numToSkip  # numToSkip is long. nBytes should be long too.
    return results


def get_FILE_NOTIFY_INFORMATION_alt(event_buffer, nBytes):
    """Extract the information out of a FILE_NOTIFY_INFORMATION structure.

    struct-based generator variant: yields (name, action) tuples by
    unpacking the (NextEntryOffset, Action, FileNameLength) header of
    each record directly from the raw bytes.
    """
    pos = 0
    event_buffer = event_buffer[:nBytes]
    while pos < len(event_buffer):
        jump, action, namelen = struct.unpack("iii", event_buffer[pos:pos + 12])
        # TODO: this may return a shortname or a longname, with no way
        # to tell which.  Normalise them somehow?
        name = event_buffer[pos + 12:pos + 12 + namelen].decode("utf-16")
        yield (name, action)
        # jump == 0 marks the final record.
        if not jump:
            break
        pos += jump
mit
ahuarte47/QGIS
python/plugins/MetaSearch/pavement.py
67
7402
# -*- coding: utf-8 -*- ############################################################################### # # Copyright (C) 2014 Tom Kralidis (tomkralidis@gmail.com) # # This source is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free # Software Foundation; either version 2 of the License, or (at your option) # any later version. # # This code is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # ############################################################################### from configparser import ConfigParser import getpass import os import shutil import xml.etree.ElementTree as etree import xmlrpc.client import zipfile from paver.easy import (call_task, cmdopts, error, info, options, path, sh, task, Bunch) from owslib.csw import CatalogueServiceWeb # spellok PLUGIN_NAME = 'MetaSearch' BASEDIR = os.path.abspath(os.path.dirname(__file__)) USERDIR = os.path.expanduser('~') with open('metadata.txt') as mf: cp = ConfigParser() cp.readfp(mf) VERSION = cp.get('general', 'version') options( base=Bunch( home=BASEDIR, plugin=path(BASEDIR), ui=path(BASEDIR) / 'plugin' / PLUGIN_NAME / 'ui', install=path('%s/.qgis3/python/plugins/MetaSearch' % USERDIR), ext_libs=path('plugin/MetaSearch/ext-libs'), tmp=path(path('%s/MetaSearch-dist' % USERDIR)), version=VERSION ), upload=Bunch( host='plugins.qgis.org', port=80, endpoint='plugins/RPC2/' ) ) @task def clean(): """clean environment""" if os.path.exists(options.base.install): if os.path.islink(options.base.install): os.unlink(options.base.install) else: 
shutil.rmtree(options.base.install) if os.path.exists(options.base.tmp): shutil.rmtree(options.base.tmp) if os.path.exists(options.base.ext_libs): shutil.rmtree(options.base.ext_libs) for ui_file in os.listdir(options.base.ui): if ui_file.endswith('.py') and ui_file != '__init__.py': os.remove(options.base.plugin / 'ui' / ui_file) os.remove(path(options.base.home) / '%s.pro' % PLUGIN_NAME) sh('git clean -dxf') @task def install(): """install plugin into user QGIS environment""" plugins_dir = path(USERDIR) / '.qgis3/python/plugins' if os.path.exists(options.base.install): if os.path.islink(options.base.install): os.unlink(options.base.install) else: shutil.rmtree(options.base.install) if not os.path.exists(plugins_dir): raise OSError('The directory %s does not exist.' % plugins_dir) if not hasattr(os, 'symlink'): shutil.copytree(options.base.plugin, options.base.install) elif not os.path.exists(options.base.install): os.symlink(options.base.plugin, options.base.install) @task def package(): """create zip file of plugin""" skip_files = [ 'AUTHORS.txt', 'CMakeLists.txt', 'requirements.txt', 'requirements-dev.txt', 'pavement.txt' ] package_file = get_package_filename() if not os.path.exists(options.base.tmp): options.base.tmp.mkdir() if os.path.exists(package_file): os.unlink(package_file) with zipfile.ZipFile(package_file, 'w', zipfile.ZIP_DEFLATED) as zipf: for root, dirs, files in os.walk(options.base.plugin): for file_add in files: if file_add.endswith('.pyc') or file_add in skip_files: continue filepath = os.path.join(root, file_add) relpath = os.path.join(PLUGIN_NAME, os.path.relpath(filepath)) zipf.write(filepath, relpath) return package_file # return name of created zipfile @task @cmdopts([ ('user=', 'u', 'OSGeo userid'), ]) def upload(): """upload package zipfile to server""" user = options.get('user', False) if not user: raise ValueError('OSGeo userid required') password = getpass.getpass('Enter your password: ') if password.strip() == '': raise 
ValueError('password required') call_task('package') zipf = get_package_filename() url = 'http://%s:%s@%s:%d/%s' % (user, password, options.upload.host, options.upload.port, options.upload.endpoint) info('Uploading to http://%s/%s' % (options.upload.host, options.upload.endpoint)) server = xmlrpc.client.ServerProxy(url, verbose=False) try: with open(zipf) as zfile: plugin_id, version_id = \ server.plugin.upload(xmlrpc.client.Binary(zfile.read())) info('Plugin ID: %s', plugin_id) info('Version ID: %s', version_id) except xmlrpc.client.Fault as err: error('ERROR: fault error') error('Fault code: %d', err.faultCode) error('Fault string: %s', err.faultString) except xmlrpc.client.ProtocolError as err: error('Error: Protocol error') error("%s : %s", err.errcode, err.errmsg) if err.errcode == 403: error('Invalid name and password') @task def test_default_csw_connections(): """test that the default CSW connections work""" relpath = 'resources%sconnections-default.xml' % os.sep csw_connections_xml = options.base.plugin / relpath conns = etree.parse(csw_connections_xml) for conn in conns.findall('csw'): try: csw = CatalogueServiceWeb(conn.attrib.get('url')) # spellok info('Success: %s', csw.identification.title) csw.getrecords2() except Exception as err: raise ValueError('ERROR: %s', err) @task @cmdopts([ ('filename=', 'f', 'Path to file of CSW URLs'), ]) def generate_csw_connections_file(): """generate a CSW connections file from a flat file of CSW URLs""" filename = options.get('filename', False) if not filename: raise ValueError('path to file of CSW URLs required') conns = etree.Element('qgsCSWConnections') conns.attrib['version'] = '1.0' with open(filename) as connsfh: for line in connsfh: url = line.strip() if not url: # blank line continue try: csw = CatalogueServiceWeb(url) # spellok title = str(csw.identification.title) etree.SubElement(conns, 'csw', name=title, url=url) except Exception as err: error('ERROR on CSW %s: %s', url, err) with open('%s.xml' % filename, 
'w') as connsxmlfh: connsxmlfh.write(etree.tostring(conns, encoding='utf-8')) def get_package_filename(): """return filepath of plugin zipfile""" filename = '%s-%s.zip' % (PLUGIN_NAME, options.base.version) package_file = '%s/%s' % (options.base.tmp, filename) return package_file
gpl-2.0
jjas0nn/solvem
tensorflow/lib/python2.7/site-packages/numpy/core/tests/test_abc.py
168
2014
from __future__ import division, absolute_import, print_function from numpy.testing import TestCase, assert_, run_module_suite import numbers from numpy.core.numerictypes import sctypes class ABC(TestCase): def test_floats(self): for t in sctypes['float']: assert_(isinstance(t(), numbers.Real), "{0} is not instance of Real".format(t.__name__)) assert_(issubclass(t, numbers.Real), "{0} is not subclass of Real".format(t.__name__)) assert_(not isinstance(t(), numbers.Rational), "{0} is instance of Rational".format(t.__name__)) assert_(not issubclass(t, numbers.Rational), "{0} is subclass of Rational".format(t.__name__)) def test_complex(self): for t in sctypes['complex']: assert_(isinstance(t(), numbers.Complex), "{0} is not instance of Complex".format(t.__name__)) assert_(issubclass(t, numbers.Complex), "{0} is not subclass of Complex".format(t.__name__)) assert_(not isinstance(t(), numbers.Real), "{0} is instance of Real".format(t.__name__)) assert_(not issubclass(t, numbers.Real), "{0} is subclass of Real".format(t.__name__)) def test_int(self): for t in sctypes['int']: assert_(isinstance(t(), numbers.Integral), "{0} is not instance of Integral".format(t.__name__)) assert_(issubclass(t, numbers.Integral), "{0} is not subclass of Integral".format(t.__name__)) def test_uint(self): for t in sctypes['uint']: assert_(isinstance(t(), numbers.Integral), "{0} is not instance of Integral".format(t.__name__)) assert_(issubclass(t, numbers.Integral), "{0} is not subclass of Integral".format(t.__name__)) if __name__ == "__main__": run_module_suite()
mit
8191/ansible
lib/ansible/runner/lookup_plugins/lines.py
176
1432
# (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. import subprocess from ansible import utils, errors class LookupModule(object): def __init__(self, basedir=None, **kwargs): self.basedir = basedir def run(self, terms, inject=None, **kwargs): terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject) ret = [] for term in terms: p = subprocess.Popen(term, cwd=self.basedir, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE) (stdout, stderr) = p.communicate() if p.returncode == 0: ret.extend(stdout.splitlines()) else: raise errors.AnsibleError("lookup_plugin.lines(%s) returned %d" % (term, p.returncode)) return ret
gpl-3.0
CodeMath/jinrockets
BluePrint/lib/werkzeug/contrib/cache.py
72
23068
# -*- coding: utf-8 -*- """ werkzeug.contrib.cache ~~~~~~~~~~~~~~~~~~~~~~ The main problem with dynamic Web sites is, well, they're dynamic. Each time a user requests a page, the webserver executes a lot of code, queries the database, renders templates until the visitor gets the page he sees. This is a lot more expensive than just loading a file from the file system and sending it to the visitor. For most Web applications, this overhead isn't a big deal but once it becomes, you will be glad to have a cache system in place. How Caching Works ================= Caching is pretty simple. Basically you have a cache object lurking around somewhere that is connected to a remote cache or the file system or something else. When the request comes in you check if the current page is already in the cache and if so, you're returning it from the cache. Otherwise you generate the page and put it into the cache. (Or a fragment of the page, you don't have to cache the full thing) Here is a simple example of how to cache a sidebar for a template:: def get_sidebar(user): identifier = 'sidebar_for/user%d' % user.id value = cache.get(identifier) if value is not None: return value value = generate_sidebar_for(user=user) cache.set(identifier, value, timeout=60 * 5) return value Creating a Cache Object ======================= To create a cache object you just import the cache system of your choice from the cache module and instantiate it. Then you can start working with that object: >>> from werkzeug.contrib.cache import SimpleCache >>> c = SimpleCache() >>> c.set("foo", "value") >>> c.get("foo") 'value' >>> c.get("missing") is None True Please keep in mind that you have to create the cache and put it somewhere you have access to it (either as a module global you can import or you just put it into your WSGI application). :copyright: (c) 2011 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. 
""" import os import re import tempfile try: from hashlib import md5 except ImportError: from md5 import new as md5 from itertools import izip from time import time from werkzeug.posixemulation import rename try: import cPickle as pickle except ImportError: import pickle def _items(mappingorseq): """Wrapper for efficient iteration over mappings represented by dicts or sequences:: >>> for k, v in _items((i, i*i) for i in xrange(5)): ... assert k*k == v >>> for k, v in _items(dict((i, i*i) for i in xrange(5))): ... assert k*k == v """ return mappingorseq.iteritems() if hasattr(mappingorseq, 'iteritems') \ else mappingorseq class BaseCache(object): """Baseclass for the cache systems. All the cache systems implement this API or a superset of it. :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`set`. """ def __init__(self, default_timeout=300): self.default_timeout = default_timeout def get(self, key): """Looks up key in the cache and returns the value for it. If the key does not exist `None` is returned instead. :param key: the key to be looked up. """ return None def delete(self, key): """Deletes `key` from the cache. If it does not exist in the cache nothing happens. :param key: the key to delete. """ pass def get_many(self, *keys): """Returns a list of values for the given keys. For each key a item in the list is created. Example:: foo, bar = cache.get_many("foo", "bar") If a key can't be looked up `None` is returned for that key instead. :param keys: The function accepts multiple keys as positional arguments. """ return map(self.get, keys) def get_dict(self, *keys): """Works like :meth:`get_many` but returns a dict:: d = cache.get_dict("foo", "bar") foo = d["foo"] bar = d["bar"] :param keys: The function accepts multiple keys as positional arguments. 
""" return dict(izip(keys, self.get_many(*keys))) def set(self, key, value, timeout=None): """Adds a new key/value to the cache (overwrites value, if key already exists in the cache). :param key: the key to set :param value: the value for the key :param timeout: the cache timeout for the key (if not specified, it uses the default timeout). """ pass def add(self, key, value, timeout=None): """Works like :meth:`set` but does not overwrite the values of already existing keys. :param key: the key to set :param value: the value for the key :param timeout: the cache timeout for the key or the default timeout if not specified. """ pass def set_many(self, mapping, timeout=None): """Sets multiple keys and values from a mapping. :param mapping: a mapping with the keys/values to set. :param timeout: the cache timeout for the key (if not specified, it uses the default timeout). """ for key, value in _items(mapping): self.set(key, value, timeout) def delete_many(self, *keys): """Deletes multiple keys at once. :param keys: The function accepts multiple keys as positional arguments. """ for key in keys: self.delete(key) def clear(self): """Clears the cache. Keep in mind that not all caches support completely clearing the cache. """ pass def inc(self, key, delta=1): """Increments the value of a key by `delta`. If the key does not yet exist it is initialized with `delta`. For supporting caches this is an atomic operation. :param key: the key to increment. :param delta: the delta to add. """ self.set(key, (self.get(key) or 0) + delta) def dec(self, key, delta=1): """Decrements the value of a key by `delta`. If the key does not yet exist it is initialized with `-delta`. For supporting caches this is an atomic operation. :param key: the key to increment. :param delta: the delta to subtract. """ self.set(key, (self.get(key) or 0) - delta) class NullCache(BaseCache): """A cache that doesn't cache. This can be useful for unit testing. 
:param default_timeout: a dummy parameter that is ignored but exists for API compatibility with other caches. """ class SimpleCache(BaseCache): """Simple memory cache for single process environments. This class exists mainly for the development server and is not 100% thread safe. It tries to use as many atomic operations as possible and no locks for simplicity but it could happen under heavy load that keys are added multiple times. :param threshold: the maximum number of items the cache stores before it starts deleting some. :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. """ def __init__(self, threshold=500, default_timeout=300): BaseCache.__init__(self, default_timeout) self._cache = {} self.clear = self._cache.clear self._threshold = threshold def _prune(self): if len(self._cache) > self._threshold: now = time() for idx, (key, (expires, _)) in enumerate(self._cache.items()): if expires <= now or idx % 3 == 0: self._cache.pop(key, None) def get(self, key): now = time() expires, value = self._cache.get(key, (0, None)) if expires > time(): return pickle.loads(value) def set(self, key, value, timeout=None): if timeout is None: timeout = self.default_timeout self._prune() self._cache[key] = (time() + timeout, pickle.dumps(value, pickle.HIGHEST_PROTOCOL)) def add(self, key, value, timeout=None): if timeout is None: timeout = self.default_timeout if len(self._cache) > self._threshold: self._prune() item = (time() + timeout, pickle.dumps(value, pickle.HIGHEST_PROTOCOL)) self._cache.setdefault(key, item) def delete(self, key): self._cache.pop(key, None) _test_memcached_key = re.compile(r'[^\x00-\x21\xff]{1,250}$').match class MemcachedCache(BaseCache): """A cache that uses memcached as backend. The first argument can either be an object that resembles the API of a :class:`memcache.Client` or a tuple/list of server addresses. 
In the event that a tuple/list is passed, Werkzeug tries to import the best available memcache library. Implementation notes: This cache backend works around some limitations in memcached to simplify the interface. For example unicode keys are encoded to utf-8 on the fly. Methods such as :meth:`~BaseCache.get_dict` return the keys in the same format as passed. Furthermore all get methods silently ignore key errors to not cause problems when untrusted user data is passed to the get methods which is often the case in web applications. :param servers: a list or tuple of server addresses or alternatively a :class:`memcache.Client` or a compatible client. :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. :param key_prefix: a prefix that is added before all keys. This makes it possible to use the same memcached server for different applications. Keep in mind that :meth:`~BaseCache.clear` will also clear keys with a different prefix. """ def __init__(self, servers=None, default_timeout=300, key_prefix=None): BaseCache.__init__(self, default_timeout) if servers is None or isinstance(servers, (list, tuple)): if servers is None: servers = ['127.0.0.1:11211'] self._client = self.import_preferred_memcache_lib(servers) if self._client is None: raise RuntimeError('no memcache module found') else: # NOTE: servers is actually an already initialized memcache # client. self._client = servers self.key_prefix = key_prefix def get(self, key): if isinstance(key, unicode): key = key.encode('utf-8') if self.key_prefix: key = self.key_prefix + key # memcached doesn't support keys longer than that. Because often # checks for so long keys can occour because it's tested from user # submitted data etc we fail silently for getting. 
if _test_memcached_key(key): return self._client.get(key) def get_dict(self, *keys): key_mapping = {} have_encoded_keys = False for key in keys: if isinstance(key, unicode): encoded_key = key.encode('utf-8') have_encoded_keys = True else: encoded_key = key if self.key_prefix: encoded_key = self.key_prefix + encoded_key if _test_memcached_key(key): key_mapping[encoded_key] = key d = rv = self._client.get_multi(key_mapping.keys()) if have_encoded_keys or self.key_prefix: rv = {} for key, value in d.iteritems(): rv[key_mapping[key]] = value if len(rv) < len(keys): for key in keys: if key not in rv: rv[key] = None return rv def add(self, key, value, timeout=None): if timeout is None: timeout = self.default_timeout if isinstance(key, unicode): key = key.encode('utf-8') if self.key_prefix: key = self.key_prefix + key self._client.add(key, value, timeout) def set(self, key, value, timeout=None): if timeout is None: timeout = self.default_timeout if isinstance(key, unicode): key = key.encode('utf-8') if self.key_prefix: key = self.key_prefix + key self._client.set(key, value, timeout) def get_many(self, *keys): d = self.get_dict(*keys) return [d[key] for key in keys] def set_many(self, mapping, timeout=None): if timeout is None: timeout = self.default_timeout new_mapping = {} for key, value in _items(mapping): if isinstance(key, unicode): key = key.encode('utf-8') if self.key_prefix: key = self.key_prefix + key new_mapping[key] = value self._client.set_multi(new_mapping, timeout) def delete(self, key): if isinstance(key, unicode): key = key.encode('utf-8') if self.key_prefix: key = self.key_prefix + key if _test_memcached_key(key): self._client.delete(key) def delete_many(self, *keys): new_keys = [] for key in keys: if isinstance(key, unicode): key = key.encode('utf-8') if self.key_prefix: key = self.key_prefix + key if _test_memcached_key(key): new_keys.append(key) self._client.delete_multi(new_keys) def clear(self): self._client.flush_all() def inc(self, key, delta=1): 
if isinstance(key, unicode): key = key.encode('utf-8') if self.key_prefix: key = self.key_prefix + key self._client.incr(key, delta) def dec(self, key, delta=1): if isinstance(key, unicode): key = key.encode('utf-8') if self.key_prefix: key = self.key_prefix + key self._client.decr(key, delta) def import_preferred_memcache_lib(self, servers): """Returns an initialized memcache client. Used by the constructor.""" try: import pylibmc except ImportError: pass else: return pylibmc.Client(servers) try: from google.appengine.api import memcache except ImportError: pass else: return memcache.Client() try: import memcache except ImportError: pass else: return memcache.Client(servers) # backwards compatibility GAEMemcachedCache = MemcachedCache class RedisCache(BaseCache): """Uses the Redis key-value store as a cache backend. The first argument can be either a string denoting address of the Redis server or an object resembling an instance of a redis.Redis class. Note: Python Redis API already takes care of encoding unicode strings on the fly. .. versionadded:: 0.7 .. versionadded:: 0.8 `key_prefix` was added. .. versionchanged:: 0.8 This cache backend now properly serializes objects. :param host: address of the Redis server or an object which API is compatible with the official Python Redis client (redis-py). :param port: port number on which Redis server listens for connections :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. :param key_prefix: A prefix that should be added to all keys. 
""" def __init__(self, host='localhost', port=6379, password=None, default_timeout=300, key_prefix=None): BaseCache.__init__(self, default_timeout) if isinstance(host, basestring): try: import redis except ImportError: raise RuntimeError('no redis module found') self._client = redis.Redis(host=host, port=port, password=password) else: self._client = host self.key_prefix = key_prefix or '' def dump_object(self, value): """Dumps an object into a string for redis. By default it serializes integers as regular string and pickle dumps everything else. """ t = type(value) if t is int or t is long: return str(value) return '!' + pickle.dumps(value) def load_object(self, value): """The reversal of :meth:`dump_object`. This might be callde with None. """ if value is None: return None if value.startswith('!'): return pickle.loads(value[1:]) try: return int(value) except ValueError: # before 0.8 we did not have serialization. Still support that. return value def get(self, key): return self.load_object(self._client.get(self.key_prefix + key)) def get_many(self, *keys): if self.key_prefix: keys = [self.key_prefix + key for key in keys] return [self.load_object(x) for x in self._client.mget(keys)] def set(self, key, value, timeout=None): if timeout is None: timeout = self.default_timeout dump = self.dump_object(value) self._client.setex(self.key_prefix + key, dump, timeout) def add(self, key, value, timeout=None): if timeout is None: timeout = self.default_timeout dump = self.dump_object(value) added = self._client.setnx(self.key_prefix + key, dump) if added: self._client.expire(self.key_prefix + key, timeout) def set_many(self, mapping, timeout=None): if timeout is None: timeout = self.default_timeout pipe = self._client.pipeline() for key, value in _items(mapping): dump = self.dump_object(value) pipe.setex(self.key_prefix + key, dump, timeout) pipe.execute() def delete(self, key): self._client.delete(self.key_prefix + key) def delete_many(self, *keys): if not keys: return if 
self.key_prefix: keys = [self.key_prefix + key for key in keys] self._client.delete(*keys) def clear(self): if self.key_prefix: keys = self._client.keys(self.key_prefix + '*') if keys: self._client.delete(*keys) else: self._client.flushdb() def inc(self, key, delta=1): return self._client.incr(self.key_prefix + key, delta) def dec(self, key, delta=1): return self._client.decr(self.key_prefix + key, delta) class FileSystemCache(BaseCache): """A cache that stores the items on the file system. This cache depends on being the only user of the `cache_dir`. Make absolutely sure that nobody but this cache stores files there or otherwise the cache will randomly delete files therein. :param cache_dir: the directory where cache files are stored. :param threshold: the maximum number of items the cache stores before it starts deleting some. :param default_timeout: the default timeout that is used if no timeout is specified on :meth:`~BaseCache.set`. :param mode: the file mode wanted for the cache files, default 0600 """ #: used for temporary files by the FileSystemCache _fs_transaction_suffix = '.__wz_cache' def __init__(self, cache_dir, threshold=500, default_timeout=300, mode=0600): BaseCache.__init__(self, default_timeout) self._path = cache_dir self._threshold = threshold self._mode = mode if not os.path.exists(self._path): os.makedirs(self._path) def _list_dir(self): """return a list of (fully qualified) cache filenames """ return [os.path.join(self._path, fn) for fn in os.listdir(self._path) if not fn.endswith(self._fs_transaction_suffix)] def _prune(self): entries = self._list_dir() if len(entries) > self._threshold: now = time() for idx, fname in enumerate(entries): remove = False f = None try: try: f = open(fname, 'rb') expires = pickle.load(f) remove = expires <= now or idx % 3 == 0 finally: if f is not None: f.close() except Exception: pass if remove: try: os.remove(fname) except (IOError, OSError): pass def clear(self): for fname in self._list_dir(): try: 
os.remove(fname) except (IOError, OSError): pass def _get_filename(self, key): hash = md5(key).hexdigest() return os.path.join(self._path, hash) def get(self, key): filename = self._get_filename(key) try: f = open(filename, 'rb') try: if pickle.load(f) >= time(): return pickle.load(f) finally: f.close() os.remove(filename) except Exception: return None def add(self, key, value, timeout=None): filename = self._get_filename(key) if not os.path.exists(filename): self.set(key, value, timeout) def set(self, key, value, timeout=None): if timeout is None: timeout = self.default_timeout filename = self._get_filename(key) self._prune() try: fd, tmp = tempfile.mkstemp(suffix=self._fs_transaction_suffix, dir=self._path) f = os.fdopen(fd, 'wb') try: pickle.dump(int(time() + timeout), f, 1) pickle.dump(value, f, pickle.HIGHEST_PROTOCOL) finally: f.close() rename(tmp, filename) os.chmod(filename, self._mode) except (IOError, OSError): pass def delete(self, key): try: os.remove(self._get_filename(key)) except (IOError, OSError): pass
mit
maria/PyGithub
github/tests/Framework.py
25
11595
# -*- coding: utf-8 -*- # ########################## Copyrights and license ############################ # # # Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> # # Copyright 2012 Zearin <zearin@gonk.net> # # Copyright 2013 AKFish <akfish@gmail.com> # # Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> # # # # This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ # # # # PyGithub is free software: you can redistribute it and/or modify it under # # the terms of the GNU Lesser General Public License as published by the Free # # Software Foundation, either version 3 of the License, or (at your option) # # any later version. # # # # PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY # # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # # details. # # # # You should have received a copy of the GNU Lesser General Public License # # along with PyGithub. If not, see <http://www.gnu.org/licenses/>. 
# # # # ############################################################################## import os import sys import unittest import httplib import traceback import github atLeastPython26 = sys.hexversion >= 0x02060000 atLeastPython3 = sys.hexversion >= 0x03000000 atMostPython32 = sys.hexversion < 0x03030000 if atLeastPython26: import json else: # pragma no cover (Covered by all tests with Python 2.5) import simplejson as json # pragma no cover (Covered by all tests with Python 2.5) def readLine(file): if atLeastPython3: return file.readline().decode("utf-8").strip() else: return file.readline().strip() class FakeHttpResponse: def __init__(self, status, headers, output): self.status = status self.__headers = headers self.__output = output def getheaders(self): return self.__headers def read(self): return self.__output def fixAuthorizationHeader(headers): if "Authorization" in headers: if headers["Authorization"].endswith("ZmFrZV9sb2dpbjpmYWtlX3Bhc3N3b3Jk"): # This special case is here to test the real Authorization header # sent by PyGithub. 
It would have avoided issue https://github.com/jacquev6/PyGithub/issues/153 # because we would have seen that Python 3 was not generating the same # header as Python 2 pass elif headers["Authorization"].startswith("token "): headers["Authorization"] = "token private_token_removed" elif headers["Authorization"].startswith("Basic "): headers["Authorization"] = "Basic login_and_password_removed" class RecordingConnection: # pragma no cover (Class useful only when recording new tests, not used during automated tests) def __init__(self, file, protocol, host, port, *args, **kwds): self.__file = file self.__protocol = protocol self.__host = host self.__port = str(port) self.__cnx = self._realConnection(host, port, *args, **kwds) def request(self, verb, url, input, headers): print verb, url, input, headers, self.__cnx.request(verb, url, input, headers) fixAuthorizationHeader(headers) self.__writeLine(self.__protocol) self.__writeLine(verb) self.__writeLine(self.__host) self.__writeLine(self.__port) self.__writeLine(url) self.__writeLine(str(headers)) self.__writeLine(input.replace('\n', '').replace('\r', '')) def getresponse(self): res = self.__cnx.getresponse() status = res.status print "=>", status headers = res.getheaders() output = res.read() self.__writeLine(str(status)) self.__writeLine(str(headers)) self.__writeLine(str(output)) return FakeHttpResponse(status, headers, output) def close(self): self.__writeLine("") return self.__cnx.close() def __writeLine(self, line): self.__file.write(line + "\n") class RecordingHttpConnection(RecordingConnection): # pragma no cover (Class useful only when recording new tests, not used during automated tests) _realConnection = httplib.HTTPConnection def __init__(self, file, *args, **kwds): RecordingConnection.__init__(self, file, "http", *args, **kwds) class RecordingHttpsConnection(RecordingConnection): # pragma no cover (Class useful only when recording new tests, not used during automated tests) _realConnection = 
httplib.HTTPSConnection def __init__(self, file, *args, **kwds): RecordingConnection.__init__(self, file, "https", *args, **kwds) class ReplayingConnection: def __init__(self, testCase, file, protocol, host, port, *args, **kwds): self.__testCase = testCase self.__file = file self.__protocol = protocol self.__host = host self.__port = str(port) def request(self, verb, url, input, headers): fixAuthorizationHeader(headers) self.__testCase.assertEqual(self.__protocol, readLine(self.__file)) self.__testCase.assertEqual(verb, readLine(self.__file)) self.__testCase.assertEqual(self.__host, readLine(self.__file)) self.__testCase.assertEqual(self.__port, readLine(self.__file)) self.__testCase.assertEqual(self.__splitUrl(url), self.__splitUrl(readLine(self.__file))) self.__testCase.assertEqual(headers, eval(readLine(self.__file))) expectedInput = readLine(self.__file) if input.startswith("{"): self.__testCase.assertEqual(json.loads(input.replace('\n', '').replace('\r', '')), json.loads(expectedInput)) elif atMostPython32: # @todo Test in all cases, including Python 3.3 # In Python 3.3, dicts are not output in the same order as in Python 2.5 -> 3.2. # So, form-data encoding is not deterministic and is difficult to test. 
self.__testCase.assertEqual(input.replace('\n', '').replace('\r', ''), expectedInput) def __splitUrl(self, url): splitedUrl = url.split("?") if len(splitedUrl) == 1: return splitedUrl self.__testCase.assertEqual(len(splitedUrl), 2) base, qs = splitedUrl return (base, sorted(qs.split("&"))) def getresponse(self): status = int(readLine(self.__file)) headers = eval(readLine(self.__file)) output = readLine(self.__file) return FakeHttpResponse(status, headers, output) def close(self): readLine(self.__file) def ReplayingHttpConnection(testCase, file, *args, **kwds): return ReplayingConnection(testCase, file, "http", *args, **kwds) def ReplayingHttpsConnection(testCase, file, *args, **kwds): return ReplayingConnection(testCase, file, "https", *args, **kwds) class BasicTestCase(unittest.TestCase): recordMode = False def setUp(self): unittest.TestCase.setUp(self) self.__fileName = "" self.__file = None if self.recordMode: # pragma no cover (Branch useful only when recording new tests, not used during automated tests) github.Requester.Requester.injectConnectionClasses( lambda ignored, *args, **kwds: RecordingHttpConnection(self.__openFile("wb"), *args, **kwds), lambda ignored, *args, **kwds: RecordingHttpsConnection(self.__openFile("wb"), *args, **kwds) ) import GithubCredentials self.login = GithubCredentials.login self.password = GithubCredentials.password self.oauth_token = GithubCredentials.oauth_token # @todo Remove client_id and client_secret from ReplayData (as we already remove login, password and oauth_token) # self.client_id = GithubCredentials.client_id # self.client_secret = GithubCredentials.client_secret else: github.Requester.Requester.injectConnectionClasses( lambda ignored, *args, **kwds: ReplayingHttpConnection(self, self.__openFile("rb"), *args, **kwds), lambda ignored, *args, **kwds: ReplayingHttpsConnection(self, self.__openFile("rb"), *args, **kwds) ) self.login = "login" self.password = "password" self.oauth_token = "oauth_token" self.client_id = 
"client_id" self.client_secret = "client_secret" def tearDown(self): unittest.TestCase.tearDown(self) self.__closeReplayFileIfNeeded() github.Requester.Requester.resetConnectionClasses() def __openFile(self, mode): for (_, _, functionName, _) in traceback.extract_stack(): if functionName.startswith("test") or functionName == "setUp" or functionName == "tearDown": if functionName != "test": # because in class Hook(Framework.TestCase), method testTest calls Hook.test fileName = os.path.join(os.path.dirname(__file__), "ReplayData", self.__class__.__name__ + "." + functionName + ".txt") if fileName != self.__fileName: self.__closeReplayFileIfNeeded() self.__fileName = fileName self.__file = open(self.__fileName, mode) return self.__file def __closeReplayFileIfNeeded(self): if self.__file is not None: if not self.recordMode: # pragma no branch (Branch useful only when recording new tests, not used during automated tests) self.assertEqual(readLine(self.__file), "") self.__file.close() def assertListKeyEqual(self, elements, key, expectedKeys): realKeys = [key(element) for element in elements] self.assertEqual(realKeys, expectedKeys) def assertListKeyBegin(self, elements, key, expectedKeys): realKeys = [key(element) for element in elements[: len(expectedKeys)]] self.assertEqual(realKeys, expectedKeys) class TestCase(BasicTestCase): def doCheckFrame(self, obj, frame): if obj._headers == {} and frame is None: return if obj._headers is None and frame == {}: return self.assertEqual(obj._headers, frame[2]) def getFrameChecker(self): return lambda requester, obj, frame: self.doCheckFrame(obj, frame) def setUp(self): BasicTestCase.setUp(self) # Set up frame debugging github.GithubObject.GithubObject.setCheckAfterInitFlag(True) github.Requester.Requester.setDebugFlag(True) github.Requester.Requester.setOnCheckMe(self.getFrameChecker()) self.g = github.Github(self.login, self.password) def activateRecordMode(): # pragma no cover (Function useful only when recording new tests, not 
used during automated tests) BasicTestCase.recordMode = True
gpl-3.0
oxtopus/nupic
examples/opf/experiments/multistep/hotgym_best_sp_5step/description.py
17
3249
# ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2013, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- ## This file defines parameters for a prediction experiment. import os from nupic.frameworks.opf.expdescriptionhelpers import importBaseDescription # the sub-experiment configuration config = \ { 'modelParams': { 'clParams': { 'clVerbosity': 0}, 'inferenceType': 'NontemporalMultiStep', 'sensorParams': { 'encoders': { 'consumption': { 'clipInput': True, 'fieldname': u'consumption', 'n': 28, 'name': u'consumption', 'type': 'AdaptiveScalarEncoder', 'w': 21}, 'timestamp_dayOfWeek': { 'dayOfWeek': ( 21, 3), 'fieldname': u'timestamp', 'name': u'timestamp_dayOfWeek', 'type': 'DateEncoder'}, 'timestamp_timeOfDay': { 'fieldname': u'timestamp', 'name': u'timestamp_timeOfDay', 'timeOfDay': ( 21, 1), 'type': 'DateEncoder'}, 'timestamp_weekend': None}, 'verbosity': 0}, 'spParams': { }, 'tpParams': { 'activationThreshold': 13, 'minThreshold': 9, 'verbosity': 0}}} mod = importBaseDescription('../hotgym/description.py', config) locals().update(mod.__dict__)
gpl-3.0
junmin-zhu/chromium-rivertrail
native_client_sdk/src/build_tools/nacl-mono-archive.py
14
2279
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Tars up a NaCl Mono install tree and (optionally) uploads the archive."""

import optparse
import os
import sys
import tarfile

import buildbot_common

# Top-level folders of the install tree that are placed in the archive.
INSTALL_FOLDERS = ['bin', 'etc', 'include', 'lib', 'lib32', 'share']


def main(args):
  """Build the naclmono tarball and upload it unless --skip-upload is given.

  args: full argv-style list; args[0] (the program name) is ignored.
  """
  parser = optparse.OptionParser()
  parser.add_option('--install-dir', help='Install Directory',
                    dest='install_dir', default='naclmono')
  parser.add_option('--tar-path', help='Tarfile path',
                    dest='tar_path', default='naclmono_%pepperrev%.bz2')
  parser.add_option('--upload-path',
                    help='Upload path (nativeclient-mirror/nacl/nacl_sdk/XXX)',
                    dest='upload_path', default=None)
  parser.add_option('--pepper-revision', help='Pepper revision',
                    dest='pepper_revision', default=None)
  parser.add_option('--skip-upload', help='Skips upload step',
                    action="store_true", dest='skip_upload')
  options, _ = parser.parse_args(args[1:])

  # Both options are mandatory; ErrorExit aborts the process with a message.
  if not options.upload_path:
    buildbot_common.ErrorExit('--upload-path is required')
  if not options.pepper_revision:
    buildbot_common.ErrorExit('--pepper-revision is required')

  # Substitute the pepper revision into the tarball name template.
  options.tar_path = options.tar_path.replace('%pepperrev%',
                                              options.pepper_revision)

  buildbot_common.BuildStep('Archive Build')

  # Rebuild the tarball from scratch; dereference symlinks so the archive is
  # self-contained.
  buildbot_common.RemoveFile(options.tar_path)
  archive = None
  try:
    archive = tarfile.open(options.tar_path, mode='w:bz2', dereference=True)
    for subfolder in INSTALL_FOLDERS:
      archive.add(os.path.join(options.install_dir, subfolder),
                  arcname=subfolder)
  finally:
    if archive:
      archive.close()

  if not options.skip_upload:
    buildbot_common.Archive(
        os.path.basename(options.tar_path),
        'nativeclient-mirror/nacl/nacl_sdk/%s' % options.upload_path,
        cwd=os.path.dirname(os.path.abspath(options.tar_path)))


if __name__ == '__main__':
  sys.exit(main(sys.argv))
bsd-3-clause
azverkan/scons
src/engine/SCons/compat/_scons_io.py
5
1724
# # __COPYRIGHT__ # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __doc__ = """ io compatibility module for older (pre-2.6) Python versions This does not not NOT (repeat, *NOT*) provide complete io functionality. It only wraps the portions of io functionality used by SCons, in an interface that looks enough like io for our purposes. """ __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" # Use the "imp" module to protect the imports below from fixers. import imp _cStringIO = imp.load_module('cStringIO', *imp.find_module('cStringIO')) StringIO = _cStringIO.StringIO del _cStringIO # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
mit
alphagov/notifications-admin
tests/app/main/views/test_performance.py
1
4876
import random
import uuid
from datetime import date

from freezegun import freeze_time

from tests.conftest import normalize_spaces


def _get_example_performance_data():
    """Return a fake payload shaped like the performance-dashboard API response.

    Counts are fixed so the rendered page is predictable; only
    live_service_count is random because the test does not assert on it.
    """
    return {
        "total_notifications": 1_789_000_000,
        "email_notifications": 1_123_000_000,
        "sms_notifications": 987_654_321,
        "letter_notifications": 1_234_567,
        "live_service_count": random.randrange(1, 1000),
        # One entry per day; only 2021-02-21 has distinctive values so the
        # thousands-separator formatting can be asserted on.
        "notifications_by_type": [
            {
                "date": "2021-02-21",
                "emails": 1_234_567,
                "sms": 123_456,
                "letters": 123,
            },
            {
                "date": "2021-02-22",
                "emails": 1,
                "sms": 2,
                "letters": 3,
            },
            {
                "date": "2021-02-23",
                "emails": 1,
                "sms": 2,
                "letters": 3,
            },
            {
                "date": "2021-02-24",
                "emails": 1,
                "sms": 2,
                "letters": 3,
            },
            {
                "date": "2021-02-25",
                "emails": 1,
                "sms": 2,
                "letters": 3,
            },
            {
                "date": "2021-02-26",
                "emails": 1,
                "sms": 2,
                "letters": 3,
            },
            {
                "date": "2021-02-27",
                "emails": 1,
                "sms": 2,
                "letters": 3,
            },
        ],
        # Daily percentages; the page averages these (96.8% below).
        "processing_time": [
            {
                "date": "2021-02-21",
                "percentage_under_10_seconds": 99.2
            },
            {
                "date": "2021-02-22",
                "percentage_under_10_seconds": 95.3
            },
            {
                "date": "2021-02-23",
                "percentage_under_10_seconds": 95.6
            },
            {
                "date": "2021-02-24",
                "percentage_under_10_seconds": 96.7
            },
            {
                "date": "2021-02-25",
                "percentage_under_10_seconds": 95.7
            },
            {
                "date": "2021-02-26",
                "percentage_under_10_seconds": 96.5
            },
            {
                "date": "2021-02-27",
                "percentage_under_10_seconds": 98.6
            },
        ],
        "services_using_notify": [
            {
                "organisation_id": uuid.uuid4(),
                "organisation_name": "Department of Examples and Patterns",
                "service_id": uuid.uuid4(),
                "service_name": "Example service"
            },
            {
                "organisation_id": uuid.uuid4(),
                "organisation_name": "Department of Examples and Patterns",
                "service_id": uuid.uuid4(),
                "service_name": "Example service 2"
            },
            {
                "organisation_id": uuid.uuid4(),
                "organisation_name": "Department of One Service",
                "service_id": uuid.uuid4(),
                "service_name": "Example service 3"
            },
            {
                # On production there should be no live services without an
                # organisation, but this isn’t always true in people’s local
                # environments
                "organisation_id": None,
                "organisation_name": None,
                "service_id": uuid.uuid4(),
                "service_name": "Example service 4"
            },
        ],
    }


@freeze_time('2021-01-01')
def test_should_render_performance_page(
    mocker,
    client_request,
    mock_get_service_and_organisation_counts,
):
    """The performance page requests a week of stats and renders every section."""
    mock_get_performance_data = mocker.patch(
        'app.performance_dashboard_api_client.get_performance_dashboard_stats',
        return_value=_get_example_performance_data(),
    )
    page = client_request.get('main.performance')
    # The page always asks for the 7 days up to (and including) "today".
    mock_get_performance_data.assert_called_once_with(
        start_date=date(2020, 12, 25),
        end_date=date(2021, 1, 1),
    )
    assert normalize_spaces(page.select_one('main').text) == (
        'Performance data '
        # --- totals section ---
        ''
        'Messages sent since May 2016 '
        '1.8 billion total '
        '1.1 billion emails '
        '987.7 million text messages '
        '1.2 million letters '
        ''
        'Messages sent since May 2016 '
        'Date Emails Text messages Letters '
        '27 February 2021 1 2 3 '
        '26 February 2021 1 2 3 '
        '25 February 2021 1 2 3 '
        '24 February 2021 1 2 3 '
        '23 February 2021 1 2 3 '
        '22 February 2021 1 2 3 '
        '21 February 2021 1,234,567 123,456 123 '
        'Only showing the last 7 days '
        # --- processing-time section ---
        ''
        'Messages sent within 10 seconds '
        '96.8% on average '
        'Messages sent within 10 seconds '
        'Date Percentage '
        '27 February 2021 98.6% '
        '26 February 2021 96.5% '
        '25 February 2021 95.7% '
        '24 February 2021 96.7% '
        '23 February 2021 95.6% '
        '22 February 2021 95.3% '
        '21 February 2021 99.2% '
        'Only showing the last 7 days '
        # --- organisations section (grouped/counted by organisation) ---
        ''
        'Organisations using Notify '
        'There are 111 organisations and 9,999 services using Notify. '
        'Organisations using Notify '
        'Organisation Number of live services '
        'Department of Examples and Patterns 2 '
        'Department of One Service 1 '
        'No organisation 1'
    )
mit
miptliot/edx-platform
common/djangoapps/xblock_django/migrations/0003_add_new_config_models.py
24
2875
# -*- coding: utf-8 -*-
"""Create the XBlockConfiguration, XBlockStudioConfiguration and
XBlockStudioConfigurationFlag config models (auto-generated migration)."""
from __future__ import unicode_literals

import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        # changed_by points at the (swappable) user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('xblock_django', '0002_auto_20160204_0809'),
    ]

    operations = [
        # Per-XBlock enable/deprecation switches, keyed by block name.
        migrations.CreateModel(
            name='XBlockConfiguration',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
                ('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
                ('name', models.CharField(max_length=255, db_index=True)),
                ('deprecated', models.BooleanField(default=False, verbose_name='show deprecation messaging in Studio')),
                ('changed_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='Changed by')),
            ],
            options={
                'ordering': ('-change_date',),
                'abstract': False,
            },
        ),
        # Studio-facing support levels per block (and optional template).
        migrations.CreateModel(
            name='XBlockStudioConfiguration',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
                ('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
                ('name', models.CharField(max_length=255, db_index=True)),
                ('template', models.CharField(default=b'', max_length=255, blank=True)),
                ('support_level', models.CharField(default=b'us', max_length=2, choices=[(b'fs', 'Fully Supported'), (b'ps', 'Provisionally Supported'), (b'us', 'Unsupported')])),
                ('changed_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='Changed by')),
            ],
        ),
        # Single global on/off flag for the Studio configuration feature.
        migrations.CreateModel(
            name='XBlockStudioConfigurationFlag',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
                ('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
                ('changed_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='Changed by')),
            ],
        ),
    ]
agpl-3.0
xaviercobain88/framework-python
build/lib.linux-i686-2.7/openerp/addons/account/account_financial_report.py
52
7727
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import time
from datetime import datetime
from dateutil.relativedelta import relativedelta
from operator import itemgetter

from openerp import netsvc
from openerp import pooler
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _

# ---------------------------------------------------------
# Account Financial Report
# ---------------------------------------------------------

class account_financial_report(osv.osv):
    """Tree-structured definition of a financial report line.

    Each record is a node; its balance/debit/credit are computed from linked
    accounts, account types, another report, or its children, depending on
    'type'.
    """
    _name = "account.financial.report"
    _description = "Account Report"

    def _get_level(self, cr, uid, ids, field_name, arg, context=None):
        '''Returns a dictionary with key=the ID of a record and value = the
        level (depth) of this record in the tree structure: 0 for roots,
        parent's level + 1 otherwise.'''
        res = {}
        for report in self.browse(cr, uid, ids, context=context):
            level = 0
            if report.parent_id:
                # level is itself a stored function field, so the parent's
                # value is already available here.
                level = report.parent_id.level + 1
            res[report.id] = level
        return res

    def _get_children_by_order(self, cr, uid, ids, context=None):
        '''Returns a flat list with, for each given id, the id itself followed
        by all its descendants, computed recursively and sorted by sequence.
        Ready for the printing (pre-order traversal).'''
        res = []
        for id in ids:
            res.append(id)
            ids2 = self.search(cr, uid, [('parent_id', '=', id)], order='sequence ASC', context=context)
            res += self._get_children_by_order(cr, uid, ids2, context=context)
        return res

    def _get_balance(self, cr, uid, ids, field_names, args, context=None):
        '''Returns a dictionary with key=the ID of a record and value=a dict
        mapping each requested field name to the amount computed for this
        record.  The computation depends on the record type:
          'accounts'       : sum over the linked accounts
          'account_type'   : sum over leaf accounts having such an account type
          'account_report' : the amount of the related report
          'sum'            : the sum of the children of this record
                             (aka a 'view' record)'''
        account_obj = self.pool.get('account.account')
        res = {}
        for report in self.browse(cr, uid, ids, context=context):
            if report.id in res:
                continue
            # Initialise every requested field to 0.0 before accumulating.
            res[report.id] = dict((fn, 0.0) for fn in field_names)
            if report.type == 'accounts':
                # it's the sum of the linked accounts
                for a in report.account_ids:
                    for field in field_names:
                        res[report.id][field] += getattr(a, field)
            elif report.type == 'account_type':
                # it's the sum of the leaf accounts with such an account type
                report_types = [x.id for x in report.account_type_ids]
                account_ids = account_obj.search(cr, uid, [('user_type','in', report_types), ('type','!=','view')], context=context)
                for a in account_obj.browse(cr, uid, account_ids, context=context):
                    for field in field_names:
                        res[report.id][field] += getattr(a, field)
            elif report.type == 'account_report' and report.account_report_id:
                # it's the amount of the linked report (recursive call)
                res2 = self._get_balance(cr, uid, [report.account_report_id.id], field_names, False, context=context)
                for key, value in res2.items():
                    for field in field_names:
                        res[report.id][field] += value[field]
            elif report.type == 'sum':
                # it's the sum of the children of this account.report
                res2 = self._get_balance(cr, uid, [rec.id for rec in report.children_ids], field_names, False, context=context)
                for key, value in res2.items():
                    for field in field_names:
                        res[report.id][field] += value[field]
        return res

    _columns = {
        'name': fields.char('Report Name', size=128, required=True, translate=True),
        'parent_id': fields.many2one('account.financial.report', 'Parent'),
        'children_ids': fields.one2many('account.financial.report', 'parent_id', 'Account Report'),
        'sequence': fields.integer('Sequence'),
        # balance/debit/credit share one multi-computation (_get_balance).
        'balance': fields.function(_get_balance, 'Balance', multi='balance'),
        'debit': fields.function(_get_balance, 'Debit', multi='balance'),
        'credit': fields.function(_get_balance, 'Credit', multi="balance"),
        # Stored so _get_level can read the parent's level without recursion.
        'level': fields.function(_get_level, string='Level', store=True, type='integer'),
        'type': fields.selection([
            ('sum','View'),
            ('accounts','Accounts'),
            ('account_type','Account Type'),
            ('account_report','Report Value'),
            ],'Type'),
        'account_ids': fields.many2many('account.account', 'account_account_financial_report', 'report_line_id', 'account_id', 'Accounts'),
        'account_report_id': fields.many2one('account.financial.report', 'Report Value'),
        'account_type_ids': fields.many2many('account.account.type', 'account_account_financial_report_type', 'report_id', 'account_type_id', 'Account Types'),
        'sign': fields.selection([(-1, 'Reverse balance sign'), (1, 'Preserve balance sign')], 'Sign on Reports', required=True, help='For accounts that are typically more debited than credited and that you would like to print as negative amounts in your reports, you should reverse the sign of the balance; e.g.: Expense account. The same applies for accounts that are typically more credited than debited and that you would like to print as positive amounts in your reports; e.g.: Income account.'),
        'display_detail': fields.selection([
            ('no_detail','No detail'),
            ('detail_flat','Display children flat'),
            ('detail_with_hierarchy','Display children with hierarchy')
            ], 'Display details'),
        'style_overwrite': fields.selection([
            (0, 'Automatic formatting'),
            (1,'Main Title 1 (bold, underlined)'),
            (2,'Title 2 (bold)'),
            (3,'Title 3 (bold, smaller)'),
            (4,'Normal Text'),
            (5,'Italic Text (smaller)'),
            (6,'Smallest Text'),
            ],'Financial Report Style', help="You can set up here the format you want this record to be displayed. If you leave the automatic formatting, it will be computed based on the financial reports hierarchy (auto-computed field 'level')."),
    }

    _defaults = {
        'type': 'sum',
        'display_detail': 'detail_flat',
        'sign': 1,
        'style_overwrite': 0,
    }

account_financial_report()

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
lmazuel/azure-sdk-for-python
azure-mgmt-network/azure/mgmt/network/v2017_10_01/models/resource_navigation_link_py3.py
9
2202
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from .sub_resource import SubResource class ResourceNavigationLink(SubResource): """ResourceNavigationLink resource. Variables are only populated by the server, and will be ignored when sending a request. :param id: Resource ID. :type id: str :param linked_resource_type: Resource type of the linked resource. :type linked_resource_type: str :param link: Link to the external resource :type link: str :ivar provisioning_state: Provisioning state of the ResourceNavigationLink resource. :vartype provisioning_state: str :param name: Name of the resource that is unique within a resource group. This name can be used to access the resource. :type name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str """ _validation = { 'provisioning_state': {'readonly': True}, 'etag': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'linked_resource_type': {'key': 'properties.linkedResourceType', 'type': 'str'}, 'link': {'key': 'properties.link', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, } def __init__(self, *, id: str=None, linked_resource_type: str=None, link: str=None, name: str=None, **kwargs) -> None: super(ResourceNavigationLink, self).__init__(id=id, **kwargs) self.linked_resource_type = linked_resource_type self.link = link self.provisioning_state = None self.name = name self.etag = None
mit
nagyistoce/googlemock
scripts/fuse_gmock_files.py
729
8606
#!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""fuse_gmock_files.py v0.1.0
Fuses Google Mock and Google Test source code into two .h files and a .cc file.

SYNOPSIS
       fuse_gmock_files.py [GMOCK_ROOT_DIR] OUTPUT_DIR

       Scans GMOCK_ROOT_DIR for Google Mock and Google Test source
       code, assuming Google Test is in the GMOCK_ROOT_DIR/gtest
       sub-directory, and generates three files:
       OUTPUT_DIR/gtest/gtest.h, OUTPUT_DIR/gmock/gmock.h, and
       OUTPUT_DIR/gmock-gtest-all.cc.  Then you can build your tests
       by adding OUTPUT_DIR to the include search path and linking
       with OUTPUT_DIR/gmock-gtest-all.cc.  These three files contain
       everything you need to use Google Mock.  Hence you can
       "install" Google Mock by copying them to wherever you want.

       GMOCK_ROOT_DIR can be omitted and defaults to the parent
       directory of the directory holding this script.

EXAMPLES
       ./fuse_gmock_files.py fused_gmock
       ./fuse_gmock_files.py path/to/unpacked/gmock fused_gmock

This tool is experimental.  In particular, it assumes that there is no
conditional inclusion of Google Mock or Google Test headers.  Please
report any problems to googlemock@googlegroups.com.  You can read
http://code.google.com/p/googlemock/wiki/CookBook for more
information.
"""

__author__ = 'wan@google.com (Zhanyong Wan)'

import os
import re
import sys

# NOTE: the deprecated 'sets' module (removed in Python 3) is no longer
# imported; the built-in set type is a drop-in replacement.

# We assume that this file is in the scripts/ directory in the Google
# Mock root directory.
DEFAULT_GMOCK_ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')

# We need to call into gtest/scripts/fuse_gtest_files.py.
sys.path.append(os.path.join(DEFAULT_GMOCK_ROOT_DIR, 'gtest/scripts'))
import fuse_gtest_files
gtest = fuse_gtest_files

# Regex for matching '#include "gmock/..."'.
INCLUDE_GMOCK_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(gmock/.+)"')

# Where to find the source seed files.
GMOCK_H_SEED = 'include/gmock/gmock.h'
GMOCK_ALL_CC_SEED = 'src/gmock-all.cc'

# Where to put the generated files.
GTEST_H_OUTPUT = 'gtest/gtest.h'
GMOCK_H_OUTPUT = 'gmock/gmock.h'
GMOCK_GTEST_ALL_CC_OUTPUT = 'gmock-gtest-all.cc'


def GetGTestRootDir(gmock_root):
  """Returns the root directory of Google Test."""

  return os.path.join(gmock_root, 'gtest')


def ValidateGMockRootDir(gmock_root):
  """Makes sure gmock_root points to a valid gmock root directory.

  The function aborts the program on failure.
  """

  gtest.ValidateGTestRootDir(GetGTestRootDir(gmock_root))
  gtest.VerifyFileExists(gmock_root, GMOCK_H_SEED)
  gtest.VerifyFileExists(gmock_root, GMOCK_ALL_CC_SEED)


def ValidateOutputDir(output_dir):
  """Makes sure output_dir points to a valid output directory.

  The function aborts the program on failure.
  """

  gtest.VerifyOutputFile(output_dir, gtest.GTEST_H_OUTPUT)
  gtest.VerifyOutputFile(output_dir, GMOCK_H_OUTPUT)
  gtest.VerifyOutputFile(output_dir, GMOCK_GTEST_ALL_CC_OUTPUT)


def FuseGMockH(gmock_root, output_dir):
  """Scans folder gmock_root to generate gmock/gmock.h in output_dir."""

  output_file = open(os.path.join(output_dir, GMOCK_H_OUTPUT), 'w')
  processed_files = set()  # Holds all gmock headers we've processed.

  def ProcessFile(gmock_header_path):
    """Processes the given gmock header file."""

    # We don't process the same header twice.
    if gmock_header_path in processed_files:
      return

    processed_files.add(gmock_header_path)

    # Reads each line in the given gmock header.
    for line in open(os.path.join(gmock_root, gmock_header_path), 'r'):
      m = INCLUDE_GMOCK_FILE_REGEX.match(line)
      if m:
        # It's '#include "gmock/..."' - let's process it recursively.
        ProcessFile('include/' + m.group(1))
      else:
        m = gtest.INCLUDE_GTEST_FILE_REGEX.match(line)
        if m:
          # It's '#include "gtest/foo.h"'.  We translate it to
          # "gtest/gtest.h", regardless of what foo is, since all
          # gtest headers are fused into gtest/gtest.h.

          # There is no need to #include gtest.h twice.
          if gtest.GTEST_H_SEED not in processed_files:
            processed_files.add(gtest.GTEST_H_SEED)
            output_file.write('#include "%s"\n' % (gtest.GTEST_H_OUTPUT,))
        else:
          # Otherwise we copy the line unchanged to the output file.
          output_file.write(line)

  ProcessFile(GMOCK_H_SEED)
  output_file.close()


def FuseGMockAllCcToFile(gmock_root, output_file):
  """Scans folder gmock_root to fuse gmock-all.cc into output_file."""

  processed_files = set()

  def ProcessFile(gmock_source_file):
    """Processes the given gmock source file."""

    # We don't process the same #included file twice.
    if gmock_source_file in processed_files:
      return

    processed_files.add(gmock_source_file)

    # Reads each line in the given gmock source file.
    for line in open(os.path.join(gmock_root, gmock_source_file), 'r'):
      m = INCLUDE_GMOCK_FILE_REGEX.match(line)
      if m:
        # It's '#include "gmock/foo.h"'.  We treat it as '#include
        # "gmock/gmock.h"', as all other gmock headers are being fused
        # into gmock.h and cannot be #included directly.

        # There is no need to #include "gmock/gmock.h" more than once.
        if GMOCK_H_SEED not in processed_files:
          processed_files.add(GMOCK_H_SEED)
          output_file.write('#include "%s"\n' % (GMOCK_H_OUTPUT,))
      else:
        m = gtest.INCLUDE_GTEST_FILE_REGEX.match(line)
        if m:
          # It's '#include "gtest/..."'.
          # There is no need to #include gtest.h as it has been
          # #included by gtest-all.cc.
          pass
        else:
          m = gtest.INCLUDE_SRC_FILE_REGEX.match(line)
          if m:
            # It's '#include "src/foo"' - let's process it recursively.
            ProcessFile(m.group(1))
          else:
            # Otherwise we copy the line unchanged to the output file.
            output_file.write(line)

  ProcessFile(GMOCK_ALL_CC_SEED)


def FuseGMockGTestAllCc(gmock_root, output_dir):
  """Scans folder gmock_root to generate gmock-gtest-all.cc in output_dir."""

  output_file = open(os.path.join(output_dir, GMOCK_GTEST_ALL_CC_OUTPUT), 'w')
  # First, fuse gtest-all.cc into gmock-gtest-all.cc.
  gtest.FuseGTestAllCcToFile(GetGTestRootDir(gmock_root), output_file)
  # Next, append fused gmock-all.cc to gmock-gtest-all.cc.
  FuseGMockAllCcToFile(gmock_root, output_file)
  output_file.close()


def FuseGMock(gmock_root, output_dir):
  """Fuses gtest.h, gmock.h, and gmock-gtest-all.h."""

  ValidateGMockRootDir(gmock_root)
  ValidateOutputDir(output_dir)

  gtest.FuseGTestH(GetGTestRootDir(gmock_root), output_dir)
  FuseGMockH(gmock_root, output_dir)
  FuseGMockGTestAllCc(gmock_root, output_dir)


def main():
  argc = len(sys.argv)
  if argc == 2:
    # fuse_gmock_files.py OUTPUT_DIR
    FuseGMock(DEFAULT_GMOCK_ROOT_DIR, sys.argv[1])
  elif argc == 3:
    # fuse_gmock_files.py GMOCK_ROOT_DIR OUTPUT_DIR
    FuseGMock(sys.argv[1], sys.argv[2])
  else:
    # print(...) with a single argument behaves identically under
    # Python 2's print statement and Python 3's print function.
    print(__doc__)
    sys.exit(1)


if __name__ == '__main__':
  main()
bsd-3-clause
rbprogrammer/advanced_python_topics
course-material/py2/solutions/06 Metaprogramming/Shapes/Solution.c/shape.py
1
2495
#!/usr/local/bin/python2 # Solution.c import abc import math class Shape(object): """ Shape abstract base class. """ __metaclass__ = abc.ABCMeta """ ===================================================================== Place your shape functions here. The functions needed are: translateX() - adjust the shape's horizontal coordinates translateY() - adjust the shape's vertical coordinates virtual area() - calculate the shape's area ====================================================================== """ def translateX(self, offset): """ adjust the shape's horizontal coordinates """ self._left += offset self._right += offset def translateY(self, offset): """ adjust the shape's vertical coordinates """ self._top += offset self._bottom += offset @abc.abstractmethod def area(self): """ calculate the shape's area """ return NotImplemented # default action """ ====================================================================== Prewritten shape functions ====================================================================== """ def __init__(self, x0, y0, x1, y1): self._left = x0 self._top = y0 self._right = x1 self._bottom = y1 def display_coords(self): print "Coordinates: ( %d, %d, %d, %d )" % \ (self._left, self._top, self._right, self._bottom) @abc.abstractmethod def __del__(self): pass """ In questions b-e, you will be asked to define derived classes to represent a circle, a parallelogram and a rectangle """ class Circle(Shape): def __init__(self, cx, cy, rad): Shape.__init__(self, cx-rad, cy-rad, cx+rad, cy+rad) def area(self): radius = (self._right - self._left) / 2 return math.pi * radius * radius def __del__(self): print "Circle destructor" class Parallelogram(Shape): def __init__(self, x0, y0, x1, y1, dx): Shape.__init__(self, x0, y0, x1, y1) self.__delta = dx def __del__(self): print "Parallelogram destructor" def area(self): base = abs(self._right - self._left - self.__delta) height = abs(self._bottom - self._top) return base * height
apache-2.0
saviaga/crowdsource-platform
crowdsourcing/forms.py
9
7962
__author__ = 'd'

from csp import settings
from django import forms
from django.contrib.auth.models import User


class RegistrationForm(forms.Form):
    """Account registration form.

    Collects email, first/last name and a password typed twice.
    Cross-field validation lives in :meth:`clean`.
    """

    email = forms.EmailField(label='', widget=forms.TextInput(attrs={
        'class': 'form-control',
        'placeholder': 'Email',
        'required': '',
        'id': 'register__email',
        'ng-model': 'register.email',
        'type': 'email',
    }))
    first_name = forms.CharField(label='', widget=forms.TextInput(attrs={
        'class': 'form-control',
        'placeholder': 'First Name',
        'required': '',
        'id': 'register__first_name',
        'ng-model': 'register.first_name',
    }))
    last_name = forms.CharField(label='', widget=forms.TextInput(attrs={
        'class': 'form-control',
        'placeholder': 'Last Name',
        'required': '',
        'id': 'register__last_name',
        'ng-model': 'register.last_name',
    }))
    password1 = forms.CharField(label='', widget=forms.PasswordInput(attrs={
        'class': 'form-control',
        'required': '',
        'placeholder': 'Password - at least 8 characters long',
        'id': 'register__password1',
        'ng-model': 'register.password1',
    }))
    password2 = forms.CharField(label='', widget=forms.PasswordInput(attrs={
        'class': 'form-control',
        'required': '',
        'placeholder': 'Confirm Password',
        'id': 'register__password2',
        'ng-model': 'register.password2',
    }))

    def clean(self):
        """Cross-field validation for registration.

        Fix: the original read ``self.email`` / ``self.password1`` /
        ``self.password2`` -- attributes that do not exist on a bound
        ``forms.Form`` (field values live in ``self.cleaned_data``), so
        validation raised ``AttributeError`` instead of validating.  It
        also returned ``True``; Django expects ``clean()`` to return the
        cleaned-data dict.

        Raises:
            forms.ValidationError: if registration is disabled, the email
                is already in use, the password is too short, or the two
                passwords differ.
        """
        if not settings.REGISTRATION_ALLOWED:
            raise forms.ValidationError("Currently registrations are not allowed.")
        try:
            if User.objects.filter(email__iexact=self.cleaned_data['email']):
                raise forms.ValidationError("Email already in use.")
            if len(self.cleaned_data['password1']) < 8:
                raise forms.ValidationError("Password needs to be at least eight characters long.")
            if self.cleaned_data['password1'] != self.cleaned_data['password2']:
                raise forms.ValidationError("The two password fields didn't match.")
        except KeyError:
            # A field failed its own validation and is absent from
            # cleaned_data; its field-level error is already recorded.
            pass
        return self.cleaned_data


class PasswordResetForm(forms.Form):
    """Form for choosing a new password (typed twice)."""

    password1 = forms.CharField(label='', widget=forms.PasswordInput(attrs={
        'class': 'form-control',
        'required': '',
        'placeholder': 'Password - at least 8 characters long',
        'id': 'reset_password__password1',
        'ng-model': 'reset_password.password1',
    }))
    password2 = forms.CharField(label='', widget=forms.PasswordInput(attrs={
        'class': 'form-control',
        'required': '',
        'placeholder': 'Confirm Password',
        'id': 'reset_password__password2',
        'ng-model': 'reset_password.password2',
    }))

    def clean(self):
        """Cross-field validation for password reset.

        Fix: reads the passwords from ``self.cleaned_data`` instead of the
        nonexistent ``self.password1`` / ``self.password2`` attributes, and
        returns the cleaned-data dict as Django expects.
        """
        if not settings.PASSWORD_RESET_ALLOWED:
            raise forms.ValidationError("Currently password resetting is not allowed.")
        try:
            if len(self.cleaned_data['password1']) < 8:
                raise forms.ValidationError("Password needs to be at least eight characters long.")
            if self.cleaned_data['password1'] != self.cleaned_data['password2']:
                raise forms.ValidationError("The two password fields didn't match.")
        except KeyError:
            # Missing key means field-level validation already failed.
            pass
        return self.cleaned_data


class ForgotPasswordForm(forms.Form):
    """Form that asks for the email of the account to recover."""

    email = forms.EmailField(label='', widget=forms.TextInput(attrs={
        'class': 'form-control',
        'placeholder': 'Email',
        'required': '',
        'id': 'forgot_password__email',
        'ng-model': 'forgot_password.email',
        'type': 'email',
    }))

    def clean(self):
        """Reject emails that match no existing user.

        Fix: reads the email from ``self.cleaned_data`` instead of the
        nonexistent ``self.email`` attribute, and returns cleaned_data.
        """
        try:
            if not User.objects.filter(email__iexact=self.cleaned_data['email']):
                raise forms.ValidationError("Invalid email entered.")
        except KeyError:
            pass
        return self.cleaned_data


class LoginForm(forms.Form):
    """Login form: email-or-username plus password (no cross-field checks)."""

    form_name = 'login_form'

    email = forms.CharField(label='', widget=forms.TextInput(attrs={
        'class': 'form-control',
        'placeholder': 'Email or Username',
        'required': '',
        'ng-model': 'login.username',
        'id': 'login__username',
    }))
    password1 = forms.CharField(label='', widget=forms.PasswordInput(attrs={
        'class': 'form-control',
        'required': '',
        'placeholder': 'Password',
        'ng-model': 'login.password',
        'id': 'login__password',
    }))
mit
cloudsidekick/catoclient
catoclient/commands/describetaskparameters.py
1
1730
#########################################################################
# Copyright 2011 Cloud Sidekick
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#########################################################################

import catoclient.catocommand
from catoclient.param import Param


class DescribeTaskParameters(catoclient.catocommand.CatoCommand):
    """CLI command that prints a human-readable description of the
    parameters defined for a Task (optionally for a specific version)."""

    Description = 'Describes the parameters defined for a task in a text readable format.'
    API = 'describe_task_parameters'
    Examples = '''
_Print the parameters of the default version of a task_

    cato-describe-task-parameters -t "mytask01"

_Print the parameters of a specific version of a task_

    cato-describe-task-parameters -t "new example" -v "2.000"
'''
    Options = [
        Param(name='task', short_name='t', long_name='task',
              optional=False, ptype='string',
              doc='The ID or Name of a Task.'),
        Param(name='version', short_name='v', long_name='version',
              optional=True, ptype='string',
              doc='An optional specific Task Version. (Default if omitted.)'),
    ]

    def main(self):
        """Forward the 'task'/'version' options to the API and print the
        text the service returns."""
        response = self.call_api(self.API, ['task', 'version'])
        print(response)
apache-2.0
VitalPet/odoo
addons/mrp_repair/wizard/cancel_repair.py
52
3699
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import osv,fields
from openerp.tools.translate import _


# Transient (wizard) model backing the "Cancel Repair" confirmation dialog.
class repair_cancel(osv.osv_memory):
    _name = 'mrp.repair.cancel'
    _description = 'Cancel Repair'

    def cancel_repair(self, cr, uid, ids, context=None):
        """ Cancels the repair
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        @return: action dict closing the wizard window
        """
        if context is None:
            context = {}
        # The repair order being cancelled is the active record of the
        # caller, not one of this wizard's own ids.
        record_id = context and context.get('active_id', False) or False
        assert record_id, _('Active ID not Found')
        repair_order_obj = self.pool.get('mrp.repair')
        # NOTE(review): repair_line_obj is fetched but never used below.
        repair_line_obj = self.pool.get('mrp.repair.line')
        repair_order = repair_order_obj.browse(cr, uid, record_id, context=context)

        # Only invoiced orders (or ones that never invoice) may be cancelled
        # directly; otherwise the user is warned.
        if repair_order.invoiced or repair_order.invoice_method == 'none':
            repair_order_obj.action_cancel(cr, uid, [record_id], context=context)
        else:
            raise osv.except_osv(_('Warning!'),_('Repair order is not invoiced.'))

        return {'type': 'ir.actions.act_window_close'}

    def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        """ Changes the view dynamically
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param context: A standard dictionary
        @return: New arch of view.
        """
        if context is None:
            context = {}
        res = super(repair_cancel, self).fields_view_get(cr, uid, view_id=view_id,
                                                         view_type=view_type,
                                                         context=context,
                                                         toolbar=toolbar,
                                                         submenu=False)
        record_id = context and context.get('active_id', False) or False
        active_model = context.get('active_model')

        # Leave the default view untouched when the wizard is opened without
        # an active repair order.
        if not record_id or (active_model and active_model != 'mrp.repair'):
            return res

        repair_order = self.pool.get('mrp.repair').browse(cr, uid, record_id, context=context)
        # For not-yet-invoiced orders, replace the view with a plain
        # yes/cancel confirmation form.
        if not repair_order.invoiced:
            res['arch'] = """
                <form string="Cancel Repair" version="7.0">
                    <header>
                        <button name="cancel_repair" string="_Yes" type="object" class="oe_highlight"/>
                        or
                        <button string="Cancel" class="oe_link" special="cancel"/>
                    </header>
                    <label string="Do you want to continue?"/>
                </form>
            """
        return res

repair_cancel()

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
DerekSelander/LLDB
lldb_commands/pmodule.py
1
11903
# MIT License
#
# Copyright (c) 2017 Derek Selander
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import lldb
import shlex
import optparse
import os
import textwrap
from stat import *


def __lldb_init_module(debugger, internal_dict):
    # Called by LLDB when this script is imported; registers the
    # `pmodule` command with its help text.
    debugger.HandleCommand(
        'command script add -f pmodule.pmodule pmodule -h "Generates DTrace script to profile module"')


def pmodule(debugger, command, exe_ctx, result, internal_dict):
    '''Creates a custom dtrace script that profiles modules in an executable
    based upon its memory layout and ASLR. Provide no arguments w/ '-a' if you
    want a count of all the modules firing. Provide a module if you want to
    dump all the methods as they occur.

    pmodule [[MODULENAME]...]

    You have the option to use objc or non-objc (i.e. objc$target or pid$target)
    Through the -n argument

    Examples:

    # Trace all Objective-C code in UIKit
    pmodule UIKit

    # Trace all non-Objective-C code in libsystem_kernel.dylib (i.e. pid$target:libsystem_kernel.dylib::entry)
    pmodule -n libsystem_kernel.dylib

    # Dump errrything. Only displays count of function calls from modules after you end the script. Warning slow
    pmodule -a
    '''
    command_args = shlex.split(command)
    parser = generate_option_parser()
    target = exe_ctx.target
    try:
        (options, args) = parser.parse_args(command_args)
    except:
        # optparse errors out of band inside LLDB; surface a friendly
        # message rather than letting the exception escape.
        result.SetError("option parsing failed")
        return

    pid = exe_ctx.process.id
    # module_parirs = get_module_pair(, target)
    # NOTE(review): is_cplusplus is unused in this function; the real
    # decision happens inside generate_dtrace_script.
    is_cplusplus = options.non_objectivec

    if not args and not (options.all_modules or options.all_modules_output):
        result.SetError('Need a module or use the -a option. You can list all modules by "image list -b"')
        return

    dtrace_script = generate_dtrace_script(target, options, args)
    if options.debug:
        # -g: show the generated DTrace source with line numbers instead
        # of writing/running it.
        source = '\n'.join(['# '+ format(idx + 1, '2') +': ' + line for idx, line in enumerate(dtrace_script.split('\n'))])
        result.AppendMessage(source)
        return

    filename = '/tmp/lldb_dtrace_pmodule_' + ''.join(args)
    create_or_touch_filepath(filename, dtrace_script)
    # dtrace requires sudo, which LLDB cannot prompt for; put the
    # ready-to-run command on the macOS clipboard instead.
    copycommand = 'echo \"sudo {0} -p {1} 2>/dev/null\" | pbcopy'
    os.system(copycommand.format(filename, pid))
    result.AppendMessage("Copied to clipboard. Paste in Terminal.")

    # 10.12.3 beta broke AppleScript's "do script" API. Dammit. Using pbcopy instead...
    # dtraceCommand = 'osascript -e \'tell application \"Terminal\" to activate & do script \"sudo {0} -p {1} \"\' 2>/dev/null'
    # os.system(dtraceCommand.format(filename, pid))
    # result.AppendMessage("Continuing in different Terminal tab...")
    result.SetStatus(lldb.eReturnStatusSuccessFinishNoResult)


def generate_conditional_for_module_name(module_name, target, options):
    # Builds the DTrace predicate ("/ ... /") limiting probes to the
    # module's __TEXT address range; with -r, also requires that the
    # caller's return address lies OUTSIDE that range (root calls only).
    pair = get_module_pair(module_name, target)

    if not options.non_objectivec and options.root_function:
        template = '/ ({0} > *(uintptr_t *)copyin(uregs[R_SP], sizeof(uintptr_t)) || *(uintptr_t *)copyin(uregs[R_SP], sizeof(uintptr_t)) > {1}) && {0} <= uregs[R_PC] && uregs[R_PC] <= {1} /\n'
    elif options.non_objectivec and not options.root_function:
        template = '\n'
    elif not options.non_objectivec and not options.root_function:
        template = '/ {0} <= uregs[R_PC] && uregs[R_PC] <= {1} /\n'
    elif options.non_objectivec and options.root_function:
        template = '/ ({0} > *(uintptr_t *)copyin(uregs[R_SP], sizeof(uintptr_t)) || *(uintptr_t *)copyin(uregs[R_SP], sizeof(uintptr_t)) > {1}) /\n'
    return template.format(pair[0], pair[1])


def generate_dump_all_module_script(target):
    # NOTE(review): this helper appears unused; generate_dtrace_script
    # inlines equivalent logic for the -a path -- confirm before removing.
    dtrace_script = r'''
 this->method_counter = \"Unknown\";
 program_counter = uregs[R_PC];
'''
    dtrace_template = "this->method_counter = {} <= program_counter && program_counter <= {} ? \"{}\" : this->method_counter;\n"
    dtrace_template = textwrap.dedent(dtrace_template)
    for module in target.modules:
        section = module.FindSection("__TEXT")
        lower_bounds = section.GetLoadAddress(target)
        upper_bounds = lower_bounds + section.file_size
        module_name = module.file.basename
        if "_lldb_" not in module_name:
            dtrace_script += dtrace_template.format(lower_bounds, upper_bounds, module_name)
    return dtrace_script


def create_or_touch_filepath(filepath, dtrace_script):
    # Writes the generated script to disk and marks it executable so the
    # user can run it directly with sudo.
    file = open(filepath, "w")
    file.write(dtrace_script)
    file.flush()
    st = os.stat(filepath)
    os.chmod(filepath, st.st_mode | S_IEXEC)
    file.close()


def generate_dtrace_script(target, options, args):
    # Assembles the full DTrace source: shebang/pragmas, a BEGIN banner,
    # then one probe clause per requested module (or a catch-all for -a/-A).
    is_cplusplus = options.non_objectivec
    dtrace_script = '''#!/usr/sbin/dtrace -s
#pragma D option quiet
'''
    if options.flow_indent:
        dtrace_script += '#pragma D option flowindent'

    dtrace_script += '''
dtrace:::BEGIN {{
    printf("Starting... Hit Ctrl-C to end. Observing {} functions in {}\\n");
}}
'''.format('straight up, normal' if is_cplusplus else 'Objective-C', (', ').join(args))

    dtrace_template = ''
    pid = target.process.id
    is_cplusplus = options.non_objectivec
    query_template = '{}$target:{}::entry\n'
    if options.all_modules or options.all_modules_output:
        # -a/-A: one unfiltered probe; module attribution is done by
        # comparing the PC against each module's __TEXT range.
        if is_cplusplus:
            dtrace_script += query_template.format('pid', '')
        else:
            dtrace_script += query_template.format('objc', '')

        if options.all_modules_output and not options.non_objectivec:
            dtrace_script += '{\nprintf("0x%012p %c[%s %s]\\n", uregs[R_RDI], probefunc[0], probemod, (string)&probefunc[1]);\n}'
        elif options.all_modules_output and options.non_objectivec:
            dtrace_script += '{\nprintf("0x%012p %s, %s\\n", uregs[R_RDI], probemod, probefunc);\n}'
        else:
            dtrace_script += '{\nprogram_counter = uregs[R_PC];\nthis->method_counter = \"Unknown\";'
            # TODO 64 only change to universal arch
            dtrace_template += "this->method_counter = {} <= program_counter && program_counter <= {} ? \"{}\" : this->method_counter;\n"
            dtrace_template = textwrap.dedent(dtrace_template)
            for module in target.modules:
                section = module.FindSection("__TEXT")
                lower_bounds = section.GetLoadAddress(target)
                upper_bounds = lower_bounds + section.file_size
                module_name = module.file.basename
                if "_lldb_" not in module_name:
                    dtrace_script += dtrace_template.format(lower_bounds, upper_bounds, module_name)
            dtrace_script += "\n@num[this->method_counter] = count();\n}\n"
    else:
        for module_name in args:
            # uregs[R_RDI]
            # Objective-C logic: objc$target:::entry / {} <= uregs[R_PC] && uregs[R_PC] <= {} / { }
            if not is_cplusplus:
                dtrace_script += query_template.format('objc', '')
                dtrace_script += generate_conditional_for_module_name(module_name, target, options)

            # Non-Objective-C logic: pid$target:Module::entry { }
            if is_cplusplus:
                dtrace_script += query_template.format('pid', module_name)
                dtrace_script += generate_conditional_for_module_name(module_name, target, options)
                if options.timestamp:
                    dtrace_script += '{\n printf("%Y [%s] %s\\n", walltimestamp, probemod, probefunc);\n'
                else:
                    dtrace_script += '{\n printf("[%s] %s\\n", probemod, probefunc);\n'
            else:
                if options.timestamp:
                    dtrace_script += '{\n printf("%Y 0x%012p %c[%s %s]\\n", walltimestamp, uregs[R_RDI], probefunc[0], probemod, (string)&probefunc[1]);\n'
                else:
                    dtrace_script += '{\n printf("0x%012p %c[%s %s]\\n", uregs[R_RDI], probefunc[0], probemod, (string)&probefunc[1]);\n'

            # Logic to append counting at the termination of script
            if options.count:
                dtrace_script += ' @numWrites{}[probefunc] = count();\n'.format(os.path.splitext(module_name)[0])
            dtrace_script += '}\n'
    return dtrace_script


def get_module_pair(module_name, target):
    # Returns (load address, end address) of the module's __TEXT section.
    module = target.FindModule(lldb.SBFileSpec(module_name))
    if not module.file.exists:
        # NOTE(review): `result` is not defined in this function's scope,
        # so this error path raises NameError instead of reporting the
        # missing module -- needs the SBCommandReturnObject passed in.
        result.SetError(
            "Unable to open module name '{}', to see list of images use 'image list -b'".format(module_name))
        return
    section = module.FindSection("__TEXT")
    lower_bounds = section.GetLoadAddress(target)
    upper_bounds = lower_bounds + section.file_size
    return (lower_bounds, upper_bounds)


def generate_option_parser():
    # Declares the command's flags; optparse is used because LLDB's Python
    # scripting predates argparse adoption in this codebase.
    usage = "usage: %prog [options] arg1 [arg2...]"
    parser = optparse.OptionParser(usage=usage, prog='pmodule')
    parser.add_option("-n", "--non_objectivec",
                      action="store_true",
                      default=False,
                      dest="non_objectivec",
                      help="Use pid$target instead of objc$target")
    parser.add_option("-c", "--count",
                      action="store_true",
                      default=False,
                      dest="count",
                      help="Count method calls for framework")
    parser.add_option("-a", "--all_modules",
                      action="store_true",
                      default=False,
                      dest="all_modules",
                      help="Profile all modules. If this is selected, specific modules are ignored and counts are returned when script finishes")
    parser.add_option("-A", "--all_modules_output",
                      action="store_true",
                      default=False,
                      dest="all_modules_output",
                      help="Dumps EVERYTHING. Only execute single commands with this one in lldb")
    parser.add_option("-r", "--root_function",
                      action="store_true",
                      default=False,
                      dest="root_function",
                      help="Only prints the root functions if it's called from another module")
    parser.add_option("-f", "--flow_indent",
                      action="store_true",
                      default=False,
                      dest="flow_indent",
                      help="Adds the flow indent flag")
    parser.add_option("-t", "--timestamp",
                      action="store_true",
                      default=False,
                      dest="timestamp",
                      help="Prints out an approximate timestamp of when the calls were made")
    parser.add_option("-g", "--debug",
                      action="store_true",
                      default=False,
                      dest="debug",
                      help="Doesn't copy the script, just prints it out to stderr")
    return parser
gpl-2.0
miles0411/pm
venv/lib/python2.7/site-packages/pip/commands/freeze.py
311
2330
from __future__ import absolute_import

import sys

import pip
from pip.basecommand import Command
from pip.operations.freeze import freeze
from pip.wheel import WheelCache


class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    """
    name = 'freeze'
    usage = """
      %prog [options]"""
    summary = 'Output installed packages in requirements format.'
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def __init__(self, *args, **kw):
        """Declare the command-line options accepted by ``pip freeze``."""
        super(FreezeCommand, self).__init__(*args, **kw)

        opts = self.cmd_opts
        opts.add_option(
            '-r', '--requirement',
            dest='requirement', action='store', default=None, metavar='file',
            help="Use the order in the given requirements file and its "
                 "comments when generating output.")
        opts.add_option(
            '-f', '--find-links',
            dest='find_links', action='append', default=[], metavar='URL',
            help='URL for finding packages, which will be added to the '
                 'output.')
        opts.add_option(
            '-l', '--local',
            dest='local', action='store_true', default=False,
            help='If in a virtualenv that has global access, do not output '
                 'globally-installed packages.')
        opts.add_option(
            '--user',
            dest='user', action='store_true', default=False,
            help='Only output packages installed in user-site.')

        self.parser.insert_option_group(0, opts)

    def run(self, options, args):
        """Write one requirements line per installed distribution to stdout."""
        # An empty FormatControl (nothing binary-only, nothing source-only)
        # keeps the wheel cache permissive.
        fmt_control = pip.index.FormatControl(set(), set())
        cache = WheelCache(options.cache_dir, fmt_control)
        for requirement_line in freeze(
                requirement=options.requirement,
                find_links=options.find_links,
                local_only=options.local,
                user_only=options.user,
                skip_regex=options.skip_requirements_regex,
                isolated=options.isolated_mode,
                wheel_cache=cache):
            sys.stdout.write(requirement_line + '\n')
apache-2.0
fivejjs/PTVS
Python/Tests/TestData/VirtualEnv/env/Lib/encodings/mac_roman.py
93
14043
""" Python Character Mapping Codec mac_roman generated from 'MAPPINGS/VENDORS/APPLE/ROMAN.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='mac-roman', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( u'\x00' # 0x00 -> CONTROL CHARACTER u'\x01' # 0x01 -> CONTROL CHARACTER u'\x02' # 0x02 -> CONTROL CHARACTER u'\x03' # 0x03 -> CONTROL CHARACTER u'\x04' # 0x04 -> CONTROL CHARACTER u'\x05' # 0x05 -> CONTROL CHARACTER u'\x06' # 0x06 -> CONTROL CHARACTER u'\x07' # 0x07 -> CONTROL CHARACTER u'\x08' # 0x08 -> CONTROL CHARACTER u'\t' # 0x09 -> CONTROL CHARACTER u'\n' # 0x0A -> CONTROL CHARACTER u'\x0b' # 0x0B -> CONTROL CHARACTER u'\x0c' # 0x0C -> CONTROL CHARACTER u'\r' # 0x0D -> CONTROL CHARACTER u'\x0e' # 0x0E -> CONTROL CHARACTER u'\x0f' # 0x0F -> CONTROL CHARACTER u'\x10' # 0x10 -> CONTROL CHARACTER u'\x11' # 0x11 -> CONTROL CHARACTER u'\x12' # 0x12 -> CONTROL CHARACTER u'\x13' # 0x13 -> CONTROL CHARACTER u'\x14' # 0x14 -> CONTROL CHARACTER u'\x15' # 0x15 -> CONTROL CHARACTER u'\x16' # 0x16 -> CONTROL CHARACTER u'\x17' # 0x17 -> CONTROL CHARACTER u'\x18' # 0x18 -> CONTROL CHARACTER u'\x19' # 
0x19 -> CONTROL CHARACTER u'\x1a' # 0x1A -> CONTROL CHARACTER u'\x1b' # 0x1B -> CONTROL CHARACTER u'\x1c' # 0x1C -> CONTROL CHARACTER u'\x1d' # 0x1D -> CONTROL CHARACTER u'\x1e' # 0x1E -> CONTROL CHARACTER u'\x1f' # 0x1F -> CONTROL CHARACTER u' ' # 0x20 -> SPACE u'!' # 0x21 -> EXCLAMATION MARK u'"' # 0x22 -> QUOTATION MARK u'#' # 0x23 -> NUMBER SIGN u'$' # 0x24 -> DOLLAR SIGN u'%' # 0x25 -> PERCENT SIGN u'&' # 0x26 -> AMPERSAND u"'" # 0x27 -> APOSTROPHE u'(' # 0x28 -> LEFT PARENTHESIS u')' # 0x29 -> RIGHT PARENTHESIS u'*' # 0x2A -> ASTERISK u'+' # 0x2B -> PLUS SIGN u',' # 0x2C -> COMMA u'-' # 0x2D -> HYPHEN-MINUS u'.' # 0x2E -> FULL STOP u'/' # 0x2F -> SOLIDUS u'0' # 0x30 -> DIGIT ZERO u'1' # 0x31 -> DIGIT ONE u'2' # 0x32 -> DIGIT TWO u'3' # 0x33 -> DIGIT THREE u'4' # 0x34 -> DIGIT FOUR u'5' # 0x35 -> DIGIT FIVE u'6' # 0x36 -> DIGIT SIX u'7' # 0x37 -> DIGIT SEVEN u'8' # 0x38 -> DIGIT EIGHT u'9' # 0x39 -> DIGIT NINE u':' # 0x3A -> COLON u';' # 0x3B -> SEMICOLON u'<' # 0x3C -> LESS-THAN SIGN u'=' # 0x3D -> EQUALS SIGN u'>' # 0x3E -> GREATER-THAN SIGN u'?' 
# 0x3F -> QUESTION MARK u'@' # 0x40 -> COMMERCIAL AT u'A' # 0x41 -> LATIN CAPITAL LETTER A u'B' # 0x42 -> LATIN CAPITAL LETTER B u'C' # 0x43 -> LATIN CAPITAL LETTER C u'D' # 0x44 -> LATIN CAPITAL LETTER D u'E' # 0x45 -> LATIN CAPITAL LETTER E u'F' # 0x46 -> LATIN CAPITAL LETTER F u'G' # 0x47 -> LATIN CAPITAL LETTER G u'H' # 0x48 -> LATIN CAPITAL LETTER H u'I' # 0x49 -> LATIN CAPITAL LETTER I u'J' # 0x4A -> LATIN CAPITAL LETTER J u'K' # 0x4B -> LATIN CAPITAL LETTER K u'L' # 0x4C -> LATIN CAPITAL LETTER L u'M' # 0x4D -> LATIN CAPITAL LETTER M u'N' # 0x4E -> LATIN CAPITAL LETTER N u'O' # 0x4F -> LATIN CAPITAL LETTER O u'P' # 0x50 -> LATIN CAPITAL LETTER P u'Q' # 0x51 -> LATIN CAPITAL LETTER Q u'R' # 0x52 -> LATIN CAPITAL LETTER R u'S' # 0x53 -> LATIN CAPITAL LETTER S u'T' # 0x54 -> LATIN CAPITAL LETTER T u'U' # 0x55 -> LATIN CAPITAL LETTER U u'V' # 0x56 -> LATIN CAPITAL LETTER V u'W' # 0x57 -> LATIN CAPITAL LETTER W u'X' # 0x58 -> LATIN CAPITAL LETTER X u'Y' # 0x59 -> LATIN CAPITAL LETTER Y u'Z' # 0x5A -> LATIN CAPITAL LETTER Z u'[' # 0x5B -> LEFT SQUARE BRACKET u'\\' # 0x5C -> REVERSE SOLIDUS u']' # 0x5D -> RIGHT SQUARE BRACKET u'^' # 0x5E -> CIRCUMFLEX ACCENT u'_' # 0x5F -> LOW LINE u'`' # 0x60 -> GRAVE ACCENT u'a' # 0x61 -> LATIN SMALL LETTER A u'b' # 0x62 -> LATIN SMALL LETTER B u'c' # 0x63 -> LATIN SMALL LETTER C u'd' # 0x64 -> LATIN SMALL LETTER D u'e' # 0x65 -> LATIN SMALL LETTER E u'f' # 0x66 -> LATIN SMALL LETTER F u'g' # 0x67 -> LATIN SMALL LETTER G u'h' # 0x68 -> LATIN SMALL LETTER H u'i' # 0x69 -> LATIN SMALL LETTER I u'j' # 0x6A -> LATIN SMALL LETTER J u'k' # 0x6B -> LATIN SMALL LETTER K u'l' # 0x6C -> LATIN SMALL LETTER L u'm' # 0x6D -> LATIN SMALL LETTER M u'n' # 0x6E -> LATIN SMALL LETTER N u'o' # 0x6F -> LATIN SMALL LETTER O u'p' # 0x70 -> LATIN SMALL LETTER P u'q' # 0x71 -> LATIN SMALL LETTER Q u'r' # 0x72 -> LATIN SMALL LETTER R u's' # 0x73 -> LATIN SMALL LETTER S u't' # 0x74 -> LATIN SMALL LETTER T u'u' # 0x75 -> LATIN SMALL LETTER U u'v' # 0x76 -> 
LATIN SMALL LETTER V u'w' # 0x77 -> LATIN SMALL LETTER W u'x' # 0x78 -> LATIN SMALL LETTER X u'y' # 0x79 -> LATIN SMALL LETTER Y u'z' # 0x7A -> LATIN SMALL LETTER Z u'{' # 0x7B -> LEFT CURLY BRACKET u'|' # 0x7C -> VERTICAL LINE u'}' # 0x7D -> RIGHT CURLY BRACKET u'~' # 0x7E -> TILDE u'\x7f' # 0x7F -> CONTROL CHARACTER u'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS u'\xc5' # 0x81 -> LATIN CAPITAL LETTER A WITH RING ABOVE u'\xc7' # 0x82 -> LATIN CAPITAL LETTER C WITH CEDILLA u'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE u'\xd1' # 0x84 -> LATIN CAPITAL LETTER N WITH TILDE u'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS u'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS u'\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE u'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE u'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX u'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS u'\xe3' # 0x8B -> LATIN SMALL LETTER A WITH TILDE u'\xe5' # 0x8C -> LATIN SMALL LETTER A WITH RING ABOVE u'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA u'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE u'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE u'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX u'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS u'\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE u'\xec' # 0x93 -> LATIN SMALL LETTER I WITH GRAVE u'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX u'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS u'\xf1' # 0x96 -> LATIN SMALL LETTER N WITH TILDE u'\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE u'\xf2' # 0x98 -> LATIN SMALL LETTER O WITH GRAVE u'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX u'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS u'\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE u'\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE u'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE u'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX u'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH 
DIAERESIS u'\u2020' # 0xA0 -> DAGGER u'\xb0' # 0xA1 -> DEGREE SIGN u'\xa2' # 0xA2 -> CENT SIGN u'\xa3' # 0xA3 -> POUND SIGN u'\xa7' # 0xA4 -> SECTION SIGN u'\u2022' # 0xA5 -> BULLET u'\xb6' # 0xA6 -> PILCROW SIGN u'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S u'\xae' # 0xA8 -> REGISTERED SIGN u'\xa9' # 0xA9 -> COPYRIGHT SIGN u'\u2122' # 0xAA -> TRADE MARK SIGN u'\xb4' # 0xAB -> ACUTE ACCENT u'\xa8' # 0xAC -> DIAERESIS u'\u2260' # 0xAD -> NOT EQUAL TO u'\xc6' # 0xAE -> LATIN CAPITAL LETTER AE u'\xd8' # 0xAF -> LATIN CAPITAL LETTER O WITH STROKE u'\u221e' # 0xB0 -> INFINITY u'\xb1' # 0xB1 -> PLUS-MINUS SIGN u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO u'\xa5' # 0xB4 -> YEN SIGN u'\xb5' # 0xB5 -> MICRO SIGN u'\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL u'\u2211' # 0xB7 -> N-ARY SUMMATION u'\u220f' # 0xB8 -> N-ARY PRODUCT u'\u03c0' # 0xB9 -> GREEK SMALL LETTER PI u'\u222b' # 0xBA -> INTEGRAL u'\xaa' # 0xBB -> FEMININE ORDINAL INDICATOR u'\xba' # 0xBC -> MASCULINE ORDINAL INDICATOR u'\u03a9' # 0xBD -> GREEK CAPITAL LETTER OMEGA u'\xe6' # 0xBE -> LATIN SMALL LETTER AE u'\xf8' # 0xBF -> LATIN SMALL LETTER O WITH STROKE u'\xbf' # 0xC0 -> INVERTED QUESTION MARK u'\xa1' # 0xC1 -> INVERTED EXCLAMATION MARK u'\xac' # 0xC2 -> NOT SIGN u'\u221a' # 0xC3 -> SQUARE ROOT u'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK u'\u2248' # 0xC5 -> ALMOST EQUAL TO u'\u2206' # 0xC6 -> INCREMENT u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK u'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS u'\xa0' # 0xCA -> NO-BREAK SPACE u'\xc0' # 0xCB -> LATIN CAPITAL LETTER A WITH GRAVE u'\xc3' # 0xCC -> LATIN CAPITAL LETTER A WITH TILDE u'\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE u'\u0152' # 0xCE -> LATIN CAPITAL LIGATURE OE u'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE u'\u2013' # 0xD0 -> EN DASH u'\u2014' # 0xD1 -> EM DASH u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK u'\u201d' # 0xD3 -> RIGHT 
DOUBLE QUOTATION MARK u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK u'\xf7' # 0xD6 -> DIVISION SIGN u'\u25ca' # 0xD7 -> LOZENGE u'\xff' # 0xD8 -> LATIN SMALL LETTER Y WITH DIAERESIS u'\u0178' # 0xD9 -> LATIN CAPITAL LETTER Y WITH DIAERESIS u'\u2044' # 0xDA -> FRACTION SLASH u'\u20ac' # 0xDB -> EURO SIGN u'\u2039' # 0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK u'\u203a' # 0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK u'\ufb01' # 0xDE -> LATIN SMALL LIGATURE FI u'\ufb02' # 0xDF -> LATIN SMALL LIGATURE FL u'\u2021' # 0xE0 -> DOUBLE DAGGER u'\xb7' # 0xE1 -> MIDDLE DOT u'\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK u'\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK u'\u2030' # 0xE4 -> PER MILLE SIGN u'\xc2' # 0xE5 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX u'\xca' # 0xE6 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX u'\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE u'\xcb' # 0xE8 -> LATIN CAPITAL LETTER E WITH DIAERESIS u'\xc8' # 0xE9 -> LATIN CAPITAL LETTER E WITH GRAVE u'\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE u'\xce' # 0xEB -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX u'\xcf' # 0xEC -> LATIN CAPITAL LETTER I WITH DIAERESIS u'\xcc' # 0xED -> LATIN CAPITAL LETTER I WITH GRAVE u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE u'\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX u'\uf8ff' # 0xF0 -> Apple logo u'\xd2' # 0xF1 -> LATIN CAPITAL LETTER O WITH GRAVE u'\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE u'\xdb' # 0xF3 -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX u'\xd9' # 0xF4 -> LATIN CAPITAL LETTER U WITH GRAVE u'\u0131' # 0xF5 -> LATIN SMALL LETTER DOTLESS I u'\u02c6' # 0xF6 -> MODIFIER LETTER CIRCUMFLEX ACCENT u'\u02dc' # 0xF7 -> SMALL TILDE u'\xaf' # 0xF8 -> MACRON u'\u02d8' # 0xF9 -> BREVE u'\u02d9' # 0xFA -> DOT ABOVE u'\u02da' # 0xFB -> RING ABOVE u'\xb8' # 0xFC -> CEDILLA u'\u02dd' # 0xFD -> DOUBLE ACUTE ACCENT u'\u02db' # 0xFE -> OGONEK u'\u02c7' # 0xFF -> CARON ) ### Encoding table 
encoding_table=codecs.charmap_build(decoding_table)
apache-2.0
janusnic/ecommerce
ecommerce/extensions/partner/tests/test_strategy.py
1
2109
import datetime

import pytz
from django.test import TestCase
from oscar.apps.partner import availability

from ecommerce.courses.models import Course
from ecommerce.extensions.catalogue.tests.mixins import CourseCatalogTestMixin
from ecommerce.extensions.partner.strategy import DefaultStrategy, Selector


class DefaultStrategyTests(CourseCatalogTestMixin, TestCase):
    """Tests for the availability behavior of our DefaultStrategy."""

    def setUp(self):
        super(DefaultStrategyTests, self).setUp()
        self.strategy = DefaultStrategy()
        demo_course = Course.objects.create(id='a/b/c', name='Demo Course')
        self.honor_seat = demo_course.create_or_update_seat('honor', False, 0)

    def test_seat_class(self):
        """ Verify the property returns the course seat Product Class. """
        self.assertEqual(self.strategy.seat_class, self.seat_product_class)

    def test_availability_policy_not_expired(self):
        """ If the course seat's expiration date has not passed, the seat should be available for purchase. """
        seat = self.honor_seat
        record = seat.stockrecords.first()

        # No expiration date at all: always available.
        seat.expires = None
        self.assertIsInstance(
            self.strategy.availability_policy(self.honor_seat, record),
            availability.Available
        )

        # Expiration date in the (distant) future: still available.
        seat.expires = pytz.utc.localize(datetime.datetime.max)
        self.assertIsInstance(
            self.strategy.availability_policy(seat, record),
            availability.Available
        )

    def test_availability_policy_expired(self):
        """ If the course seat's expiration date has passed, the seat should NOT be available for purchase. """
        seat = self.honor_seat
        seat.expires = pytz.utc.localize(datetime.datetime.min)
        record = seat.stockrecords.first()
        self.assertIsInstance(
            self.strategy.availability_policy(seat, record),
            availability.Unavailable
        )


class SelectorTests(TestCase):
    """Tests for the strategy Selector override."""

    def test_strategy(self):
        """ Verify our own DefaultStrategy is returned. """
        actual = Selector().strategy()
        self.assertIsInstance(actual, DefaultStrategy)
agpl-3.0
t-hey/QGIS-Original
python/plugins/processing/algs/qgis/VectorLayerScatterplot.py
3
3677
# -*- coding: utf-8 -*-

"""
***************************************************************************
    VectorLayerScatterplot.py
    ---------------------
    Date                 : January 2013
    Copyright            : (C) 2013 by Victor Olaya
    Email                : volayaf at gmail dot com
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""
# NOTE: the header previously named this file "EquivalentNumField.py" — a
# copy/paste leftover from another algorithm; corrected to match this module.

__author__ = 'Victor Olaya'
__date__ = 'January 2013'
__copyright__ = '(C) 2013, Victor Olaya'

# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'

import plotly as plt
import plotly.graph_objs as go

from qgis.core import (QgsProcessingParameterFeatureSource,
                       QgsProcessingParameterField,
                       QgsProcessingUtils,
                       QgsProcessingParameterFileDestination,
                       QgsProcessingOutputHtml)

from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
from processing.tools import vector


class VectorLayerScatterplot(QgisAlgorithm):
    """Processing algorithm that plots two numeric attributes of a vector
    layer against each other as an interactive plotly scatterplot, written
    to an HTML file."""

    INPUT = 'INPUT'
    OUTPUT = 'OUTPUT'
    XFIELD = 'XFIELD'
    YFIELD = 'YFIELD'

    def group(self):
        return self.tr('Graphics')

    def __init__(self):
        super().__init__()

    def initAlgorithm(self, config=None):
        # Input layer plus the two numeric fields to plot on X and Y.
        self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
                                                              self.tr('Input layer')))
        self.addParameter(QgsProcessingParameterField(self.XFIELD,
                                                      self.tr('X attribute'),
                                                      parentLayerParameterName=self.INPUT,
                                                      type=QgsProcessingParameterField.Numeric))
        self.addParameter(QgsProcessingParameterField(self.YFIELD,
                                                      self.tr('Y attribute'),
                                                      parentLayerParameterName=self.INPUT,
                                                      type=QgsProcessingParameterField.Numeric))
        self.addParameter(QgsProcessingParameterFileDestination(self.OUTPUT,
                                                                self.tr('Scatterplot'),
                                                                self.tr('HTML files (*.html)')))
        self.addOutput(QgsProcessingOutputHtml(self.OUTPUT, self.tr('Scatterplot')))

    def name(self):
        return 'vectorlayerscatterplot'

    def displayName(self):
        return self.tr('Vector layer scatterplot')

    def processAlgorithm(self, parameters, context, feedback):
        source = self.parameterAsSource(parameters, self.INPUT, context)
        xfieldname = self.parameterAsString(parameters, self.XFIELD, context)
        yfieldname = self.parameterAsString(parameters, self.YFIELD, context)
        output = self.parameterAsFileOutput(parameters, self.OUTPUT, context)

        # Pull both attribute columns from the source in a single pass.
        values = vector.values(source, xfieldname, yfieldname)

        data = [go.Scatter(x=values[xfieldname],
                           y=values[yfieldname],
                           mode='markers')]
        # auto_open=False: QGIS decides whether/where to display the result.
        plt.offline.plot(data, filename=output, auto_open=False)

        return {self.OUTPUT: output}
gpl-2.0
catalyst-cooperative/pudl
src/pudl/output/pudltabl.py
1
35324
""" This module provides a class enabling tabular compilations from the PUDL DB. Many of our potential users are comfortable using spreadsheets, not databases, so we are creating a collection of tabular outputs that contain the most useful core information from the PUDL data packages, including additional keys and human readable names for the objects (utilities, plants, generators) being described in the table. These tabular outputs can be joined with each other using those keys, and used as a data source within Microsoft Excel, Access, R Studio, or other data analysis packages that folks may be familiar with. They aren't meant to completely replicate all the data and relationships contained within the full PUDL database, but should serve as a generally usable set of PUDL data products. The PudlTabl class can also provide access to complex derived values, like the generator and plant level marginal cost of electricity (MCOE), which are defined in the analysis module. In the long run, this is a probably a kind of prototype for pre-packaged API outputs or data products that we might want to be able to provide to users a la carte. Todo: Return to for update arg and returns values in functions below """ import logging from pathlib import Path # Useful high-level external modules. import pandas as pd import sqlalchemy as sa import pudl from pudl import constants as pc logger = logging.getLogger(__name__) ############################################################################### # Output Class, that can pull all the below tables with similar parameters ############################################################################### class PudlTabl(object): """A class for compiling common useful tabular outputs from the PUDL DB.""" def __init__( self, pudl_engine, ds=None, freq=None, start_date=None, end_date=None, fill_fuel_cost=False, roll_fuel_cost=False, fill_net_gen=False ): """ Initialize the PUDL output object. 
Private data members are not initialized until they are requested. They are then cached within the object unless they get re-initialized via a method that includes update=True. Some methods (e.g mcoe) will take a while to run, since they need to pull substantial data and do a bunch of calculations. Args: freq (str): String describing time frequency at which to aggregate the reported data. E.g. 'MS' (monthly start). start_date (date): Beginning date for data to pull from the PUDL DB. end_date (date): End date for data to pull from the PUDL DB. pudl_engine (sqlalchemy.engine.Engine): SQLAlchemy connection engine for the PUDL DB. fill_fuel_cost (boolean): if True, fill in missing EIA fuel cost from ``frc_eia923()`` with state-level monthly averages from EIA's API. roll_fuel_cost (boolean): if True, apply a rolling average to a subset of output table's columns (currently only 'fuel_cost_per_mmbtu' for the frc table). fill_net_gen (boolean): if True, use net generation from the generation_fuel_eia923 - which is reported at the plant/fuel/prime mover level - re-allocated to generators in ``mcoe()``, ``capacity_factor()`` and ``heat_rate_by_unit()``. """ self.pudl_engine = pudl_engine self.freq = freq # We need datastore access because some data is not yet integrated into the # PUDL DB. See the etl_eia861 method. self.ds = ds if self.ds is None: pudl_in = Path(pudl.workspace.setup.get_defaults()["pudl_in"]) self.ds = pudl.workspace.datastore.Datastore( local_cache_path=pudl_in / "data" ) # grab all working eia dates to use to set start and end dates if they # are not set eia_dates = pudl.helpers.get_working_eia_dates() if start_date is None: self.start_date = min(eia_dates) else: # Make sure it's a date... and not a string. self.start_date = pd.to_datetime(start_date) if end_date is None: self.end_date = max(eia_dates) else: # Make sure it's a date... and not a string. 
self.end_date = pd.to_datetime(end_date) if not pudl_engine: raise AssertionError('PudlTabl object needs a pudl_engine') self.roll_fuel_cost = roll_fuel_cost self.fill_fuel_cost = fill_fuel_cost self.fill_net_gen = fill_net_gen # We populate this library of dataframes as they are generated, and # allow them to persist, in case they need to be used again. self._dfs = { "pu_eia": None, "pu_ferc1": None, "utils_eia860": None, "bga_eia860": None, "plants_eia860": None, "gens_eia860": None, "own_eia860": None, # TODO add the other tables -- this is just an interim check "advanced_metering_infrastructure_eia861": None, "balancing_authority_eia861": None, "balancing_authority_assn_eia861": None, "demand_response_eia861": None, "demand_side_management_eia861": None, "distributed_generation_eia861": None, "distribution_systems_eia861": None, "dynamic_pricing_eia861": None, "energy_efficiency_eia861": None, "green_pricing_eia861": None, "mergers_eia861": None, "net_metering_eia861": None, "non_net_metering_eia861": None, "operational_data_eia861": None, "reliability_eia861": None, "sales_eia861": None, "service_territory_eia861": None, "utility_assn_eia861": None, "utility_data_eia861": None, # TODO add the other tables -- this is just an interim check "respondent_id_ferc714": None, "gf_eia923": None, "frc_eia923": None, "bf_eia923": None, "gen_eia923": None, "gen_og_eia923": None, "gen_allocated_eia923": None, "plants_steam_ferc1": None, "fuel_ferc1": None, "fbp_ferc1": None, "plants_small_ferc1": None, "plants_hydro_ferc1": None, "plants_pumped_storage_ferc1": None, "purchased_power_ferc1": None, "plant_in_service_ferc1": None, "bga": None, "hr_by_unit": None, "hr_by_gen": None, "fuel_cost": None, "capacity_factor": None, "mcoe": None, } def pu_eia860(self, update=False): """ Pull a dataframe of EIA plant-utility associations. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. 
Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['pu_eia'] is None: self._dfs['pu_eia'] = pudl.output.eia860.plants_utils_eia860( self.pudl_engine, start_date=self.start_date, end_date=self.end_date) return self._dfs['pu_eia'] def pu_ferc1(self, update=False): """ Pull a dataframe of FERC plant-utility associations. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['pu_ferc1'] is None: self._dfs['pu_ferc1'] = pudl.output.ferc1.plants_utils_ferc1( self.pudl_engine) return self._dfs['pu_ferc1'] ########################################################################### # EIA 861 Interim Outputs (awaiting full DB integration) ########################################################################### def etl_eia861(self, update=False): """ A single function that runs the temporary EIA 861 ETL and sets all DFs. This is an interim solution that provides a (somewhat) standard way of accessing the EIA 861 data prior to its being fully integrated into the PUDL database. If any of the dataframes is attempted to be accessed, all of them are set. Only the tables that have actual transform functions are included, and as new transform functions are completed, they would need to be added to the list below. Surely there is a way to do this automatically / magically but that's beyond my knowledge right now. Args: update (bool): Whether to overwrite the existing dataframes if they exist. 
""" if update or self._dfs["balancing_authority_eia861"] is None: logger.warning( "Running the interim EIA 861 ETL process!") eia861_raw_dfs = ( pudl.extract.eia861.Extractor(self.ds) .extract(year=pc.working_partitions["eia861"]["years"]) ) eia861_tfr_dfs = pudl.transform.eia861.transform(eia861_raw_dfs) for table in eia861_tfr_dfs: self._dfs[table] = eia861_tfr_dfs[table] def advanced_metering_infrastructure_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["advanced_metering_infrastructure_eia861"] def balancing_authority_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["balancing_authority_eia861"] def balancing_authority_assn_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["balancing_authority_assn_eia861"] def demand_response_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["demand_response_eia861"] def demand_side_management_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["demand_side_management_eia861"] def distributed_generation_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["distributed_generation_eia861"] def distribution_systems_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["distribution_systems_eia861"] def dynamic_pricing_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["dynamic_pricing_eia861"] def energy_efficiency_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["energy_efficiency_eia861"] def green_pricing_eia861(self, 
update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["green_pricing_eia861"] def mergers_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["mergers_eia861"] def net_metering_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["net_metering_eia861"] def non_net_metering_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["non_net_meterin_eia861"] def operational_data_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["operational_data_eia861"] def reliability_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["reliability_eia861"] def sales_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["sales_eia861"] def service_territory_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["service_territory_eia861"] def utility_assn_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["utility_assn_eia861"] def utility_data_eia861(self, update=False): """An interim EIA 861 output function.""" self.etl_eia861(update=update) return self._dfs["_eia861"] ########################################################################### # FERC 714 Interim Outputs (awaiting full DB integration) ########################################################################### def etl_ferc714(self, update=False): """ A single function that runs the temporary FERC 714 ETL and sets all DFs. 
This is an interim solution, so that we can have a (relatively) standard way of accessing the FERC 714 data prior to getting it integrated into the PUDL DB. Some of these are not yet cleaned up, but there are dummy transform functions which pass through the raw DFs with some minor alterations, so all the data is available as it exists right now. An attempt to access *any* of the dataframes results in all of them being populated, since generating all of them is almost the same amount of work as generating one of them. Args: update (bool): Whether to overwrite the existing dataframes if they exist. """ if update or self._dfs["respondent_id_ferc714"] is None: logger.warning( "Running the interim FERC 714 ETL process!") ferc714_raw_dfs = pudl.extract.ferc714.extract(ds=self.ds) ferc714_tfr_dfs = pudl.transform.ferc714.transform(ferc714_raw_dfs) for table in ferc714_tfr_dfs: self._dfs[table] = ferc714_tfr_dfs[table] def respondent_id_ferc714(self, update=False): """An interim FERC 714 output function.""" self.etl_ferc714(update=update) return self._dfs["respondent_id_ferc714"] def demand_hourly_pa_ferc714(self, update=False): """An interim FERC 714 output function.""" self.etl_ferc714(update=update) return self._dfs["demand_hourly_pa_ferc714"] def description_pa_ferc714(self, update=False): """An interim FERC 714 output function.""" self.etl_ferc714(update=update) return self._dfs["description_pa_ferc714"] def id_certification_ferc714(self, update=False): """An interim FERC 714 output function.""" self.etl_ferc714(update=update) return self._dfs["id_certification_ferc714"] def gen_plants_ba_ferc714(self, update=False): """An interim FERC 714 output function.""" self.etl_ferc714(update=update) return self._dfs["gen_plants_ba_ferc714"] def demand_monthly_ba_ferc714(self, update=False): """An interim FERC 714 output function.""" self.etl_ferc714(update=update) return self._dfs["demand_monthly_ba_ferc714"] def net_energy_load_ba_ferc714(self, update=False): """An interim 
FERC 714 output function.""" self.etl_ferc714(update=update) return self._dfs["net_energy_load_ba_ferc714"] def adjacency_ba_ferc714(self, update=False): """An interim FERC 714 output function.""" self.etl_ferc714(update=update) return self._dfs["adjacency_ba_ferc714"] def interchange_ba_ferc714(self, update=False): """An interim FERC 714 output function.""" self.etl_ferc714(update=update) return self._dfs["interchange_ba_ferc714"] def lambda_hourly_ba_ferc714(self, update=False): """An interim FERC 714 output function.""" self.etl_ferc714(update=update) return self._dfs["lambda_hourly_ba_ferc714"] def lambda_description_ferc714(self, update=False): """An interim FERC 714 output function.""" self.etl_ferc714(update=update) return self._dfs["lambda_description_ferc714"] def demand_forecast_pa_ferc714(self, update=False): """An interim FERC 714 output function.""" self.etl_ferc714(update=update) return self._dfs["demand_forecast_pa_ferc714"] ########################################################################### # EIA 860/923 OUTPUTS ########################################################################### def utils_eia860(self, update=False): """ Pull a dataframe describing utilities reported in EIA 860. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['utils_eia860'] is None: self._dfs['utils_eia860'] = pudl.output.eia860.utilities_eia860( self.pudl_engine, start_date=self.start_date, end_date=self.end_date) return self._dfs['utils_eia860'] def bga_eia860(self, update=False): """ Pull a dataframe of boiler-generator associations from EIA 860. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. 
""" if update or self._dfs['bga_eia860'] is None: self._dfs['bga_eia860'] = pudl.output.eia860.boiler_generator_assn_eia860( self.pudl_engine, start_date=self.start_date, end_date=self.end_date) return self._dfs['bga_eia860'] def plants_eia860(self, update=False): """ Pull a dataframe of plant level info reported in EIA 860. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['plants_eia860'] is None: self._dfs['plants_eia860'] = pudl.output.eia860.plants_eia860( self.pudl_engine, start_date=self.start_date, end_date=self.end_date,) return self._dfs['plants_eia860'] def gens_eia860(self, update=False): """ Pull a dataframe describing generators, as reported in EIA 860. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['gens_eia860'] is None: self._dfs['gens_eia860'] = pudl.output.eia860.generators_eia860( self.pudl_engine, start_date=self.start_date, end_date=self.end_date) return self._dfs['gens_eia860'] def own_eia860(self, update=False): """ Pull a dataframe of generator level ownership data from EIA 860. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['own_eia860'] is None: self._dfs['own_eia860'] = pudl.output.eia860.ownership_eia860( self.pudl_engine, start_date=self.start_date, end_date=self.end_date) return self._dfs['own_eia860'] def gf_eia923(self, update=False): """ Pull EIA 923 generation and fuel consumption data. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. 
""" if update or self._dfs['gf_eia923'] is None: self._dfs['gf_eia923'] = pudl.output.eia923.generation_fuel_eia923( self.pudl_engine, freq=self.freq, start_date=self.start_date, end_date=self.end_date) return self._dfs['gf_eia923'] def frc_eia923(self, update=False): """ Pull EIA 923 fuel receipts and costs data. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['frc_eia923'] is None: self._dfs['frc_eia923'] = pudl.output.eia923.fuel_receipts_costs_eia923( self.pudl_engine, freq=self.freq, start_date=self.start_date, end_date=self.end_date, fill=self.fill_fuel_cost, roll=self.roll_fuel_cost) return self._dfs['frc_eia923'] def bf_eia923(self, update=False): """ Pull EIA 923 boiler fuel consumption data. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['bf_eia923'] is None: self._dfs['bf_eia923'] = pudl.output.eia923.boiler_fuel_eia923( self.pudl_engine, freq=self.freq, start_date=self.start_date, end_date=self.end_date) return self._dfs['bf_eia923'] def gen_eia923(self, update=False): """ Pull EIA 923 net generation data by generator. Net generation is reported in two seperate tables in EIA 923: in the generation_eia923 and generation_fuel_eia923 tables. While the generation_fuel_eia923 table is more complete (the generation_eia923 table includes only ~55% of the reported MWhs), the generation_eia923 table is more granular (it is reported at the generator level). This method either grabs the generation_eia923 table that is reported by generator, or allocates net generation from the generation_fuel_eia923 table to the generator level. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. 
Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['gen_eia923'] is None: if self.fill_net_gen: logger.info( 'Allocating net generation from the generation_fuel_eia923 ' 'to the generator level instead of using the less complete ' 'generation_eia923 table.' ) self._dfs['gen_eia923'] = self.gen_allocated_eia923(update) else: self._dfs['gen_eia923'] = self.gen_original_eia923(update) return self._dfs['gen_eia923'] def gen_original_eia923(self, update=False): """Pull the original EIA 923 net generation data by generator.""" if update or self._dfs['gen_og_eia923'] is None: self._dfs['gen_og_eia923'] = pudl.output.eia923.generation_eia923( self.pudl_engine, freq=self.freq, start_date=self.start_date, end_date=self.end_date) return self._dfs['gen_og_eia923'] def gen_allocated_eia923(self, update=False): """Net generation from gen fuel table allocated to generators.""" if update or self._dfs['gen_allocated_eia923'] is None: self._dfs['gen_allocated_eia923'] = ( pudl.analysis.allocate_net_gen.allocate_gen_fuel_by_gen(self) ) return self._dfs['gen_allocated_eia923'] ########################################################################### # FERC FORM 1 OUTPUTS ########################################################################### def plants_steam_ferc1(self, update=False): """ Pull the FERC Form 1 steam plants data. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['plants_steam_ferc1'] is None: self._dfs['plants_steam_ferc1'] = pudl.output.ferc1.plants_steam_ferc1( self.pudl_engine) return self._dfs['plants_steam_ferc1'] def fuel_ferc1(self, update=False): """ Pull the FERC Form 1 steam plants fuel consumption data. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. 
""" if update or self._dfs['fuel_ferc1'] is None: self._dfs['fuel_ferc1'] = pudl.output.ferc1.fuel_ferc1( self.pudl_engine) return self._dfs['fuel_ferc1'] def fbp_ferc1(self, update=False): """ Summarize FERC Form 1 fuel usage by plant. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['fbp_ferc1'] is None: self._dfs['fbp_ferc1'] = pudl.output.ferc1.fuel_by_plant_ferc1( self.pudl_engine) return self._dfs['fbp_ferc1'] def plants_small_ferc1(self, update=False): """ Pull the FERC Form 1 Small Plants Table. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['plants_small_ferc1'] is None: self._dfs['plants_small_ferc1'] = pudl.output.ferc1.plants_small_ferc1( self.pudl_engine) return self._dfs['plants_small_ferc1'] def plants_hydro_ferc1(self, update=False): """ Pull the FERC Form 1 Hydro Plants Table. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['plants_hydro_ferc1'] is None: self._dfs['plants_hydro_ferc1'] = pudl.output.ferc1.plants_hydro_ferc1( self.pudl_engine) return self._dfs['plants_hydro_ferc1'] def plants_pumped_storage_ferc1(self, update=False): """ Pull the FERC Form 1 Pumped Storage Table. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. 
""" if update or self._dfs['plants_pumped_storage_ferc1'] is None: self._dfs['plants_pumped_storage_ferc1'] = pudl.output.ferc1.plants_pumped_storage_ferc1( self.pudl_engine) return self._dfs['plants_pumped_storage_ferc1'] def purchased_power_ferc1(self, update=False): """ Pull the FERC Form 1 Purchased Power Table. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['purchased_power_ferc1'] is None: self._dfs['purchased_power_ferc1'] = pudl.output.ferc1.purchased_power_ferc1( self.pudl_engine) return self._dfs['purchased_power_ferc1'] def plant_in_service_ferc1(self, update=False): """ Pull the FERC Form 1 Plant in Service Table. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['plant_in_service_ferc1'] is None: self._dfs['plant_in_service_ferc1'] = pudl.output.ferc1.plant_in_service_ferc1( self.pudl_engine) return self._dfs['plant_in_service_ferc1'] ########################################################################### # EIA MCOE OUTPUTS ########################################################################### def bga(self, update=False): """ Pull the more complete EIA/PUDL boiler-generator associations. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['bga'] is None: self._dfs['bga'] = pudl.output.glue.boiler_generator_assn( self.pudl_engine, start_date=self.start_date, end_date=self.end_date) return self._dfs['bga'] def hr_by_gen(self, update=False): """ Calculate and return generator level heat rates (mmBTU/MWh). Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. 
Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['hr_by_gen'] is None: self._dfs['hr_by_gen'] = pudl.analysis.mcoe.heat_rate_by_gen(self) return self._dfs['hr_by_gen'] def hr_by_unit(self, update=False): """ Calculate and return generation unit level heat rates. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['hr_by_unit'] is None: self._dfs['hr_by_unit'] = ( pudl.analysis.mcoe.heat_rate_by_unit(self) ) return self._dfs['hr_by_unit'] def fuel_cost(self, update=False): """ Calculate and return generator level fuel costs per MWh. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['fuel_cost'] is None: self._dfs['fuel_cost'] = pudl.analysis.mcoe.fuel_cost(self) return self._dfs['fuel_cost'] def capacity_factor(self, update=False, min_cap_fact=None, max_cap_fact=None): """ Calculate and return generator level capacity factors. Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. Returns: pandas.DataFrame: a denormalized table for interactive use. """ if update or self._dfs['capacity_factor'] is None: self._dfs['capacity_factor'] = ( pudl.analysis.mcoe.capacity_factor( self, min_cap_fact=min_cap_fact, max_cap_fact=max_cap_fact) ) return self._dfs['capacity_factor'] def mcoe(self, update=False, min_heat_rate=5.5, min_fuel_cost_per_mwh=0.0, min_cap_fact=0.0, max_cap_fact=1.5): """ Calculate and return generator level MCOE based on EIA data. Eventually this calculation will include non-fuel operating expenses as reported in FERC Form 1, but for now only the fuel costs reported to EIA are included. They are attibuted based on the unit-level heat rates and fuel costs. 
Args: update (bool): If true, re-calculate the output dataframe, even if a cached version exists. min_heat_rate: lowest plausible heat rate, in mmBTU/MWh. Any MCOE records with lower heat rates are presumed to be invalid, and are discarded before returning. min_cap_fact: minimum generator capacity factor. Generator records with a lower capacity factor will be filtered out before returning. This allows the user to exclude generators that aren't being used enough to have valid. min_fuel_cost_per_mwh: minimum fuel cost on a per MWh basis that is required for a generator record to be considered valid. For some reason there are now a large number of $0 fuel cost records, which previously would have been NaN. max_cap_fact: maximum generator capacity factor. Generator records with a lower capacity factor will be filtered out before returning. This allows the user to exclude generators that aren't being used enough to have valid. Returns: :class:`pandas.DataFrame`: a compilation of generator attributes, including fuel costs per MWh. """ if update or self._dfs['mcoe'] is None: self._dfs['mcoe'] = pudl.analysis.mcoe.mcoe( self, min_heat_rate=min_heat_rate, min_fuel_cost_per_mwh=min_fuel_cost_per_mwh, min_cap_fact=min_cap_fact, max_cap_fact=max_cap_fact, ) return self._dfs['mcoe'] def get_table_meta(pudl_engine): """Grab the pudl sqlitie database table metadata.""" md = sa.MetaData() md.reflect(pudl_engine) return md.tables
mit
invisiblek/python-for-android
python3-alpha/python3-src/Lib/quopri.py
57
7304
#! /usr/bin/env python3

"""Conversions to/from quoted-printable transport encoding as per RFC 1521."""

# (Dec 1991 version).

__all__ = ["encode", "decode", "encodestring", "decodestring"]

ESCAPE = b'='
MAXLINESIZE = 76
HEX = b'0123456789ABCDEF'
EMPTYSTRING = b''

# Prefer the C-accelerated codecs; the pure-Python code below is the
# fallback used only when binascii is unavailable.
try:
    from binascii import a2b_qp, b2a_qp
except ImportError:
    a2b_qp = None
    b2a_qp = None


def needsquoting(c, quotetabs, header):
    """Decide whether a particular byte ordinal needs to be quoted.

    The 'quotetabs' flag indicates whether embedded tabs and spaces should be
    quoted.  Note that line-ending tabs and spaces are always encoded, as per
    RFC 1521.
    """
    assert isinstance(c, bytes)
    if c in b' \t':
        return quotetabs
    # if header, we have to escape _ because _ is used to escape space
    if c == b'_':
        return header
    return c == ESCAPE or not (b' ' <= c <= b'~')


def quote(c):
    """Quote a single character as '=XX' (two uppercase hex digits)."""
    assert isinstance(c, bytes) and len(c) == 1
    c = ord(c)
    return ESCAPE + bytes((HEX[c // 16], HEX[c % 16]))


def encode(input, output, quotetabs, header=False):
    """Read 'input', apply quoted-printable encoding, and write to 'output'.

    'input' and 'output' are files with readline() and write() methods.
    The 'quotetabs' flag indicates whether embedded tabs and spaces should be
    quoted.  Note that line-ending tabs and spaces are always encoded, as per
    RFC 1521.
    The 'header' flag indicates whether we are encoding spaces as _ as per
    RFC 1522.
    """

    if b2a_qp is not None:
        data = input.read()
        odata = b2a_qp(data, quotetabs=quotetabs, header=header)
        output.write(odata)
        return

    def write(s, output=output, lineEnd=b'\n'):
        # RFC 1521 requires that the line ending in a space or tab must have
        # that trailing character encoded.
        if s and s[-1:] in b' \t':
            output.write(s[:-1] + quote(s[-1:]) + lineEnd)
        elif s == b'.':
            output.write(quote(s) + lineEnd)
        else:
            output.write(s + lineEnd)

    prevline = None
    while 1:
        line = input.readline()
        if not line:
            break
        outline = []
        # Strip off any readline induced trailing newline
        stripped = b''
        if line[-1:] == b'\n':
            line = line[:-1]
            stripped = b'\n'
        # Calculate the un-length-limited encoded line
        for c in line:
            c = bytes((c,))
            if needsquoting(c, quotetabs, header):
                c = quote(c)
            if header and c == b' ':
                outline.append(b'_')
            else:
                outline.append(c)
        # First, write out the previous line
        if prevline is not None:
            write(prevline)
        # Now see if we need any soft line breaks because of RFC-imposed
        # length limitations.  Then do the thisline->prevline dance.
        thisline = EMPTYSTRING.join(outline)
        while len(thisline) > MAXLINESIZE:
            # Don't forget to include the soft line break `=' sign in the
            # length calculation!
            write(thisline[:MAXLINESIZE - 1], lineEnd=b'=\n')
            thisline = thisline[MAXLINESIZE - 1:]
        # Write out the current line
        prevline = thisline
    # Write out the last line, without a trailing newline
    if prevline is not None:
        write(prevline, lineEnd=stripped)


def encodestring(s, quotetabs=False, header=False):
    """Encode the bytes 's' and return the quoted-printable result."""
    if b2a_qp is not None:
        return b2a_qp(s, quotetabs=quotetabs, header=header)
    from io import BytesIO
    infp = BytesIO(s)
    outfp = BytesIO()
    encode(infp, outfp, quotetabs, header)
    return outfp.getvalue()


def decode(input, output, header=False):
    """Read 'input', apply quoted-printable decoding, and write to 'output'.

    'input' and 'output' are files with readline() and write() methods.
    If 'header' is true, decode underscore as space (per RFC 1522).
    """

    if a2b_qp is not None:
        data = input.read()
        odata = a2b_qp(data, header=header)
        output.write(odata)
        return

    new = b''
    while 1:
        line = input.readline()
        if not line:
            break
        i, n = 0, len(line)
        if n > 0 and line[n - 1:n] == b'\n':
            partial = 0
            n = n - 1
            # Strip trailing whitespace
            while n > 0 and line[n - 1:n] in b" \t\r":
                n = n - 1
        else:
            # A line that doesn't end in \n was only partially read; don't
            # emit a newline for it yet.
            partial = 1
        while i < n:
            c = line[i:i + 1]
            if c == b'_' and header:
                new = new + b' '
                i = i + 1
            elif c != ESCAPE:
                new = new + c
                i = i + 1
            elif i + 1 == n and not partial:
                # Trailing '=' is a soft line break: join with the next line.
                partial = 1
                break
            elif i + 1 < n and line[i + 1:i + 2] == ESCAPE:
                # BUG FIX: this used to be ``line[i+1] == ESCAPE``.  Indexing
                # a bytes object yields an int, so the comparison with b'='
                # was always False and an escaped "==" sequence was never
                # decoded to a single "=".  Compare a one-byte slice instead.
                new = new + ESCAPE
                i = i + 2
            elif i + 2 < n and ishex(line[i + 1:i + 2]) and ishex(line[i + 2:i + 3]):
                new = new + bytes((unhex(line[i + 1:i + 3]),))
                i = i + 3
            else:
                # Bad escape sequence -- leave it in
                new = new + c
                i = i + 1
        if not partial:
            output.write(new + b'\n')
            new = b''
    if new:
        output.write(new)


def decodestring(s, header=False):
    """Decode the quoted-printable bytes 's' and return the result."""
    if a2b_qp is not None:
        return a2b_qp(s, header=header)
    from io import BytesIO
    infp = BytesIO(s)
    outfp = BytesIO()
    decode(infp, outfp, header=header)
    return outfp.getvalue()


# Other helper functions
def ishex(c):
    """Return true if the byte ordinal 'c' is a hexadecimal digit in ASCII."""
    assert isinstance(c, bytes)
    return b'0' <= c <= b'9' or b'a' <= c <= b'f' or b'A' <= c <= b'F'


def unhex(s):
    """Get the integer value of a hexadecimal number."""
    bits = 0
    for c in s:
        c = bytes((c,))
        if b'0' <= c <= b'9':
            i = ord('0')
        elif b'a' <= c <= b'f':
            i = ord('a') - 10
        elif b'A' <= c <= b'F':
            i = ord(b'A') - 10
        else:
            assert False, "non-hex digit " + repr(c)
        bits = bits * 16 + (ord(c) - i)
    return bits


def main():
    """Command-line driver: encode (default) or decode (-d) files/stdin."""
    import sys
    import getopt
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'td')
    except getopt.error as msg:
        sys.stdout = sys.stderr
        print(msg)
        print("usage: quopri [-t | -d] [file] ...")
        print("-t: quote tabs")
        print("-d: decode; default encode")
        sys.exit(2)
    deco = 0
    tabs = 0
    for o, a in opts:
        if o == '-t':
            tabs = 1
        if o == '-d':
            deco = 1
    if tabs and deco:
        sys.stdout = sys.stderr
        print("-t and -d are mutually exclusive")
        sys.exit(2)
    if not args:
        args = ['-']
    sts = 0
    for file in args:
        if file == '-':
            fp = sys.stdin.buffer
        else:
            try:
                fp = open(file, "rb")
            except IOError as msg:
                sys.stderr.write("%s: can't open (%s)\n" % (file, msg))
                sts = 1
                continue
        try:
            if deco:
                decode(fp, sys.stdout.buffer)
            else:
                encode(fp, sys.stdout.buffer, tabs)
        finally:
            if file != '-':
                fp.close()
    if sts:
        sys.exit(sts)


if __name__ == '__main__':
    main()
apache-2.0
xianggong/m2c_unit_test
test/operator/post_decrement_long2/compile.py
1861
4430
#!/usr/bin/python
"""Driver that compiles every .cl kernel in the current directory down to
SI assembly/binary via clang -> llvm-as -> opt -> llvm-dis -> m2c."""

import os
import subprocess
import re


def runCommand(command):
    """Run 'command' (an argv list) and return an iterator over its combined
    stdout/stderr lines (as bytes)."""
    p = subprocess.Popen(command,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    p.wait()
    return iter(p.stdout.readline, b'')


def dumpRunCommand(command, dump_file_name, postfix):
    """Run 'command' (a shell-style string) and dump the command line plus its
    output to dump_file_name + postfix."""
    # NOTE(review): runCommand yields bytes while this file is opened in text
    # mode — this matches the original Python 2 behavior; under Python 3 the
    # write would need a decode.  TODO confirm target interpreter.
    dumpFile = open(dump_file_name + postfix, "w+")
    try:
        dumpFile.write(command + "\n")
        for line in runCommand(command.split()):
            dumpFile.write(line)
    finally:
        # BUG FIX: the original never closed this handle (resource leak).
        dumpFile.close()


def rmFile(file_name):
    """Remove 'file_name' (best effort, via rm -rf)."""
    cmd = "rm -rf " + file_name
    runCommand(cmd.split())


def rnm_ir(file_name):
    """Rewrite file_name + '.ll' in place: add an 'entry:' label after each
    'define' and rename unnamed SSA values/labels with a 'tmp_' prefix."""
    # Append all unnamed variable with prefix 'tmp_'
    ir_file_name = file_name + ".ll"
    if os.path.isfile(ir_file_name):
        # BUG FIX: the original used open(..., "rw+"), which is not a valid
        # mode string — Python 3 raises ValueError, and Python 2 only worked
        # because glibc fopen() ignored the stray 'w'.  "r+" is the intended
        # read/update mode.  A with-block also closes the leaked handle.
        with open(ir_file_name, "r+") as fo:
            lines = fo.readlines()
            fo.seek(0)
            fo.truncate()
            for line in lines:
                # Add entry block identifier
                if "define" in line:
                    line += "entry:\n"
                # Rename all unnamed variables (raw strings silence the
                # invalid-escape warnings; the patterns are unchanged)
                line = re.sub(r'\%([0-9]+)', r'%tmp_\1', line.rstrip())
                # Also rename branch name
                line = re.sub(r'(\;\ \<label\>\:)([0-9]+)', r'tmp_\2:',
                              line.rstrip())
                fo.write(line + '\n')


def gen_ir(file_name):
    """Compile file_name + '.cl' to LLVM IR with clang, logging to *.clang.log."""
    # Directories
    root_dir = '../../../'
    header_dir = root_dir + "inc/"

    # Headers
    header = " -I " + header_dir
    header += " -include " + header_dir + "m2c_buildin_fix.h "
    header += " -include " + header_dir + "clc/clc.h "
    header += " -D cl_clang_storage_class_specifiers "

    gen_ir = "clang -S -emit-llvm -O0 -target r600-- -mcpu=verde "
    cmd_gen_ir = gen_ir + header + file_name + ".cl"
    dumpRunCommand(cmd_gen_ir, file_name, ".clang.log")


def asm_ir(file_name):
    """Assemble file_name + '.ll' into bitcode with llvm-as."""
    if os.path.isfile(file_name + ".ll"):
        # Command to assemble IR to bitcode
        gen_bc = "llvm-as "
        gen_bc_src = file_name + ".ll"
        gen_bc_dst = file_name + ".bc"
        cmd_gen_bc = gen_bc + gen_bc_src + " -o " + gen_bc_dst
        runCommand(cmd_gen_bc.split())


def opt_bc(file_name):
    """Run opt --mem2reg over file_name + '.bc', producing *.opt.bc."""
    if os.path.isfile(file_name + ".bc"):
        # Command to optmize bitcode
        opt_bc = "opt --mem2reg "
        opt_ir_src = file_name + ".bc"
        opt_ir_dst = file_name + ".opt.bc"
        cmd_opt_bc = opt_bc + opt_ir_src + " -o " + opt_ir_dst
        runCommand(cmd_opt_bc.split())


def dis_bc(file_name):
    """Disassemble file_name + '.opt.bc' back to readable IR (*.opt.ll)."""
    if os.path.isfile(file_name + ".bc"):
        # Command to disassemble bitcode
        dis_bc = "llvm-dis "
        dis_ir_src = file_name + ".opt.bc"
        dis_ir_dst = file_name + ".opt.ll"
        cmd_dis_bc = dis_bc + dis_ir_src + " -o " + dis_ir_dst
        runCommand(cmd_dis_bc.split())


def m2c_gen(file_name):
    """Translate the optimized bitcode to SI assembly with m2c --llvm2si."""
    if os.path.isfile(file_name + ".opt.bc"):
        # Command to disassemble bitcode
        m2c_gen = "m2c --llvm2si "
        m2c_gen_src = file_name + ".opt.bc"
        cmd_m2c_gen = m2c_gen + m2c_gen_src
        dumpRunCommand(cmd_m2c_gen, file_name, ".m2c.llvm2si.log")

    # Remove file if size is 0
    if os.path.isfile(file_name + ".opt.s"):
        if os.path.getsize(file_name + ".opt.s") == 0:
            rmFile(file_name + ".opt.s")


def m2c_bin(file_name):
    """Assemble the SI assembly to a binary with m2c --si2bin."""
    if os.path.isfile(file_name + ".opt.s"):
        # Command to disassemble bitcode
        m2c_bin = "m2c --si2bin "
        m2c_bin_src = file_name + ".opt.s"
        cmd_m2c_bin = m2c_bin + m2c_bin_src
        dumpRunCommand(cmd_m2c_bin, file_name, ".m2c.si2bin.log")


def main():
    """Run the whole pipeline for every *.cl file in the current directory."""
    # Commands
    for file in os.listdir("./"):
        if file.endswith(".cl"):
            file_name = os.path.splitext(file)[0]
            # Execute commands
            gen_ir(file_name)
            rnm_ir(file_name)
            asm_ir(file_name)
            opt_bc(file_name)
            dis_bc(file_name)
            m2c_gen(file_name)
            m2c_bin(file_name)


if __name__ == "__main__":
    main()
gpl-2.0
hermanlee/mysql-5.6
xtrabackup/test/kewpie/lib/util/xtrabackup_methods.py
26
4388
#! /usr/bin/env python
# -*- mode: python; indent-tabs-mode: nil; -*-
# vim:expandtab:shiftwidth=2:tabstop=2:smarttab:
#
# Copyright (C) 2011 Patrick Crews
#
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA

"""Helpers for driving innobackupex/xtrabackup in tests: run a command,
capture its output to a log file, and return (returncode, output)."""

import os
import subprocess


def execute_cmd(cmd, exec_path, outfile_path):
    """Run shell command 'cmd' with cwd 'exec_path', teeing combined
    stdout/stderr into 'outfile_path'.

    Returns a (retcode, output) tuple where 'output' is the full captured
    text of the command.
    """
    # BUG FIX: the original did ``outfile.close`` (missing parentheses) — a
    # no-op attribute access, so the log file handle was never closed and
    # the subsequent read could see unflushed data.  Context managers close
    # (and flush) both handles deterministically.
    with open(outfile_path, 'w') as outfile:
        cmd_subproc = subprocess.Popen( cmd
                                      , cwd = exec_path
                                      , shell=True
                                      , stdout = outfile
                                      , stderr = subprocess.STDOUT
                                      )
        cmd_subproc.wait()
        retcode = cmd_subproc.returncode
    with open(outfile_path, 'r') as in_file:
        output = ''.join(in_file.readlines())
    return retcode, output


def innobackupex_backup( innobackupex_path
                       , xtrabackup_path
                       , output_path
                       , server
                       , backup_path
                       , extra_opts=None):
    """ Use the innobackupex binary specified at
        system_manager.innobackupex_path to take a
        backup of the given server

    """
    cmd = "%s --defaults-file=%s --user=root --port=%d --host=127.0.0.1 --ibbackup=%s %s" %( innobackupex_path
                                                                                           , server.cnf_file
                                                                                           , server.master_port
                                                                                           , xtrabackup_path
                                                                                           , backup_path)
    if extra_opts:
        cmd = ' '.join([cmd, extra_opts])
    exec_path = os.path.dirname(innobackupex_path)
    retcode, output = execute_cmd(cmd, exec_path, output_path)
    return retcode, output


def innobackupex_prepare( innobackupex_path
                        , xtrabackup_path
                        , output_path
                        , backup_path
                        , use_mem='500M'
                        , extra_opts=None):
    """ Use innobackupex to prepare an xtrabackup backup file """
    cmd = "%s --apply-log --use-memory=%s --ibbackup=%s %s" %( innobackupex_path
                                                             , use_mem
                                                             , xtrabackup_path
                                                             , backup_path)
    if extra_opts:
        cmd = ' '.join([cmd, extra_opts])
    exec_path = os.path.dirname(innobackupex_path)
    retcode, output = execute_cmd(cmd, exec_path, output_path)
    return retcode, output


def innobackupex_restore( innobackupex_path
                        , xtrabackup_path
                        , output_path
                        , backup_path
                        , cnf_file
                        , use_mem='500M'
                        , extra_opts=None):
    """ Use innobackupex to restore a server from a prepared
        xtrabackup backup

    """
    cmd = "%s --defaults-file=%s --copy-back --ibbackup=%s %s" %( innobackupex_path
                                                                , cnf_file
                                                                , xtrabackup_path
                                                                , backup_path )
    if extra_opts:
        cmd = ' '.join([cmd, extra_opts])
    exec_path = os.path.dirname(innobackupex_path)
    retcode, output = execute_cmd(cmd, exec_path, output_path)
    return retcode, output
gpl-2.0
ghanashyamprabhu/linux
scripts/gdb/linux/tasks.py
367
2552
#
# gdb helper commands and functions for Linux kernel debugging
#
#  task & thread tools
#
# Copyright (c) Siemens AG, 2011-2013
#
# Authors:
#  Jan Kiszka <jan.kiszka@siemens.com>
#
# This work is licensed under the terms of the GNU GPL version 2.
#

import gdb

from linux import utils

# Lazily-resolved gdb type for struct task_struct (resolved on first use).
task_type = utils.CachedType("struct task_struct")


def task_lists():
    """Yield every task_struct pointer known to the target kernel.

    Walks the circular 'tasks' list starting at init_task (one entry per
    thread-group leader) and, for each leader, the circular 'thread_group'
    list (one entry per thread).  Uses container_of() to map the embedded
    list_head pointers back to task_struct pointers.
    """
    global task_type
    task_ptr_type = task_type.get_type().pointer()
    init_task = gdb.parse_and_eval("init_task").address
    t = g = init_task
    while True:
        while True:
            yield t
            # Advance along the thread_group list; stop when we wrap back
            # to this group's leader.
            t = utils.container_of(t['thread_group']['next'],
                                   task_ptr_type, "thread_group")
            if t == g:
                break
        # Advance to the next thread-group leader; stop once we wrap back
        # to init_task.
        t = g = utils.container_of(g['tasks']['next'],
                                   task_ptr_type, "tasks")
        if t == init_task:
            return


def get_task_by_pid(pid):
    """Return the task_struct pointer whose 'pid' field matches, or None.

    Linear scan over all tasks — fine for interactive debugging use.
    """
    for task in task_lists():
        if int(task['pid']) == pid:
            return task
    return None


class LxTaskByPidFunc(gdb.Function):
    """Find Linux task by PID and return the task_struct variable.

$lx_task_by_pid(PID): Given PID, iterate over all tasks of the target and
return that task_struct variable which PID matches."""

    def __init__(self):
        super(LxTaskByPidFunc, self).__init__("lx_task_by_pid")

    def invoke(self, pid):
        task = get_task_by_pid(pid)
        if task:
            return task.dereference()
        else:
            raise gdb.GdbError("No task of PID " + str(pid))


# Instantiating the class registers the convenience function with gdb.
LxTaskByPidFunc()


thread_info_type = utils.CachedType("struct thread_info")

# Cached sizeof(struct task_struct), needed only on ia64 (see below).
ia64_task_size = None


def get_thread_info(task):
    """Return the (dereferenced) thread_info for the given task.

    On ia64 the thread_info sits directly after the task_struct in memory,
    so it is located by offsetting task.address by sizeof(task_struct); on
    all other architectures it lives at the base of the task's stack.
    """
    global thread_info_type
    thread_info_ptr_type = thread_info_type.get_type().pointer()
    if utils.is_target_arch("ia64"):
        global ia64_task_size
        if ia64_task_size is None:
            ia64_task_size = gdb.parse_and_eval("sizeof(struct task_struct)")
        thread_info_addr = task.address + ia64_task_size
        thread_info = thread_info_addr.cast(thread_info_ptr_type)
    else:
        thread_info = task['stack'].cast(thread_info_ptr_type)
    return thread_info.dereference()


class LxThreadInfoFunc (gdb.Function):
    """Calculate Linux thread_info from task variable.

$lx_thread_info(TASK): Given TASK, return the corresponding thread_info
variable."""

    def __init__(self):
        super(LxThreadInfoFunc, self).__init__("lx_thread_info")

    def invoke(self, task):
        return get_thread_info(task)


# Instantiating the class registers the convenience function with gdb.
LxThreadInfoFunc()
gpl-2.0
android-ia/platform_external_chromium_org
third_party/markupsafe/__init__.py
371
8205
# -*- coding: utf-8 -*-
"""
    markupsafe
    ~~~~~~~~~~

    Implements a Markup string.

    :copyright: (c) 2010 by Armin Ronacher.
    :license: BSD, see LICENSE for more details.
"""
import re
from markupsafe._compat import text_type, string_types, int_types, \
     unichr, PY2

__all__ = ['Markup', 'soft_unicode', 'escape', 'escape_silent']


# Matches HTML comments and tags (for striptags) and character entities
# (for unescape).
_striptags_re = re.compile(r'(<!--.*?-->|<[^>]*>)')
_entity_re = re.compile(r'&([^;]+);')


class Markup(text_type):
    r"""Marks a string as being safe for inclusion in HTML/XML output without
    needing to be escaped.  This implements the `__html__` interface a couple
    of frameworks and web applications use.  :class:`Markup` is a direct
    subclass of `unicode` and provides all the methods of `unicode` just that
    it escapes arguments passed and always returns `Markup`.

    The `escape` function returns markup objects so that double escaping can't
    happen.

    The constructor of the :class:`Markup` class can be used for three
    different things:  When passed an unicode object it's assumed to be safe,
    when passed an object with an HTML representation (has an `__html__`
    method) that representation is used, otherwise the object passed is
    converted into a unicode string and then assumed to be safe:

    >>> Markup("Hello <em>World</em>!")
    Markup(u'Hello <em>World</em>!')
    >>> class Foo(object):
    ...  def __html__(self):
    ...   return '<a href="#">foo</a>'
    ...
    >>> Markup(Foo())
    Markup(u'<a href="#">foo</a>')

    If you want object passed being always treated as unsafe you can use the
    :meth:`escape` classmethod to create a :class:`Markup` object:

    >>> Markup.escape("Hello <em>World</em>!")
    Markup(u'Hello &lt;em&gt;World&lt;/em&gt;!')

    Operations on a markup string are markup aware which means that all
    arguments are passed through the :func:`escape` function:

    >>> em = Markup("<em>%s</em>")
    >>> em % "foo & bar"
    Markup(u'<em>foo &amp; bar</em>')
    >>> strong = Markup("<strong>%(text)s</strong>")
    >>> strong % {'text': '<blink>hacker here</blink>'}
    Markup(u'<strong>&lt;blink&gt;hacker here&lt;/blink&gt;</strong>')
    >>> Markup("<em>Hello</em> ") + "<foo>"
    Markup(u'<em>Hello</em> &lt;foo&gt;')
    """
    # No per-instance __dict__; instances are immutable strings.
    __slots__ = ()

    def __new__(cls, base=u'', encoding=None, errors='strict'):
        # An object with __html__ supplies its own (already safe) markup.
        if hasattr(base, '__html__'):
            base = base.__html__()
        if encoding is None:
            return text_type.__new__(cls, base)
        return text_type.__new__(cls, base, encoding, errors)

    def __html__(self):
        return self

    def __add__(self, other):
        # Escape the other operand before concatenating so the result
        # stays safe.
        if isinstance(other, string_types) or hasattr(other, '__html__'):
            return self.__class__(super(Markup, self).__add__(self.escape(other)))
        return NotImplemented

    def __radd__(self, other):
        if hasattr(other, '__html__') or isinstance(other, string_types):
            return self.escape(other).__add__(self)
        return NotImplemented

    def __mul__(self, num):
        if isinstance(num, int_types):
            return self.__class__(text_type.__mul__(self, num))
        return NotImplemented
    __rmul__ = __mul__

    def __mod__(self, arg):
        # Wrap %-format arguments so each one is escaped when rendered.
        if isinstance(arg, tuple):
            arg = tuple(_MarkupEscapeHelper(x, self.escape) for x in arg)
        else:
            arg = _MarkupEscapeHelper(arg, self.escape)
        return self.__class__(text_type.__mod__(self, arg))

    def __repr__(self):
        return '%s(%s)' % (
            self.__class__.__name__,
            text_type.__repr__(self)
        )

    def join(self, seq):
        return self.__class__(text_type.join(self, map(self.escape, seq)))
    join.__doc__ = text_type.join.__doc__

    def split(self, *args, **kwargs):
        return list(map(self.__class__, text_type.split(self, *args, **kwargs)))
    split.__doc__ = text_type.split.__doc__

    def rsplit(self, *args, **kwargs):
        return list(map(self.__class__, text_type.rsplit(self, *args, **kwargs)))
    rsplit.__doc__ = text_type.rsplit.__doc__

    def splitlines(self, *args, **kwargs):
        return list(map(self.__class__, text_type.splitlines(self, *args, **kwargs)))
    splitlines.__doc__ = text_type.splitlines.__doc__

    def unescape(self):
        r"""Unescape markup again into an text_type string.  This also resolves
        known HTML4 and XHTML entities:

        >>> Markup("Main &raquo; <em>About</em>").unescape()
        u'Main \xbb <em>About</em>'
        """
        from markupsafe._constants import HTML_ENTITIES
        def handle_match(m):
            name = m.group(1)
            # Named entity, then numeric (&#xNN; / &#NN;); unknown entities
            # are silently dropped.
            if name in HTML_ENTITIES:
                return unichr(HTML_ENTITIES[name])
            try:
                if name[:2] in ('#x', '#X'):
                    return unichr(int(name[2:], 16))
                elif name.startswith('#'):
                    return unichr(int(name[1:]))
            except ValueError:
                pass
            return u''
        return _entity_re.sub(handle_match, text_type(self))

    def striptags(self):
        r"""Unescape markup into an text_type string and strip all tags.  This
        also resolves known HTML4 and XHTML entities.  Whitespace is
        normalized to one:

        >>> Markup("Main &raquo;  <em>About</em>").striptags()
        u'Main \xbb About'
        """
        stripped = u' '.join(_striptags_re.sub('', self).split())
        return Markup(stripped).unescape()

    @classmethod
    def escape(cls, s):
        """Escape the string.  Works like :func:`escape` with the difference
        that for subclasses of :class:`Markup` this function would return the
        correct subclass.
        """
        rv = escape(s)
        if rv.__class__ is not cls:
            return cls(rv)
        return rv

    # Factory producing escaping wrappers around unicode methods: positional
    # arguments are escaped before delegating, and the result is re-wrapped
    # in this class so it remains markup-safe.
    def make_wrapper(name):
        orig = getattr(text_type, name)
        def func(self, *args, **kwargs):
            args = _escape_argspec(list(args), enumerate(args), self.escape)
            #_escape_argspec(kwargs, kwargs.iteritems(), None)
            return self.__class__(orig(self, *args, **kwargs))
        func.__name__ = orig.__name__
        func.__doc__ = orig.__doc__
        return func

    # Installing the wrappers via locals() defines them as class attributes
    # at class-creation time (class bodies execute like module code).
    for method in '__getitem__', 'capitalize', \
                  'title', 'lower', 'upper', 'replace', 'ljust', \
                  'rjust', 'lstrip', 'rstrip', 'center', 'strip', \
                  'translate', 'expandtabs', 'swapcase', 'zfill':
        locals()[method] = make_wrapper(method)

    # new in python 2.5
    if hasattr(text_type, 'partition'):
        def partition(self, sep):
            return tuple(map(self.__class__,
                             text_type.partition(self, self.escape(sep))))
        def rpartition(self, sep):
            return tuple(map(self.__class__,
                             text_type.rpartition(self, self.escape(sep))))

    # new in python 2.6
    if hasattr(text_type, 'format'):
        format = make_wrapper('format')

    # not in python 3
    if hasattr(text_type, '__getslice__'):
        __getslice__ = make_wrapper('__getslice__')

    # Keep the class namespace clean: these helpers must not become
    # attributes of Markup.
    del method, make_wrapper


def _escape_argspec(obj, iterable, escape):
    """Helper for various string-wrapped functions."""
    # Escape every string-like (or __html__-bearing) value in place;
    # non-string values (ints, floats, ...) pass through untouched.
    for key, value in iterable:
        if hasattr(value, '__html__') or isinstance(value, string_types):
            obj[key] = escape(value)
    return obj


class _MarkupEscapeHelper(object):
    """Helper for Markup.__mod__"""
    # Proxies a %-format argument: whenever the formatting machinery
    # stringifies or indexes it, the wrapped value is escaped on the fly.

    def __init__(self, obj, escape):
        self.obj = obj
        self.escape = escape

    __getitem__ = lambda s, x: _MarkupEscapeHelper(s.obj[x], s.escape)
    __unicode__ = __str__ = lambda s: text_type(s.escape(s.obj))
    __repr__ = lambda s: str(s.escape(repr(s.obj)))
    __int__ = lambda s: int(s.obj)
    __float__ = lambda s: float(s.obj)


# we have to import it down here as the speedups and native
# modules imports the markup type which is define above.
try:
    from markupsafe._speedups import escape, escape_silent, soft_unicode
except ImportError:
    from markupsafe._native import escape, escape_silent, soft_unicode

if not PY2:
    soft_str = soft_unicode
    __all__.append('soft_str')
bsd-3-clause
4eek/edx-platform
lms/djangoapps/certificates/api.py
19
15049
"""Certificates API

This is a Python API for generating certificates asynchronously.
Other Django apps should use the API functions defined in this module
rather than importing Django models directly.
"""
import logging

from django.conf import settings
from django.core.urlresolvers import reverse
from eventtracking import tracker
from opaque_keys.edx.keys import CourseKey
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from xmodule.modulestore.django import modulestore
from xmodule_django.models import CourseKeyField
from util.organizations_helpers import get_course_organizations

from certificates.models import (
    CertificateStatuses,
    certificate_status_for_student,
    CertificateGenerationCourseSetting,
    CertificateGenerationConfiguration,
    ExampleCertificateSet,
    GeneratedCertificate,
    CertificateTemplate,
)
from certificates.queue import XQueueCertInterface


log = logging.getLogger("edx.certificate")


def get_certificates_for_user(username):
    """
    Retrieve certificate information for a particular user.

    Arguments:
        username (unicode): The identifier of the user.

    Returns: list

    Example Usage:
    >>> get_certificates_for_user("bob")
    [
        {
            "username": "bob",
            "course_key": "edX/DemoX/Demo_Course",
            "type": "verified",
            "status": "downloadable",
            "download_url": "http://www.example.com/cert.pdf",
            "grade": "0.98",
            "created": 2015-07-31T00:00:00Z,
            "modified": 2015-07-31T00:00:00Z
        }
    ]

    """
    return [
        {
            "username": username,
            "course_key": cert.course_id,
            "type": cert.mode,
            "status": cert.status,
            "grade": cert.grade,
            "created": cert.created_date,
            "modified": cert.modified_date,

            # NOTE: the download URL is not currently being set for webview certificates.
            # In the future, we can update this to construct a URL to the webview certificate
            # for courses that have this feature enabled.
            "download_url": (
                cert.download_url
                if cert.status == CertificateStatuses.downloadable
                else None
            ),
        }
        for cert in GeneratedCertificate.objects.filter(user__username=username).order_by("course_id")
    ]


def generate_user_certificates(student, course_key, course=None, insecure=False, generation_mode='batch',
                               forced_grade=None):
    """
    It will add the add-cert request into the xqueue.

    A new record will be created to track the certificate
    generation task.  If an error occurs while adding the certificate
    to the queue, the task will have status 'error'. It also emits
    `edx.certificate.created` event for analytics.

    Args:
        student (User)
        course_key (CourseKey)

    Keyword Arguments:
        course (Course): Optionally provide the course object; if not provided
            it will be loaded.
        insecure - (Boolean)
        generation_mode - who has requested certificate generation. Its value should `batch`
        in case of django command and `self` if student initiated the request.
        forced_grade - a string indicating to replace grade parameter. if present grading
                       will be skipped.
    """
    xqueue = XQueueCertInterface()
    if insecure:
        xqueue.use_https = False
    # PDF generation is skipped when the course renders HTML (webview) certs.
    generate_pdf = not has_html_certificates_enabled(course_key, course)
    status, cert = xqueue.add_cert(student, course_key, course=course, generate_pdf=generate_pdf,
                                   forced_grade=forced_grade)
    # Emit the analytics event only when the cert actually entered the
    # pipeline (queued) or already exists (downloadable).
    if status in [CertificateStatuses.generating, CertificateStatuses.downloadable]:
        emit_certificate_event('created', student, course_key, course, {
            'user_id': student.id,
            'course_id': unicode(course_key),
            'certificate_id': cert.verify_uuid,
            'enrollment_mode': cert.mode,
            'generation_mode': generation_mode
        })
    return status


def regenerate_user_certificates(student, course_key, course=None,
                                 forced_grade=None, template_file=None, insecure=False):
    """
    It will add the regen-cert request into the xqueue.

    A new record will be created to track the certificate
    generation task.  If an error occurs while adding the certificate
    to the queue, the task will have status 'error'.

    Args:
        student (User)
        course_key (CourseKey)

    Keyword Arguments:
        course (Course): Optionally provide the course object; if not provided
            it will be loaded.
        grade_value - The grade string, such as "Distinction"
        template_file - The template file used to render this certificate
        insecure - (Boolean)
    """
    xqueue = XQueueCertInterface()
    if insecure:
        xqueue.use_https = False
    generate_pdf = not has_html_certificates_enabled(course_key, course)
    return xqueue.regen_cert(
        student,
        course_key,
        course=course,
        forced_grade=forced_grade,
        template_file=template_file,
        generate_pdf=generate_pdf
    )


def certificate_downloadable_status(student, course_key):
    """
    Check the student existing certificates against a given course.
    if status is not generating and not downloadable or error then user can view the generate button.

    Args:
        student (user object): logged-in user
        course_key (CourseKey): ID associated with the course

    Returns:
        Dict containing student passed status also download url for cert if available
    """
    current_status = certificate_status_for_student(student, course_key)

    # If the certificate status is an error user should view that status is "generating".
    # On the back-end, need to monitor those errors and re-submit the task.

    response_data = {
        'is_downloadable': False,
        'is_generating': True if current_status['status'] in [CertificateStatuses.generating,
                                                              CertificateStatuses.error] else False,
        'download_url': None
    }

    if current_status['status'] == CertificateStatuses.downloadable:
        response_data['is_downloadable'] = True
        response_data['download_url'] = current_status['download_url']

    return response_data


def set_cert_generation_enabled(course_key, is_enabled):
    """Enable or disable self-generated certificates for a course.

    There are two "switches" that control whether self-generated certificates
    are enabled for a course:

    1) Whether the self-generated certificates feature is enabled.
    2) Whether self-generated certificates have been enabled for this particular course.

    The second flag should be enabled *only* when someone has successfully
    generated example certificates for the course.  This helps avoid
    configuration errors (for example, not having a template configured
    for the course installed on the workers).  The UI for the instructor
    dashboard enforces this constraint.

    Arguments:
        course_key (CourseKey): The course identifier.

    Keyword Arguments:
        is_enabled (boolean): If provided, enable/disable self-generated
            certificates for this course.

    """
    CertificateGenerationCourseSetting.set_enabled_for_course(course_key, is_enabled)
    # Emit an analytics event for the toggle.
    cert_event_type = 'enabled' if is_enabled else 'disabled'
    event_name = '.'.join(['edx', 'certificate', 'generation', cert_event_type])
    tracker.emit(event_name, {
        'course_id': unicode(course_key),
    })
    if is_enabled:
        log.info(u"Enabled self-generated certificates for course '%s'.", unicode(course_key))
    else:
        log.info(u"Disabled self-generated certificates for course '%s'.", unicode(course_key))


def cert_generation_enabled(course_key):
    """Check whether certificate generation is enabled for a course.

    There are two "switches" that control whether self-generated certificates
    are enabled for a course:

    1) Whether the self-generated certificates feature is enabled.
    2) Whether self-generated certificates have been enabled for this particular course.

    Certificates are enabled for a course only when both switches
    are set to True.

    Arguments:
        course_key (CourseKey): The course identifier.

    Returns:
        boolean: Whether self-generated certificates are enabled
            for the course.

    """
    return (
        CertificateGenerationConfiguration.current().enabled and
        CertificateGenerationCourseSetting.is_enabled_for_course(course_key)
    )


def generate_example_certificates(course_key):
    """Generate example certificates for a course.

    Example certificates are used to validate that certificates
    are configured correctly for the course.  Staff members can
    view the example certificates before enabling
    the self-generated certificates button for students.

    Several example certificates may be generated for a course.
    For example, if a course offers both verified and honor certificates,
    examples of both types of certificate will be generated.

    If an error occurs while starting the certificate generation
    job, the errors will be recorded in the database and
    can be retrieved using `example_certificate_status()`.

    Arguments:
        course_key (CourseKey): The course identifier.

    Returns:
        None

    """
    xqueue = XQueueCertInterface()
    for cert in ExampleCertificateSet.create_example_set(course_key):
        xqueue.add_example_cert(cert)


def has_html_certificates_enabled(course_key, course=None):
    """
    Determine if a course has html certificates enabled.

    Arguments:
        course_key (CourseKey|str): A course key or a string representation
            of one.
        course (CourseDescriptor|CourseOverview): A course.
    """
    html_certificates_enabled = False
    try:
        if not isinstance(course_key, CourseKey):
            course_key = CourseKey.from_string(course_key)
        course = course if course else CourseOverview.get_from_id(course_key)
        if settings.FEATURES.get('CERTIFICATES_HTML_VIEW', False) and course.cert_html_view_enabled:
            html_certificates_enabled = True
    # NOTE(review): this bare except silently swallows *all* errors (bad
    # course keys, missing CourseOverview rows, DB failures) and reports
    # "disabled" — consider narrowing to the expected exception types.
    except:  # pylint: disable=bare-except
        pass
    return html_certificates_enabled


def example_certificates_status(course_key):
    """Check the status of example certificates for a course.

    This will check the *latest* example certificate task.
    This is generally what we care about in terms of enabling/disabling
    self-generated certificates for a course.

    Arguments:
        course_key (CourseKey): The course identifier.

    Returns:
        list

    Example Usage:

        >>> from certificates import api as certs_api
        >>> certs_api.example_certificate_status(course_key)
        [
            {
                'description': 'honor',
                'status': 'success',
                'download_url': 'http://www.example.com/abcd/honor_cert.pdf'
            },
            {
                'description': 'verified',
                'status': 'error',
                'error_reason': 'No template found!'
            }
        ]

    """
    return ExampleCertificateSet.latest_status(course_key)


def get_certificate_url(user_id, course_id):
    """
    :return certificate url
    """
    url = ""
    if settings.FEATURES.get('CERTIFICATES_HTML_VIEW', False):
        # Webview certificates link to the HTML rendering view.
        url = reverse(
            'certificates:html_view',
            kwargs={
                "user_id": str(user_id),
                "course_id": unicode(course_id),
            }
        )
    else:
        # PDF certificates expose the stored download URL, if any.
        try:
            if isinstance(course_id, basestring):
                course_id = CourseKey.from_string(course_id)
            user_certificate = GeneratedCertificate.objects.get(
                user=user_id,
                course_id=course_id
            )
            url = user_certificate.download_url
        except GeneratedCertificate.DoesNotExist:
            log.critical(
                'Unable to lookup certificate\n'
                'user id: %d\n'
                'course: %s', user_id, unicode(course_id)
            )
    return url


def get_active_web_certificate(course, is_preview_mode=None):
    """
    Retrieves the active web certificate configuration for the specified course
    """
    certificates = getattr(course, 'certificates', '{}')
    configurations = certificates.get('certificates', [])
    # In preview mode any configuration is acceptable; otherwise only an
    # explicitly active one.
    for config in configurations:
        if config.get('is_active') or is_preview_mode:
            return config
    return None


def get_certificate_template(course_key, mode):
    """
    Retrieves the custom certificate template based on course_key and mode.
    """
    org_id, template = None, None
    # fetch organization of the course
    course_organization = get_course_organizations(course_key)
    if course_organization:
        org_id = course_organization[0]['id']

    # Fallback chain, most specific first:
    # (org, course, mode) -> (org, mode) -> (org) -> (mode)
    if org_id and mode:
        template = CertificateTemplate.objects.filter(
            organization_id=org_id,
            course_key=course_key,
            mode=mode,
            is_active=True
        )
    # if template not found by org, course and mode, try org and mode
    if not template and org_id and mode:
        template = CertificateTemplate.objects.filter(
            organization_id=org_id,
            course_key=CourseKeyField.Empty,
            mode=mode,
            is_active=True
        )
    # if template still not found, try by org only
    if not template and org_id:
        template = CertificateTemplate.objects.filter(
            organization_id=org_id,
            course_key=CourseKeyField.Empty,
            mode=None,
            is_active=True
        )
    # if we still don't have a template, try by mode only
    if not template and mode:
        template = CertificateTemplate.objects.filter(
            organization_id=None,
            course_key=CourseKeyField.Empty,
            mode=mode,
            is_active=True
        )
    return template[0].template if template else None


def emit_certificate_event(event_name, user, course_id, course=None, event_data=None):
    """
    Emits certificate event.
    """
    event_name = '.'.join(['edx', 'certificate', event_name])
    if course is None:
        course = modulestore().get_course(course_id, depth=0)
    context = {
        'org_id': course.org,
        'course_id': unicode(course_id)
    }
    data = {
        'user_id': user.id,
        'course_id': unicode(course_id),
        'certificate_url': get_certificate_url(user.id, course_id)
    }
    event_data = event_data or {}
    event_data.update(data)

    with tracker.get_tracker().context(event_name, context):
        tracker.emit(event_name, event_data)
agpl-3.0
iaddict/mercurial.rb
vendor/mercurial/mercurial/commandserver.py
93
6720
# commandserver.py - communicate with Mercurial's API over a pipe
#
# Copyright Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from i18n import _
import struct
import sys, os
import dispatch, encoding, util

# Module-level log sink.  None disables logging; server.__init__ rebinds it
# (via "global") to either an append-mode file or a channeledoutput on the
# debug channel, depending on the "cmdserver.log" config value.
logfile = None

def log(*args):
    """Write str(a) for every argument to the module logfile, if enabled."""
    if not logfile:
        return

    for a in args:
        logfile.write(str(a))

    logfile.flush()

class channeledoutput(object):
    """
    Write data from in_ to out in the following format:

    data length (unsigned int),
    data

    Each write() emits one frame: a 1-byte channel identifier followed by a
    big-endian unsigned length, then the payload.  Unknown attributes are
    delegated to in_ so this object can stand in for a plain file.
    """
    def __init__(self, in_, out, channel):
        self.in_ = in_          # fallback target for attribute delegation
        self.out = out          # stream that carries the framed protocol
        self.channel = channel  # one-byte channel id, e.g. 'o', 'e', 'r', 'd'

    def write(self, data):
        # Suppress empty writes: a zero-length frame carries no information
        # and would just be protocol noise for the client.
        if not data:
            return
        # '>cI' = big-endian: 1 channel byte + 4-byte unsigned payload length.
        self.out.write(struct.pack('>cI', self.channel, len(data)))
        self.out.write(data)
        self.out.flush()

    def __getattr__(self, attr):
        # Deliberately refuse isatty/fileno so callers treat this as a
        # non-terminal stream instead of reaching the delegated object.
        if attr in ('isatty', 'fileno'):
            raise AttributeError(attr)
        return getattr(self.in_, attr)

class channeledinput(object):
    """
    Read data from in_.

    Requests for input are written to out in the following format:
    channel identifier - 'I' for plain input, 'L' line based (1 byte)
    how many bytes to send at most (unsigned int),

    The client replies with:
    data length (unsigned int), 0 meaning EOF
    data
    """

    # When asked to "read everything", request this much per round trip so
    # the pipe doesn't fill up and deadlock (see read() below).
    maxchunksize = 4 * 1024

    def __init__(self, in_, out, channel):
        self.in_ = in_          # fallback target for attribute delegation
        self.out = out          # stream used to send input *requests*
        self.channel = channel  # normally 'I' (plain input requests)

    def read(self, size=-1):
        """Read up to size bytes; size < 0 means read until client EOF."""
        if size < 0:
            # if we need to consume all the clients input, ask for 4k chunks
            # so the pipe doesn't fill up risking a deadlock
            size = self.maxchunksize
            s = self._read(size, self.channel)
            buf = s
            # An empty chunk signals EOF from the client.
            while s:
                s = self._read(size, self.channel)
                buf += s
            return buf
        else:
            return self._read(size, self.channel)

    def _read(self, size, channel):
        """One request/response round trip: ask for size bytes on channel."""
        if not size:
            return ''
        assert size > 0

        # tell the client we need at most size bytes
        self.out.write(struct.pack('>cI', channel, size))
        self.out.flush()

        # Client replies with a 4-byte big-endian length, then the payload.
        length = self.in_.read(4)
        length = struct.unpack('>I', length)[0]
        if not length:
            # length 0 is the protocol's EOF marker
            return ''
        else:
            return self.in_.read(length)

    def readline(self, size=-1):
        """Read one line (channel 'L'); size < 0 keeps asking per chunk."""
        if size < 0:
            size = self.maxchunksize
            s = self._read(size, 'L')
            buf = s
            # keep asking for more until there's either no more or
            # we got a full line
            while s and s[-1] != '\n':
                s = self._read(size, 'L')
                buf += s
            return buf
        else:
            return self._read(size, 'L')

    def __iter__(self):
        return self

    def next(self):
        # Python 2 iterator protocol: yield lines until an empty read.
        l = self.readline()
        if not l:
            raise StopIteration
        return l

    def __getattr__(self, attr):
        # Same delegation policy as channeledoutput: never claim to be a tty.
        if attr in ('isatty', 'fileno'):
            raise AttributeError(attr)
        return getattr(self.in_, attr)

class server(object):
    """
    Listens for commands on stdin, runs them and writes the output on a
    channel based stream to stdout.
    """
    def __init__(self, ui, repo, mode):
        # Remember the starting directory so runcommand() can restore it
        # after a command that used --cwd.
        self.cwd = os.getcwd()

        logpath = ui.config("cmdserver", "log", None)
        if logpath:
            global logfile
            if logpath == '-':
                # write log on a special 'd' (debug) channel
                logfile = channeledoutput(sys.stdout, sys.stdout, 'd')
            else:
                logfile = open(logpath, 'a')

        # the ui here is really the repo ui so take its baseui so we don't end
        # up with its local configuration
        self.ui = repo.baseui
        self.repo = repo
        self.repoui = repo.ui

        if mode == 'pipe':
            # All output channels share the same underlying stdout pipe;
            # the channel byte ('e'/'o'/'r') tells the client which is which.
            self.cerr = channeledoutput(sys.stderr, sys.stdout, 'e')
            self.cout = channeledoutput(sys.stdout, sys.stdout, 'o')
            self.cin = channeledinput(sys.stdin, sys.stdout, 'I')
            self.cresult = channeledoutput(sys.stdout, sys.stdout, 'r')

            self.client = sys.stdin
        else:
            raise util.Abort(_('unknown mode %s') % mode)

    def _read(self, size):
        """Read exactly from the client pipe; raise EOFError on disconnect."""
        if not size:
            return ''

        data = self.client.read(size)

        # is the other end closed?
        if not data:
            raise EOFError

        return data

    def runcommand(self):
        """ reads a list of \0 terminated arguments, executes
        and writes the return code to the result channel """

        # 4-byte big-endian length prefix, then NUL-separated argv.
        length = struct.unpack('>I', self._read(4))[0]
        if not length:
            args = []
        else:
            args = self._read(length).split('\0')

        # copy the uis so changes (e.g. --config or --verbose) don't
        # persist between requests
        copiedui = self.ui.copy()
        self.repo.baseui = copiedui
        # NOTE(review): pokes the private dirstate._ui — presumably so
        # dirstate messages use the fresh ui copy too; confirm against the
        # dirstate implementation.
        self.repo.ui = self.repo.dirstate._ui = self.repoui.copy()
        # Drop cached repo state so each request sees the repo afresh.
        self.repo.invalidate()
        self.repo.invalidatedirstate()

        req = dispatch.request(args[:], copiedui, self.repo, self.cin,
                               self.cout, self.cerr)

        ret = dispatch.dispatch(req) or 0 # might return None

        # restore old cwd
        if '--cwd' in args:
            os.chdir(self.cwd)

        # Result channel carries the signed exit code.
        self.cresult.write(struct.pack('>i', int(ret)))

    def getencoding(self):
        """ writes the current encoding to the result channel """
        self.cresult.write(encoding.encoding)

    def serveone(self):
        """Handle one command line from the client; return False on EOF."""
        cmd = self.client.readline()[:-1]
        if cmd:
            handler = self.capabilities.get(cmd)
            if handler:
                # capabilities holds plain functions, so pass self explicitly.
                handler(self)
            else:
                # clients are expected to check what commands are supported by
                # looking at the servers capabilities
                raise util.Abort(_('unknown command %s') % cmd)

        return cmd != ''

    # Command name -> handler; also advertised verbatim in the hello message.
    capabilities = {'runcommand' : runcommand,
                    'getencoding' : getencoding}

    def serve(self):
        """Send the hello message, then loop over requests until EOF."""
        hellomsg = 'capabilities: ' + ' '.join(sorted(self.capabilities))
        hellomsg += '\n'
        hellomsg += 'encoding: ' + encoding.encoding

        # write the hello msg in -one- chunk
        self.cout.write(hellomsg)

        try:
            while self.serveone():
                pass
        except EOFError:
            # we'll get here if the client disconnected while we were reading
            # its request
            return 1

        return 0
mit
vinnyoodles/algorithms
python/dcp/problem6.py
1
1498
# This problem was asked by Google.
#
# An XOR linked list is a more memory efficient doubly linked list.
# Instead of each node holding next and prev fields, it holds a field named
# both, which is a XOR of the next node and the previous node.
# Implement a XOR linked list; it has an add(element) which adds the element
# to the end, and a get(index) which returns the node at index.
#
# If using a language that has no pointers (such as Python),
# assume you have access to get_pointer and dereference_pointer functions
# that converts between nodes and memory addresses.

# Registry mapping "address" -> node.  CPython's id() doubles as a stable
# integer address, but only while the object is alive; the registry keeps a
# strong reference so addresses are never reused out from under us.
# (Consequence: nodes are never garbage collected — acceptable here.)
_nodes = {}


def get_pointer(node):
    """Return a stable integer address for node (registering it)."""
    addr = id(node)
    _nodes[addr] = node
    return addr


def dereference_pointer(addr):
    """Return the node previously registered at integer address addr."""
    return _nodes[addr]


class ListNode(object):
    """One list node: an element plus the XOR of both neighbour pointers.

    both == ptr(prev) ^ ptr(next); an absent neighbour contributes 0.
    """

    def __init__(self, element):
        self.element = element
        # 0, not None: a missing neighbour must be the XOR identity so the
        # pointer arithmetic works at both ends of the list.
        self.both = 0


class XORLinkedList(object):
    """Doubly linked list storing one XORed pointer field per node."""

    def __init__(self):
        self.head = None  # first node, or None when empty
        self.size = 0     # number of nodes currently in the list

    def add(self, element):
        """Append element at the tail (O(n): the tail is found by walking)."""
        node = ListNode(element)
        if self.head is None:
            self.head = node
            self.size += 1
            return
        # Bug fix: original called get(size - 1) without self., a NameError.
        tail = self.get(self.size - 1)
        # New tail: prev is the old tail, no next (contributes 0 to the XOR).
        node.both = get_pointer(tail)
        # Old tail gains a next neighbour: fold its pointer into the XOR.
        tail.both = tail.both ^ get_pointer(node)
        self.size += 1

    def get(self, index):
        """Return the node at index, or None if index is out of range."""
        # Bug fix: original line was missing both the ':' and the self.
        if index < 0 or index >= self.size:
            return None
        node = self.head
        prev_p = 0  # address of the node before `node`; 0 at the head
        for _ in range(index):
            # prev ^ (prev ^ next) == next
            next_p = prev_p ^ node.both
            next_node = dereference_pointer(next_p)
            # Bug fix: original set prev_p to the *next-next* pointer
            # (next_node.both ^ ptr(node)); the walker needs ptr(node).
            prev_p = get_pointer(node)
            node = next_node
        return node
mit
mmmavis/lightbeam-bedrock-website
vendor-local/packages/python-memcached/memcache.py
57
49300
#!/usr/bin/env python """ client module for memcached (memory cache daemon) Overview ======== See U{the MemCached homepage<http://www.danga.com/memcached>} for more about memcached. Usage summary ============= This should give you a feel for how this module operates:: import memcache mc = memcache.Client(['127.0.0.1:11211'], debug=0) mc.set("some_key", "Some value") value = mc.get("some_key") mc.set("another_key", 3) mc.delete("another_key") mc.set("key", "1") # note that the key used for incr/decr must be a string. mc.incr("key") mc.decr("key") The standard way to use memcache with a database is like this:: key = derive_key(obj) obj = mc.get(key) if not obj: obj = backend_api.get(...) mc.set(key, obj) # we now have obj, and future passes through this code # will use the object from the cache. Detailed Documentation ====================== More detailed documentation is available in the L{Client} class. """ import sys import socket import time import os import re try: import cPickle as pickle except ImportError: import pickle from binascii import crc32 # zlib version is not cross-platform def cmemcache_hash(key): return((((crc32(key) & 0xffffffff) >> 16) & 0x7fff) or 1) serverHashFunction = cmemcache_hash def useOldServerHashFunction(): """Use the old python-memcache server hash function.""" global serverHashFunction serverHashFunction = crc32 try: from zlib import compress, decompress _supports_compress = True except ImportError: _supports_compress = False # quickly define a decompress just in case we recv compressed data. 
def decompress(val): raise _Error("received compressed data but I don't support compression (import error)") try: from cStringIO import StringIO except ImportError: from StringIO import StringIO # Original author: Evan Martin of Danga Interactive __author__ = "Sean Reifschneider <jafo-memcached@tummy.com>" __version__ = "1.48" __copyright__ = "Copyright (C) 2003 Danga Interactive" # http://en.wikipedia.org/wiki/Python_Software_Foundation_License __license__ = "Python Software Foundation License" SERVER_MAX_KEY_LENGTH = 250 # Storing values larger than 1MB requires recompiling memcached. If you do, # this value can be changed by doing "memcache.SERVER_MAX_VALUE_LENGTH = N" # after importing this module. SERVER_MAX_VALUE_LENGTH = 1024*1024 class _Error(Exception): pass class _ConnectionDeadError(Exception): pass try: # Only exists in Python 2.4+ from threading import local except ImportError: # TODO: add the pure-python local implementation class local(object): pass _DEAD_RETRY = 30 # number of seconds before retrying a dead server. _SOCKET_TIMEOUT = 3 # number of seconds before sockets timeout. class Client(local): """ Object representing a pool of memcache servers. See L{memcache} for an overview. In all cases where a key is used, the key can be either: 1. A simple hashable type (string, integer, etc.). 2. A tuple of C{(hashvalue, key)}. This is useful if you want to avoid making this module calculate a hash value. You may prefer, for example, to keep all of a given user's objects on the same memcache server, so you could use the user's unique id as the hash value. 
@group Setup: __init__, set_servers, forget_dead_hosts, disconnect_all, debuglog @group Insertion: set, add, replace, set_multi @group Retrieval: get, get_multi @group Integers: incr, decr @group Removal: delete, delete_multi @sort: __init__, set_servers, forget_dead_hosts, disconnect_all, debuglog,\ set, set_multi, add, replace, get, get_multi, incr, decr, delete, delete_multi """ _FLAG_PICKLE = 1<<0 _FLAG_INTEGER = 1<<1 _FLAG_LONG = 1<<2 _FLAG_COMPRESSED = 1<<3 _SERVER_RETRIES = 10 # how many times to try finding a free server. # exceptions for Client class MemcachedKeyError(Exception): pass class MemcachedKeyLengthError(MemcachedKeyError): pass class MemcachedKeyCharacterError(MemcachedKeyError): pass class MemcachedKeyNoneError(MemcachedKeyError): pass class MemcachedKeyTypeError(MemcachedKeyError): pass class MemcachedStringEncodingError(Exception): pass def __init__(self, servers, debug=0, pickleProtocol=0, pickler=pickle.Pickler, unpickler=pickle.Unpickler, pload=None, pid=None, server_max_key_length=SERVER_MAX_KEY_LENGTH, server_max_value_length=SERVER_MAX_VALUE_LENGTH, dead_retry=_DEAD_RETRY, socket_timeout=_SOCKET_TIMEOUT, cache_cas = False): """ Create a new Client object with the given list of servers. @param servers: C{servers} is passed to L{set_servers}. @param debug: whether to display error messages when a server can't be contacted. @param pickleProtocol: number to mandate protocol used by (c)Pickle. @param pickler: optional override of default Pickler to allow subclassing. @param unpickler: optional override of default Unpickler to allow subclassing. @param pload: optional persistent_load function to call on pickle loading. Useful for cPickle since subclassing isn't allowed. @param pid: optional persistent_id function to call on pickle storing. Useful for cPickle since subclassing isn't allowed. @param dead_retry: number of seconds before retrying a blacklisted server. Default to 30 s. 
@param socket_timeout: timeout in seconds for all calls to a server. Defaults to 3 seconds. @param cache_cas: (default False) If true, cas operations will be cached. WARNING: This cache is not expired internally, if you have a long-running process you will need to expire it manually via "client.reset_cas(), or the cache can grow unlimited. @param server_max_key_length: (default SERVER_MAX_KEY_LENGTH) Data that is larger than this will not be sent to the server. @param server_max_value_length: (default SERVER_MAX_VALUE_LENGTH) Data that is larger than this will not be sent to the server. """ local.__init__(self) self.debug = debug self.dead_retry = dead_retry self.socket_timeout = socket_timeout self.set_servers(servers) self.stats = {} self.cache_cas = cache_cas self.reset_cas() # Allow users to modify pickling/unpickling behavior self.pickleProtocol = pickleProtocol self.pickler = pickler self.unpickler = unpickler self.persistent_load = pload self.persistent_id = pid self.server_max_key_length = server_max_key_length self.server_max_value_length = server_max_value_length # figure out the pickler style file = StringIO() try: pickler = self.pickler(file, protocol = self.pickleProtocol) self.picklerIsKeyword = True except TypeError: self.picklerIsKeyword = False def reset_cas(self): """ Reset the cas cache. This is only used if the Client() object was created with "cache_cas=True". If used, this cache does not expire internally, so it can grow unbounded if you do not clear it yourself. """ self.cas_ids = {} def set_servers(self, servers): """ Set the pool of servers used by this client. @param servers: an array of servers. Servers can be passed in two forms: 1. Strings of the form C{"host:port"}, which implies a default weight of 1. 2. Tuples of the form C{("host:port", weight)}, where C{weight} is an integer weight value. 
""" self.servers = [_Host(s, self.debug, dead_retry=self.dead_retry, socket_timeout=self.socket_timeout) for s in servers] self._init_buckets() def get_stats(self, stat_args = None): '''Get statistics from each of the servers. @param stat_args: Additional arguments to pass to the memcache "stats" command. @return: A list of tuples ( server_identifier, stats_dictionary ). The dictionary contains a number of name/value pairs specifying the name of the status field and the string value associated with it. The values are not converted from strings. ''' data = [] for s in self.servers: if not s.connect(): continue if s.family == socket.AF_INET: name = '%s:%s (%s)' % ( s.ip, s.port, s.weight ) else: name = 'unix:%s (%s)' % ( s.address, s.weight ) if not stat_args: s.send_cmd('stats') else: s.send_cmd('stats ' + stat_args) serverData = {} data.append(( name, serverData )) readline = s.readline while 1: line = readline() if not line or line.strip() == 'END': break stats = line.split(' ', 2) serverData[stats[1]] = stats[2] return(data) def get_slabs(self): data = [] for s in self.servers: if not s.connect(): continue if s.family == socket.AF_INET: name = '%s:%s (%s)' % ( s.ip, s.port, s.weight ) else: name = 'unix:%s (%s)' % ( s.address, s.weight ) serverData = {} data.append(( name, serverData )) s.send_cmd('stats items') readline = s.readline while 1: line = readline() if not line or line.strip() == 'END': break item = line.split(' ', 2) #0 = STAT, 1 = ITEM, 2 = Value slab = item[1].split(':', 2) #0 = items, 1 = Slab #, 2 = Name if slab[1] not in serverData: serverData[slab[1]] = {} serverData[slab[1]][slab[2]] = item[2] return data def flush_all(self): 'Expire all data currently in the memcache servers.' 
for s in self.servers: if not s.connect(): continue s.send_cmd('flush_all') s.expect("OK") def debuglog(self, str): if self.debug: sys.stderr.write("MemCached: %s\n" % str) def _statlog(self, func): if func not in self.stats: self.stats[func] = 1 else: self.stats[func] += 1 def forget_dead_hosts(self): """ Reset every host in the pool to an "alive" state. """ for s in self.servers: s.deaduntil = 0 def _init_buckets(self): self.buckets = [] for server in self.servers: for i in range(server.weight): self.buckets.append(server) def _get_server(self, key): if isinstance(key, tuple): serverhash, key = key else: serverhash = serverHashFunction(key) for i in range(Client._SERVER_RETRIES): server = self.buckets[serverhash % len(self.buckets)] if server.connect(): #print "(using server %s)" % server, return server, key serverhash = serverHashFunction(str(serverhash) + str(i)) return None, None def disconnect_all(self): for s in self.servers: s.close_socket() def delete_multi(self, keys, time=0, key_prefix=''): ''' Delete multiple keys in the memcache doing just one query. >>> notset_keys = mc.set_multi({'key1' : 'val1', 'key2' : 'val2'}) >>> mc.get_multi(['key1', 'key2']) == {'key1' : 'val1', 'key2' : 'val2'} 1 >>> mc.delete_multi(['key1', 'key2']) 1 >>> mc.get_multi(['key1', 'key2']) == {} 1 This method is recommended over iterated regular L{delete}s as it reduces total latency, since your app doesn't have to wait for each round-trip of L{delete} before sending the next one. @param keys: An iterable of keys to clear @param time: number of seconds any subsequent set / update commands should fail. Defaults to 0 for no delay. @param key_prefix: Optional string to prepend to each key when sending to memcache. See docs for L{get_multi} and L{set_multi}. @return: 1 if no failure in communication with any memcacheds. 
@rtype: int ''' self._statlog('delete_multi') server_keys, prefixed_to_orig_key = self._map_and_prefix_keys(keys, key_prefix) # send out all requests on each server before reading anything dead_servers = [] rc = 1 for server in server_keys.iterkeys(): bigcmd = [] write = bigcmd.append if time != None: for key in server_keys[server]: # These are mangled keys write("delete %s %d\r\n" % (key, time)) else: for key in server_keys[server]: # These are mangled keys write("delete %s\r\n" % key) try: server.send_cmds(''.join(bigcmd)) except socket.error, msg: rc = 0 if isinstance(msg, tuple): msg = msg[1] server.mark_dead(msg) dead_servers.append(server) # if any servers died on the way, don't expect them to respond. for server in dead_servers: del server_keys[server] for server, keys in server_keys.iteritems(): try: for key in keys: server.expect("DELETED") except socket.error, msg: if isinstance(msg, tuple): msg = msg[1] server.mark_dead(msg) rc = 0 return rc def delete(self, key, time=0): '''Deletes a key from the memcache. @return: Nonzero on success. @param time: number of seconds any subsequent set / update commands should fail. Defaults to None for no delay. @rtype: int ''' self.check_key(key) server, key = self._get_server(key) if not server: return 0 self._statlog('delete') if time != None and time != 0: cmd = "delete %s %d" % (key, time) else: cmd = "delete %s" % key try: server.send_cmd(cmd) line = server.readline() if line and line.strip() in ['DELETED', 'NOT_FOUND']: return 1 self.debuglog('Delete expected DELETED or NOT_FOUND, got: %s' % repr(line)) except socket.error, msg: if isinstance(msg, tuple): msg = msg[1] server.mark_dead(msg) return 0 def incr(self, key, delta=1): """ Sends a command to the server to atomically increment the value for C{key} by C{delta}, or by 1 if C{delta} is unspecified. Returns None if C{key} doesn't exist on server, otherwise it returns the new value after incrementing. 
Note that the value for C{key} must already exist in the memcache, and it must be the string representation of an integer. >>> mc.set("counter", "20") # returns 1, indicating success 1 >>> mc.incr("counter") 21 >>> mc.incr("counter") 22 Overflow on server is not checked. Be aware of values approaching 2**32. See L{decr}. @param delta: Integer amount to increment by (should be zero or greater). @return: New value after incrementing. @rtype: int """ return self._incrdecr("incr", key, delta) def decr(self, key, delta=1): """ Like L{incr}, but decrements. Unlike L{incr}, underflow is checked and new values are capped at 0. If server value is 1, a decrement of 2 returns 0, not -1. @param delta: Integer amount to decrement by (should be zero or greater). @return: New value after decrementing. @rtype: int """ return self._incrdecr("decr", key, delta) def _incrdecr(self, cmd, key, delta): self.check_key(key) server, key = self._get_server(key) if not server: return 0 self._statlog(cmd) cmd = "%s %s %d" % (cmd, key, delta) try: server.send_cmd(cmd) line = server.readline() if line == None or line.strip() =='NOT_FOUND': return None return int(line) except socket.error, msg: if isinstance(msg, tuple): msg = msg[1] server.mark_dead(msg) return None def add(self, key, val, time = 0, min_compress_len = 0): ''' Add new key with value. Like L{set}, but only stores in memcache if the key doesn't already exist. @return: Nonzero on success. @rtype: int ''' return self._set("add", key, val, time, min_compress_len) def append(self, key, val, time=0, min_compress_len=0): '''Append the value to the end of the existing key's value. Only stores in memcache if key already exists. Also see L{prepend}. @return: Nonzero on success. @rtype: int ''' return self._set("append", key, val, time, min_compress_len) def prepend(self, key, val, time=0, min_compress_len=0): '''Prepend the value to the beginning of the existing key's value. Only stores in memcache if key already exists. 
Also see L{append}. @return: Nonzero on success. @rtype: int ''' return self._set("prepend", key, val, time, min_compress_len) def replace(self, key, val, time=0, min_compress_len=0): '''Replace existing key with value. Like L{set}, but only stores in memcache if the key already exists. The opposite of L{add}. @return: Nonzero on success. @rtype: int ''' return self._set("replace", key, val, time, min_compress_len) def set(self, key, val, time=0, min_compress_len=0): '''Unconditionally sets a key to a given value in the memcache. The C{key} can optionally be an tuple, with the first element being the server hash value and the second being the key. If you want to avoid making this module calculate a hash value. You may prefer, for example, to keep all of a given user's objects on the same memcache server, so you could use the user's unique id as the hash value. @return: Nonzero on success. @rtype: int @param time: Tells memcached the time which this value should expire, either as a delta number of seconds, or an absolute unix time-since-the-epoch value. See the memcached protocol docs section "Storage Commands" for more info on <exptime>. We default to 0 == cache forever. @param min_compress_len: The threshold length to kick in auto-compression of the value using the zlib.compress() routine. If the value being cached is a string, then the length of the string is measured, else if the value is an object, then the length of the pickle result is measured. If the resulting attempt at compression yeilds a larger string than the input, then it is discarded. For backwards compatability, this parameter defaults to 0, indicating don't ever try to compress. ''' return self._set("set", key, val, time, min_compress_len) def cas(self, key, val, time=0, min_compress_len=0): '''Sets a key to a given value in the memcache if it hasn't been altered since last fetched. (See L{gets}). 
The C{key} can optionally be an tuple, with the first element being the server hash value and the second being the key. If you want to avoid making this module calculate a hash value. You may prefer, for example, to keep all of a given user's objects on the same memcache server, so you could use the user's unique id as the hash value. @return: Nonzero on success. @rtype: int @param time: Tells memcached the time which this value should expire, either as a delta number of seconds, or an absolute unix time-since-the-epoch value. See the memcached protocol docs section "Storage Commands" for more info on <exptime>. We default to 0 == cache forever. @param min_compress_len: The threshold length to kick in auto-compression of the value using the zlib.compress() routine. If the value being cached is a string, then the length of the string is measured, else if the value is an object, then the length of the pickle result is measured. If the resulting attempt at compression yeilds a larger string than the input, then it is discarded. For backwards compatability, this parameter defaults to 0, indicating don't ever try to compress. ''' return self._set("cas", key, val, time, min_compress_len) def _map_and_prefix_keys(self, key_iterable, key_prefix): """Compute the mapping of server (_Host instance) -> list of keys to stuff onto that server, as well as the mapping of prefixed key -> original key. """ # Check it just once ... key_extra_len=len(key_prefix) if key_prefix: self.check_key(key_prefix) # server (_Host) -> list of unprefixed server keys in mapping server_keys = {} prefixed_to_orig_key = {} # build up a list for each server of all the keys we want. for orig_key in key_iterable: if isinstance(orig_key, tuple): # Tuple of hashvalue, key ala _get_server(). Caller is essentially telling us what server to stuff this on. # Ensure call to _get_server gets a Tuple as well. 
str_orig_key = str(orig_key[1]) server, key = self._get_server((orig_key[0], key_prefix + str_orig_key)) # Gotta pre-mangle key before hashing to a server. Returns the mangled key. else: str_orig_key = str(orig_key) # set_multi supports int / long keys. server, key = self._get_server(key_prefix + str_orig_key) # Now check to make sure key length is proper ... self.check_key(str_orig_key, key_extra_len=key_extra_len) if not server: continue if server not in server_keys: server_keys[server] = [] server_keys[server].append(key) prefixed_to_orig_key[key] = orig_key return (server_keys, prefixed_to_orig_key) def set_multi(self, mapping, time=0, key_prefix='', min_compress_len=0): ''' Sets multiple keys in the memcache doing just one query. >>> notset_keys = mc.set_multi({'key1' : 'val1', 'key2' : 'val2'}) >>> mc.get_multi(['key1', 'key2']) == {'key1' : 'val1', 'key2' : 'val2'} 1 This method is recommended over regular L{set} as it lowers the number of total packets flying around your network, reducing total latency, since your app doesn't have to wait for each round-trip of L{set} before sending the next one. @param mapping: A dict of key/value pairs to set. @param time: Tells memcached the time which this value should expire, either as a delta number of seconds, or an absolute unix time-since-the-epoch value. See the memcached protocol docs section "Storage Commands" for more info on <exptime>. We default to 0 == cache forever. @param key_prefix: Optional string to prepend to each key when sending to memcache. Allows you to efficiently stuff these keys into a pseudo-namespace in memcache: >>> notset_keys = mc.set_multi({'key1' : 'val1', 'key2' : 'val2'}, key_prefix='subspace_') >>> len(notset_keys) == 0 True >>> mc.get_multi(['subspace_key1', 'subspace_key2']) == {'subspace_key1' : 'val1', 'subspace_key2' : 'val2'} True Causes key 'subspace_key1' and 'subspace_key2' to be set. Useful in conjunction with a higher-level layer which applies namespaces to data in memcache. 
In this case, the return result would be the list of notset original keys, prefix not applied. @param min_compress_len: The threshold length to kick in auto-compression of the value using the zlib.compress() routine. If the value being cached is a string, then the length of the string is measured, else if the value is an object, then the length of the pickle result is measured. If the resulting attempt at compression yeilds a larger string than the input, then it is discarded. For backwards compatability, this parameter defaults to 0, indicating don't ever try to compress. @return: List of keys which failed to be stored [ memcache out of memory, etc. ]. @rtype: list ''' self._statlog('set_multi') server_keys, prefixed_to_orig_key = self._map_and_prefix_keys(mapping.iterkeys(), key_prefix) # send out all requests on each server before reading anything dead_servers = [] notstored = [] # original keys. for server in server_keys.iterkeys(): bigcmd = [] write = bigcmd.append try: for key in server_keys[server]: # These are mangled keys store_info = self._val_to_store_info( mapping[prefixed_to_orig_key[key]], min_compress_len) if store_info: write("set %s %d %d %d\r\n%s\r\n" % (key, store_info[0], time, store_info[1], store_info[2])) else: notstored.append(prefixed_to_orig_key[key]) server.send_cmds(''.join(bigcmd)) except socket.error, msg: if isinstance(msg, tuple): msg = msg[1] server.mark_dead(msg) dead_servers.append(server) # if any servers died on the way, don't expect them to respond. for server in dead_servers: del server_keys[server] # short-circuit if there are no servers, just return all keys if not server_keys: return(mapping.keys()) for server, keys in server_keys.iteritems(): try: for key in keys: line = server.readline() if line == 'STORED': continue else: notstored.append(prefixed_to_orig_key[key]) #un-mangle. 
except (_Error, socket.error), msg: if isinstance(msg, tuple): msg = msg[1] server.mark_dead(msg) return notstored def _val_to_store_info(self, val, min_compress_len): """ Transform val to a storable representation, returning a tuple of the flags, the length of the new value, and the new value itself. """ flags = 0 if isinstance(val, str): pass elif isinstance(val, int): flags |= Client._FLAG_INTEGER val = "%d" % val # force no attempt to compress this silly string. min_compress_len = 0 elif isinstance(val, long): flags |= Client._FLAG_LONG val = "%d" % val # force no attempt to compress this silly string. min_compress_len = 0 else: flags |= Client._FLAG_PICKLE file = StringIO() if self.picklerIsKeyword: pickler = self.pickler(file, protocol = self.pickleProtocol) else: pickler = self.pickler(file, self.pickleProtocol) if self.persistent_id: pickler.persistent_id = self.persistent_id pickler.dump(val) val = file.getvalue() lv = len(val) # We should try to compress if min_compress_len > 0 and we could # import zlib and this string is longer than our min threshold. if min_compress_len and _supports_compress and lv > min_compress_len: comp_val = compress(val) # Only retain the result if the compression result is smaller # than the original. 
if len(comp_val) < lv: flags |= Client._FLAG_COMPRESSED val = comp_val # silently do not store if value length exceeds maximum if self.server_max_value_length != 0 and \ len(val) > self.server_max_value_length: return(0) return (flags, len(val), val) def _set(self, cmd, key, val, time, min_compress_len = 0): self.check_key(key) server, key = self._get_server(key) if not server: return 0 def _unsafe_set(): self._statlog(cmd) store_info = self._val_to_store_info(val, min_compress_len) if not store_info: return(0) if cmd == 'cas': if key not in self.cas_ids: return self._set('set', key, val, time, min_compress_len) fullcmd = "%s %s %d %d %d %d\r\n%s" % ( cmd, key, store_info[0], time, store_info[1], self.cas_ids[key], store_info[2]) else: fullcmd = "%s %s %d %d %d\r\n%s" % ( cmd, key, store_info[0], time, store_info[1], store_info[2]) try: server.send_cmd(fullcmd) return(server.expect("STORED") == "STORED") except socket.error, msg: if isinstance(msg, tuple): msg = msg[1] server.mark_dead(msg) return 0 try: return _unsafe_set() except _ConnectionDeadError: # retry once try: server._get_socket() return _unsafe_set() except (_ConnectionDeadError, socket.error), msg: server.mark_dead(msg) return 0 def _get(self, cmd, key): self.check_key(key) server, key = self._get_server(key) if not server: return None def _unsafe_get(): self._statlog(cmd) try: server.send_cmd("%s %s" % (cmd, key)) rkey = flags = rlen = cas_id = None if cmd == 'gets': rkey, flags, rlen, cas_id, = self._expect_cas_value(server) if rkey and self.cache_cas: self.cas_ids[rkey] = cas_id else: rkey, flags, rlen, = self._expectvalue(server) if not rkey: return None try: value = self._recv_value(server, flags, rlen) finally: server.expect("END") except (_Error, socket.error), msg: if isinstance(msg, tuple): msg = msg[1] server.mark_dead(msg) return None return value try: return _unsafe_get() except _ConnectionDeadError: # retry once try: if server.connect(): return _unsafe_get() return None except 
(_ConnectionDeadError, socket.error), msg: server.mark_dead(msg) return None def get(self, key): '''Retrieves a key from the memcache. @return: The value or None. ''' return self._get('get', key) def gets(self, key): '''Retrieves a key from the memcache. Used in conjunction with 'cas'. @return: The value or None. ''' return self._get('gets', key) def get_multi(self, keys, key_prefix=''): ''' Retrieves multiple keys from the memcache doing just one query. >>> success = mc.set("foo", "bar") >>> success = mc.set("baz", 42) >>> mc.get_multi(["foo", "baz", "foobar"]) == {"foo": "bar", "baz": 42} 1 >>> mc.set_multi({'k1' : 1, 'k2' : 2}, key_prefix='pfx_') == [] 1 This looks up keys 'pfx_k1', 'pfx_k2', ... . Returned dict will just have unprefixed keys 'k1', 'k2'. >>> mc.get_multi(['k1', 'k2', 'nonexist'], key_prefix='pfx_') == {'k1' : 1, 'k2' : 2} 1 get_mult [ and L{set_multi} ] can take str()-ables like ints / longs as keys too. Such as your db pri key fields. They're rotored through str() before being passed off to memcache, with or without the use of a key_prefix. In this mode, the key_prefix could be a table name, and the key itself a db primary key number. >>> mc.set_multi({42: 'douglass adams', 46 : 'and 2 just ahead of me'}, key_prefix='numkeys_') == [] 1 >>> mc.get_multi([46, 42], key_prefix='numkeys_') == {42: 'douglass adams', 46 : 'and 2 just ahead of me'} 1 This method is recommended over regular L{get} as it lowers the number of total packets flying around your network, reducing total latency, since your app doesn't have to wait for each round-trip of L{get} before sending the next one. See also L{set_multi}. @param keys: An array of keys. @param key_prefix: A string to prefix each key when we communicate with memcache. Facilitates pseudo-namespaces within memcache. Returned dictionary keys will not have this prefix. @return: A dictionary of key/value pairs that were available. 
If key_prefix was provided, the keys in the retured dictionary will not have it present. ''' self._statlog('get_multi') server_keys, prefixed_to_orig_key = self._map_and_prefix_keys(keys, key_prefix) # send out all requests on each server before reading anything dead_servers = [] for server in server_keys.iterkeys(): try: server.send_cmd("get %s" % " ".join(server_keys[server])) except socket.error, msg: if isinstance(msg, tuple): msg = msg[1] server.mark_dead(msg) dead_servers.append(server) # if any servers died on the way, don't expect them to respond. for server in dead_servers: del server_keys[server] retvals = {} for server in server_keys.iterkeys(): try: line = server.readline() while line and line != 'END': rkey, flags, rlen = self._expectvalue(server, line) # Bo Yang reports that this can sometimes be None if rkey is not None: val = self._recv_value(server, flags, rlen) retvals[prefixed_to_orig_key[rkey]] = val # un-prefix returned key. line = server.readline() except (_Error, socket.error), msg: if isinstance(msg, tuple): msg = msg[1] server.mark_dead(msg) return retvals def _expect_cas_value(self, server, line=None): if not line: line = server.readline() if line and line[:5] == 'VALUE': resp, rkey, flags, len, cas_id = line.split() return (rkey, int(flags), int(len), int(cas_id)) else: return (None, None, None, None) def _expectvalue(self, server, line=None): if not line: line = server.readline() if line and line[:5] == 'VALUE': resp, rkey, flags, len = line.split() flags = int(flags) rlen = int(len) return (rkey, flags, rlen) else: return (None, None, None) def _recv_value(self, server, flags, rlen): rlen += 2 # include \r\n buf = server.recv(rlen) if len(buf) != rlen: raise _Error("received %d bytes when expecting %d" % (len(buf), rlen)) if len(buf) == rlen: buf = buf[:-2] # strip \r\n if flags & Client._FLAG_COMPRESSED: buf = decompress(buf) if flags == 0 or flags == Client._FLAG_COMPRESSED: # Either a bare string or a compressed string now 
decompressed... val = buf elif flags & Client._FLAG_INTEGER: val = int(buf) elif flags & Client._FLAG_LONG: val = long(buf) elif flags & Client._FLAG_PICKLE: try: file = StringIO(buf) unpickler = self.unpickler(file) if self.persistent_load: unpickler.persistent_load = self.persistent_load val = unpickler.load() except Exception, e: self.debuglog('Pickle error: %s\n' % e) return None else: self.debuglog("unknown flags on get: %x\n" % flags) return val def check_key(self, key, key_extra_len=0): """Checks sanity of key. Fails if: Key length is > SERVER_MAX_KEY_LENGTH (Raises MemcachedKeyLength). Contains control characters (Raises MemcachedKeyCharacterError). Is not a string (Raises MemcachedStringEncodingError) Is an unicode string (Raises MemcachedStringEncodingError) Is not a string (Raises MemcachedKeyError) Is None (Raises MemcachedKeyError) """ if isinstance(key, tuple): key = key[1] if not key: raise Client.MemcachedKeyNoneError("Key is None") if isinstance(key, unicode): raise Client.MemcachedStringEncodingError( "Keys must be str()'s, not unicode. 
Convert your unicode " "strings using mystring.encode(charset)!") if not isinstance(key, str): raise Client.MemcachedKeyTypeError("Key must be str()'s") if isinstance(key, basestring): if self.server_max_key_length != 0 and \ len(key) + key_extra_len > self.server_max_key_length: raise Client.MemcachedKeyLengthError("Key length is > %s" % self.server_max_key_length) for char in key: if ord(char) < 33 or ord(char) == 127: raise Client.MemcachedKeyCharacterError( "Control characters not allowed") class _Host(object): def __init__(self, host, debug=0, dead_retry=_DEAD_RETRY, socket_timeout=_SOCKET_TIMEOUT): self.dead_retry = dead_retry self.socket_timeout = socket_timeout self.debug = debug if isinstance(host, tuple): host, self.weight = host else: self.weight = 1 # parse the connection string m = re.match(r'^(?P<proto>unix):(?P<path>.*)$', host) if not m: m = re.match(r'^(?P<proto>inet):' r'(?P<host>[^:]+)(:(?P<port>[0-9]+))?$', host) if not m: m = re.match(r'^(?P<host>[^:]+)(:(?P<port>[0-9]+))?$', host) if not m: raise ValueError('Unable to parse connection string: "%s"' % host) hostData = m.groupdict() if hostData.get('proto') == 'unix': self.family = socket.AF_UNIX self.address = hostData['path'] else: self.family = socket.AF_INET self.ip = hostData['host'] self.port = int(hostData.get('port', 11211)) self.address = ( self.ip, self.port ) self.deaduntil = 0 self.socket = None self.buffer = '' def debuglog(self, str): if self.debug: sys.stderr.write("MemCached: %s\n" % str) def _check_dead(self): if self.deaduntil and self.deaduntil > time.time(): return 1 self.deaduntil = 0 return 0 def connect(self): if self._get_socket(): return 1 return 0 def mark_dead(self, reason): self.debuglog("MemCache: %s: %s. Marking dead." 
% (self, reason)) self.deaduntil = time.time() + self.dead_retry self.close_socket() def _get_socket(self): if self._check_dead(): return None if self.socket: return self.socket s = socket.socket(self.family, socket.SOCK_STREAM) if hasattr(s, 'settimeout'): s.settimeout(self.socket_timeout) try: s.connect(self.address) except socket.timeout, msg: self.mark_dead("connect: %s" % msg) return None except socket.error, msg: if isinstance(msg, tuple): msg = msg[1] self.mark_dead("connect: %s" % msg[1]) return None self.socket = s self.buffer = '' return s def close_socket(self): if self.socket: self.socket.close() self.socket = None def send_cmd(self, cmd): self.socket.sendall(cmd + '\r\n') def send_cmds(self, cmds): """ cmds already has trailing \r\n's applied """ self.socket.sendall(cmds) def readline(self): buf = self.buffer recv = self.socket.recv while True: index = buf.find('\r\n') if index >= 0: break data = recv(4096) if not data: # connection close, let's kill it and raise self.close_socket() raise _ConnectionDeadError() buf += data self.buffer = buf[index+2:] return buf[:index] def expect(self, text): line = self.readline() if line != text: self.debuglog("while expecting '%s', got unexpected response '%s'" % (text, line)) return line def recv(self, rlen): self_socket_recv = self.socket.recv buf = self.buffer while len(buf) < rlen: foo = self_socket_recv(max(rlen - len(buf), 4096)) buf += foo if not foo: raise _Error( 'Read %d bytes, expecting %d, ' 'read returned 0 length bytes' % ( len(buf), rlen )) self.buffer = buf[rlen:] return buf[:rlen] def __str__(self): d = '' if self.deaduntil: d = " (dead until %d)" % self.deaduntil if self.family == socket.AF_INET: return "inet:%s:%d%s" % (self.address[0], self.address[1], d) else: return "unix:%s%s" % (self.address, d) def _doctest(): import doctest, memcache servers = ["127.0.0.1:11211"] mc = Client(servers, debug=1) globs = {"mc": mc} return doctest.testmod(memcache, globs=globs) if __name__ == "__main__": 
failures = 0 print "Testing docstrings..." _doctest() print "Running tests:" print serverList = [["127.0.0.1:11211"]] if '--do-unix' in sys.argv: serverList.append([os.path.join(os.getcwd(), 'memcached.socket')]) for servers in serverList: mc = Client(servers, debug=1) def to_s(val): if not isinstance(val, basestring): return "%s (%s)" % (val, type(val)) return "%s" % val def test_setget(key, val): global failures print "Testing set/get {'%s': %s} ..." % (to_s(key), to_s(val)), mc.set(key, val) newval = mc.get(key) if newval == val: print "OK" return 1 else: print "FAIL"; failures = failures + 1 return 0 class FooStruct(object): def __init__(self): self.bar = "baz" def __str__(self): return "A FooStruct" def __eq__(self, other): if isinstance(other, FooStruct): return self.bar == other.bar return 0 test_setget("a_string", "some random string") test_setget("an_integer", 42) if test_setget("long", long(1<<30)): print "Testing delete ...", if mc.delete("long"): print "OK" else: print "FAIL"; failures = failures + 1 print "Checking results of delete ..." if mc.get("long") == None: print "OK" else: print "FAIL"; failures = failures + 1 print "Testing get_multi ...", print mc.get_multi(["a_string", "an_integer"]) # removed from the protocol #if test_setget("timed_delete", 'foo'): # print "Testing timed delete ...", # if mc.delete("timed_delete", 1): # print "OK" # else: # print "FAIL"; failures = failures + 1 # print "Checking results of timed delete ..." 
# if mc.get("timed_delete") == None: # print "OK" # else: # print "FAIL"; failures = failures + 1 print "Testing get(unknown value) ...", print to_s(mc.get("unknown_value")) f = FooStruct() test_setget("foostruct", f) print "Testing incr ...", x = mc.incr("an_integer", 1) if x == 43: print "OK" else: print "FAIL"; failures = failures + 1 print "Testing decr ...", x = mc.decr("an_integer", 1) if x == 42: print "OK" else: print "FAIL"; failures = failures + 1 sys.stdout.flush() # sanity tests print "Testing sending spaces...", sys.stdout.flush() try: x = mc.set("this has spaces", 1) except Client.MemcachedKeyCharacterError, msg: print "OK" else: print "FAIL"; failures = failures + 1 print "Testing sending control characters...", try: x = mc.set("this\x10has\x11control characters\x02", 1) except Client.MemcachedKeyCharacterError, msg: print "OK" else: print "FAIL"; failures = failures + 1 print "Testing using insanely long key...", try: x = mc.set('a'*SERVER_MAX_KEY_LENGTH, 1) except Client.MemcachedKeyLengthError, msg: print "FAIL"; failures = failures + 1 else: print "OK" try: x = mc.set('a'*SERVER_MAX_KEY_LENGTH + 'a', 1) except Client.MemcachedKeyLengthError, msg: print "OK" else: print "FAIL"; failures = failures + 1 print "Testing sending a unicode-string key...", try: x = mc.set(u'keyhere', 1) except Client.MemcachedStringEncodingError, msg: print "OK", else: print "FAIL",; failures = failures + 1 try: x = mc.set((u'a'*SERVER_MAX_KEY_LENGTH).encode('utf-8'), 1) except: print "FAIL",; failures = failures + 1 else: print "OK", import pickle s = pickle.loads('V\\u4f1a\np0\n.') try: x = mc.set((s*SERVER_MAX_KEY_LENGTH).encode('utf-8'), 1) except Client.MemcachedKeyLengthError: print "OK" else: print "FAIL"; failures = failures + 1 print "Testing using a value larger than the memcached value limit...", x = mc.set('keyhere', 'a'*SERVER_MAX_VALUE_LENGTH) if mc.get('keyhere') == None: print "OK", else: print "FAIL",; failures = failures + 1 x = mc.set('keyhere', 
'a'*SERVER_MAX_VALUE_LENGTH + 'aaa') if mc.get('keyhere') == None: print "OK" else: print "FAIL"; failures = failures + 1 print "Testing set_multi() with no memcacheds running", mc.disconnect_all() errors = mc.set_multi({'keyhere' : 'a', 'keythere' : 'b'}) if errors != []: print "FAIL"; failures = failures + 1 else: print "OK" print "Testing delete_multi() with no memcacheds running", mc.disconnect_all() ret = mc.delete_multi({'keyhere' : 'a', 'keythere' : 'b'}) if ret != 1: print "FAIL"; failures = failures + 1 else: print "OK" if failures > 0: print '*** THERE WERE FAILED TESTS' sys.exit(1) sys.exit(0) # vim: ts=4 sw=4 et :
mpl-2.0
mohamed--abdel-maksoud/chromium.src
tools/telemetry/telemetry/core/platform/process_statistic_timeline_data_unittest.py
52
1662
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import unittest

from telemetry.core.platform import process_statistic_timeline_data

# Shorthand for the class under test.
_TimelineData = process_statistic_timeline_data.ProcessStatisticTimelineData


class ProcessStatisticTimelineDataTest(unittest.TestCase):
  """Tests per-pid arithmetic on ProcessStatisticTimelineData values."""

  def testProcessStatisticValueMath(self):
    pid1 = 1
    pid2 = 2
    five_for_pid1 = _TimelineData(pid1, 5)
    one_for_pid2 = _TimelineData(pid2, 1)
    one_for_pid1 = _TimelineData(pid1, 1)

    # Addition keeps values for distinct pids separate.
    summed = (five_for_pid1 + one_for_pid2).value_by_pid
    self.assertEquals(5, summed[pid1])
    self.assertEquals(1, summed[pid2])
    self.assertEquals(2, len(summed.keys()))

    # Subtraction only decrements the pid present on both sides.
    diff = ((five_for_pid1 + one_for_pid2) - one_for_pid1).value_by_pid
    self.assertEquals(4, diff[pid1])
    self.assertEquals(1, diff[pid2])
    self.assertEquals(2, len(diff.keys()))

    # A pid that exists only on the right-hand side is dropped.
    rhs_only = (five_for_pid1 - (one_for_pid2 + one_for_pid1)).value_by_pid
    self.assertEquals(4, rhs_only[pid1])
    self.assertEquals(1, len(rhs_only.keys()))

    # total_sum() adds the values across every pid.
    self.assertEquals(6, (five_for_pid1 + one_for_pid2).total_sum())

  def testProcessStatisticValueSummary(self):
    pid1 = 1
    pid2 = 2
    lhs = _TimelineData(pid1, 1)
    rhs = _TimelineData(pid2, 99)

    combined = lhs + rhs
    self.assertEquals(100, combined.total_sum())
bsd-3-clause
subdownloader/subdownloader
subdownloader/client/cli/state.py
3
1443
# -*- coding: utf-8 -*-
# Copyright (c) 2019 SubDownloader Developers - See COPYING - GPLv3

from subdownloader.client.state import BaseState, SubtitlePathStrategy
from subdownloader.client.player import VideoPlayer


class CliState(BaseState):
    """Client state for the command line interface.

    Holds the cli-specific flags (console, interactive, list-languages,
    recursive) on top of the shared BaseState. Unlike the GUI client, the
    cli never loads persisted settings from file.
    """

    def __init__(self):
        BaseState.__init__(self)
        self._interactive = False
        self._console = False
        self._recursive = False
        # FIX: previously only assigned in load_options(), so calling
        # get_list_languages() before load_options() raised AttributeError.
        self._list_languages = False
        # In cli mode, subtitles are always stored next to their video.
        self.set_subtitle_download_path_strategy(SubtitlePathStrategy.SAME)
        # FIXME: log state

    def load_settings(self, settings):
        """Deliberately a no-op: do not load settings from file in cli."""
        # BaseState.load_settings(settings)
        pass

    def load_options(self, options):
        """Copy the parsed command line options into this state object."""
        BaseState.load_options(self, options)
        self._console = options.program.client.cli.console
        self._interactive = options.program.client.cli.interactive
        self._list_languages = options.program.client.cli.list_languages
        self._recursive = options.search.recursive
        self.set_subtitle_naming_strategy(options.download.naming_strategy)
        self.set_videoplayer(VideoPlayer.find())

    def get_console(self):
        """Return True when plain console output was requested."""
        return self._console

    def get_interactive(self):
        """Return True when the cli should prompt the user interactively."""
        return self._interactive

    def get_list_languages(self):
        """Return True when the cli should only list available languages."""
        return self._list_languages

    def get_recursive(self):
        """Return True when video paths are searched recursively."""
        return self._recursive

    def set_recursive(self, recursive):
        """Set whether video paths are searched recursively."""
        self._recursive = recursive
gpl-3.0
Ballz0fSteel/Umeko
lib/youtube_dl/extractor/sandia.py
53
2311
# coding: utf-8 from __future__ import unicode_literals import json from .common import InfoExtractor from ..utils import ( int_or_none, mimetype2ext, ) class SandiaIE(InfoExtractor): IE_DESC = 'Sandia National Laboratories' _VALID_URL = r'https?://digitalops\.sandia\.gov/Mediasite/Play/(?P<id>[0-9a-f]+)' _TEST = { 'url': 'http://digitalops.sandia.gov/Mediasite/Play/24aace4429fc450fb5b38cdbf424a66e1d', 'md5': '9422edc9b9a60151727e4b6d8bef393d', 'info_dict': { 'id': '24aace4429fc450fb5b38cdbf424a66e1d', 'ext': 'mp4', 'title': 'Xyce Software Training - Section 1', 'description': 're:(?s)SAND Number: SAND 2013-7800.{200,}', 'upload_date': '20120409', 'timestamp': 1333983600, 'duration': 7794, } } def _real_extract(self, url): video_id = self._match_id(url) presentation_data = self._download_json( 'http://digitalops.sandia.gov/Mediasite/PlayerService/PlayerService.svc/json/GetPlayerOptions', video_id, data=json.dumps({ 'getPlayerOptionsRequest': { 'ResourceId': video_id, 'QueryString': '', } }), headers={ 'Content-Type': 'application/json; charset=utf-8', })['d']['Presentation'] title = presentation_data['Title'] formats = [] for stream in presentation_data.get('Streams', []): for fd in stream.get('VideoUrls', []): formats.append({ 'format_id': fd['MediaType'], 'format_note': fd['MimeType'].partition('/')[2], 'ext': mimetype2ext(fd['MimeType']), 'url': fd['Location'], 'protocol': 'f4m' if fd['MimeType'] == 'video/x-mp4-fragmented' else None, }) self._sort_formats(formats) return { 'id': video_id, 'title': title, 'description': presentation_data.get('Description'), 'formats': formats, 'timestamp': int_or_none(presentation_data.get('UnixTime'), 1000), 'duration': int_or_none(presentation_data.get('Duration'), 1000), }
gpl-3.0
lache/RacingKingLee
monitor/engine.win64/2.74/python/lib/site-packages/requests/packages/chardet/langcyrillicmodel.py
2762
17725
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # KOI8-R language model # Character Mapping Table: KOI8R_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, # 80 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, # 90 223,224,225, 
68,226,227,228,229,230,231,232,233,234,235,236,237, # a0 238,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253, # b0 27, 3, 21, 28, 13, 2, 39, 19, 26, 4, 23, 11, 8, 12, 5, 1, # c0 15, 16, 9, 7, 6, 14, 24, 10, 17, 18, 20, 25, 30, 29, 22, 54, # d0 59, 37, 44, 58, 41, 48, 53, 46, 55, 42, 60, 36, 49, 38, 31, 34, # e0 35, 43, 45, 32, 40, 52, 56, 33, 61, 62, 51, 57, 47, 63, 50, 70, # f0 ) win1251_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 239,240,241,242,243,244,245,246, 68,247,248,249,250,251,252,253, 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, ) latin5_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 
69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, 239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, ) macCyrillic_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 239,240,241,242,243,244,245,246,247,248,249,250,251,252, 68, 16, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27,255, ) IBM855_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 
253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 191,192,193,194, 68,195,196,197,198,199,200,201,202,203,204,205, 206,207,208,209,210,211,212,213,214,215,216,217, 27, 59, 54, 70, 3, 37, 21, 44, 28, 58, 13, 41, 2, 48, 39, 53, 19, 46,218,219, 220,221,222,223,224, 26, 55, 4, 42,225,226,227,228, 23, 60,229, 230,231,232,233,234,235, 11, 36,236,237,238,239,240,241,242,243, 8, 49, 12, 38, 5, 31, 1, 34, 15,244,245,246,247, 35, 16,248, 43, 9, 45, 7, 32, 6, 40, 14, 52, 24, 56, 10, 33, 17, 61,249, 250, 18, 62, 20, 51, 25, 57, 30, 47, 29, 63, 22, 50,251,252,255, ) IBM866_CharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253,142,143,144,145,146,147,148,149,150,151,152, 74,153, 75,154, # 40 155,156,157,158,159,160,161,162,163,164,165,253,253,253,253,253, # 50 253, 71,172, 66,173, 65,174, 76,175, 64,176,177, 77, 72,178, 69, # 60 67,179, 78, 73,180,181, 79,182,183,184,185,253,253,253,253,253, # 70 37, 44, 33, 46, 41, 48, 56, 51, 42, 60, 36, 49, 38, 31, 34, 35, 45, 32, 40, 52, 53, 55, 58, 50, 57, 63, 70, 62, 61, 47, 59, 43, 3, 21, 10, 19, 13, 2, 24, 20, 4, 23, 11, 8, 12, 5, 1, 15, 191,192,193,194,195,196,197,198,199,200,201,202,203,204,205,206, 207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222, 223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238, 9, 7, 6, 14, 39, 26, 28, 22, 25, 29, 54, 18, 17, 30, 27, 16, 239, 68,240,241,242,243,244,245,246,247,248,249,250,251,252,255, ) # Model Table: # total sequences: 100% # first 512 sequences: 97.6601% # first 1024 sequences: 2.3389% # rest 
sequences: 0.1237% # negative sequences: 0.0009% RussianLangModel = ( 0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,1,3,3,3,3,1,3,3,3,2,3,2,3,3, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,2,2,2,2,2,0,0,2, 3,3,3,2,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,2,3,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,2,2,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,2,3,3,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, 0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,0,0,3,3,3,3,3,3,3,3,3,3,3,2,1, 0,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,2,2,2,3,1,3,3,1,3,3,3,3,2,2,3,0,2,2,2,3,3,2,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,3,3,3,2,2,3,2,3,3,3,2,1,2,2,0,1,2,2,2,2,2,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,3,0,2,2,3,3,2,1,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,1,2,3,2,2,3,2,3,3,3,3,2,2,3,0,3,2,2,3,1,1,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,3,3,3,3,2,2,2,0,3,3,3,2,2,2,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,2,3,2,2,0,1,3,2,1,2,2,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,2,1,1,3,0,1,1,1,1,2,1,1,0,2,2,2,1,2,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,2,2,2,2,1,3,2,3,2,3,2,1,2,2,0,1,1,2,1,2,1,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,2,3,3,3,2,2,2,2,0,2,2,2,2,3,1,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,2,3,2,2,3,3,3,3,3,3,3,3,3,1,3,2,0,0,3,3,3,3,2,3,3,3,3,2,3,2,0, 
0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,3,2,2,3,3,0,2,1,0,3,2,3,2,3,0,0,1,2,0,0,1,0,1,2,1,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,3,0,2,3,3,3,3,2,3,3,3,3,1,2,2,0,0,2,3,2,2,2,3,2,3,2,2,3,0,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,0,2,3,2,3,0,1,2,3,3,2,0,2,3,0,0,2,3,2,2,0,1,3,1,3,2,2,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,3,0,2,3,3,3,3,3,3,3,3,2,1,3,2,0,0,2,2,3,3,3,2,3,3,0,2,2,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,2,3,3,2,2,2,3,3,0,0,1,1,1,1,1,2,0,0,1,1,1,1,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,2,3,3,3,3,3,3,3,0,3,2,3,3,2,3,2,0,2,1,0,1,1,0,1,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,2,3,3,3,2,2,2,2,3,1,3,2,3,1,1,2,1,0,2,2,2,2,1,3,1,0, 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0, 2,2,3,3,3,3,3,1,2,2,1,3,1,0,3,0,0,3,0,0,0,1,1,0,1,2,1,0,0,0,0,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,2,1,1,3,3,3,2,2,1,2,2,3,1,1,2,0,0,2,2,1,3,0,0,2,1,1,2,1,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,3,3,3,1,2,2,2,1,2,1,3,3,1,1,2,1,2,1,2,2,0,2,0,0,1,1,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,3,2,1,3,2,2,3,2,0,3,2,0,3,0,1,0,1,1,0,0,1,1,1,1,0,1,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,2,3,3,3,2,2,2,3,3,1,2,1,2,1,0,1,0,1,1,0,1,0,0,2,1,1,1,0,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 3,1,1,2,1,2,3,3,2,2,1,2,2,3,0,2,1,0,0,2,2,3,2,1,2,2,2,2,2,3,1,0, 0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,1,1,0,1,1,2,2,1,1,3,0,0,1,3,1,1,1,0,0,0,1,0,1,1,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,1,3,3,3,2,0,0,0,2,1,0,1,0,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,0,1,0,0,2,3,2,2,2,1,2,2,2,1,2,1,0,0,1,1,1,0,2,0,1,1,1,0,0,1,1, 1,0,0,0,0,0,1,2,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,0,0,0,0,1,0,0,0,0,3,0,1,2,1,0,0,0,0,0,0,0,1,1,0,0,1,1, 1,0,1,0,1,2,0,0,1,1,2,1,0,1,1,1,1,0,1,1,1,1,0,1,0,0,1,0,0,1,1,0, 2,2,3,2,2,2,3,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,0,1,0,1,1,1,0,2,1, 1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,0,1,1,0, 3,3,3,2,2,2,2,3,2,2,1,1,2,2,2,2,1,1,3,1,2,1,2,0,0,1,1,0,1,0,2,1, 1,1,1,1,1,2,1,0,1,1,1,1,0,1,0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,1,1,0, 2,0,0,1,0,3,2,2,2,2,1,2,1,2,1,2,0,0,0,2,1,2,2,1,1,2,2,0,1,1,0,2, 1,1,1,1,1,0,1,1,1,2,1,1,1,2,1,0,1,2,1,1,1,1,0,1,1,1,0,0,1,0,0,1, 1,3,2,2,2,1,1,1,2,3,0,0,0,0,2,0,2,2,1,0,0,0,0,0,0,1,0,0,0,0,1,1, 1,0,1,1,0,1,0,1,1,0,1,1,0,2,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, 2,3,2,3,2,1,2,2,2,2,1,0,0,0,2,0,0,1,1,0,0,0,0,0,0,0,1,1,0,0,2,1, 1,1,2,1,0,2,0,0,1,0,1,0,0,1,0,0,1,1,0,1,1,0,0,0,0,0,1,0,0,0,0,0, 3,0,0,1,0,2,2,2,3,2,2,2,2,2,2,2,0,0,0,2,1,2,1,1,1,2,2,0,0,0,1,2, 1,1,1,1,1,0,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,1,0,1,1,1,1,1,1,0,0,1, 2,3,2,3,3,2,0,1,1,1,0,0,1,0,2,0,1,1,3,1,0,0,0,0,0,0,0,1,0,0,2,1, 1,1,1,1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1,0,1,0,0,0,0,0,0,1,0, 2,3,3,3,3,1,2,2,2,2,0,1,1,0,2,1,1,1,2,1,0,1,1,0,0,1,0,1,0,0,2,0, 0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,2,0,0,1,1,2,2,1,0,0,2,0,1,1,3,0,0,1,0,0,0,0,0,1,0,1,2,1, 1,1,2,0,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,0,0,0,0,0,1,0,1,1,0, 1,3,2,3,2,1,0,0,2,2,2,0,1,0,2,0,1,1,1,0,1,0,0,0,3,0,1,1,0,0,2,1, 1,1,1,0,1,1,0,0,0,0,1,1,0,1,0,0,2,1,1,0,1,0,0,0,1,0,1,0,0,1,1,0, 3,1,2,1,1,2,2,2,2,2,2,1,2,2,1,1,0,0,0,2,2,2,0,0,0,1,2,1,0,1,0,1, 2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,2,1,1,1,0,1,0,1,1,0,1,1,1,0,0,1, 3,0,0,0,0,2,0,1,1,1,1,1,1,1,0,1,0,0,0,1,1,1,0,1,0,1,1,0,0,1,0,1, 1,1,0,0,1,0,0,0,1,0,1,1,0,0,1,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1, 1,3,3,2,2,0,0,0,2,2,0,0,0,1,2,0,1,1,2,0,0,0,0,0,0,0,0,1,0,0,2,1, 
0,1,1,0,0,1,1,0,0,0,1,1,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0, 2,3,2,3,2,0,0,0,0,1,1,0,0,0,2,0,2,0,2,0,0,0,0,0,1,0,0,1,0,0,1,1, 1,1,2,0,1,2,1,0,1,1,2,1,1,1,1,1,2,1,1,0,1,0,0,1,1,1,1,1,0,1,1,0, 1,3,2,2,2,1,0,0,2,2,1,0,1,2,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,1,1, 0,0,1,1,0,1,1,0,0,1,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,1,0,2,3,1,2,2,2,2,2,2,1,1,0,0,0,1,0,1,0,2,1,1,1,0,0,0,0,1, 1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, 2,0,2,0,0,1,0,3,2,1,2,1,2,2,0,1,0,0,0,2,1,0,0,2,1,1,1,1,0,2,0,2, 2,1,1,1,1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,0,0,0,1,1,1,1,0,1,0,0,1, 1,2,2,2,2,1,0,0,1,0,0,0,0,0,2,0,1,1,1,1,0,0,0,0,1,0,1,2,0,0,2,0, 1,0,1,1,1,2,1,0,1,0,1,1,0,0,1,0,1,1,1,0,1,0,0,0,1,0,0,1,0,1,1,0, 2,1,2,2,2,0,3,0,1,1,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 0,0,0,1,1,1,0,0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0, 1,2,2,3,2,2,0,0,1,1,2,0,1,2,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1, 0,1,1,0,0,1,1,0,0,1,1,0,0,1,1,0,1,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0, 2,2,1,1,2,1,2,2,2,2,2,1,2,2,0,1,0,0,0,1,2,2,2,1,2,1,1,1,1,1,2,1, 1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,0,1, 1,2,2,2,2,0,1,0,2,2,0,0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0, 0,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,2,0,0,0,2,2,2,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1, 0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,2,0,0,0,0,1,0,0,1,1,2,0,0,0,0,1,0,1,0,0,1,0,0,2,0,0,0,1, 0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,1,1,2,0,2,1,1,1,1,0,2,2,0,0,0,0,0,0,0,0,0,1,1,0,0,0,1,1, 0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 1,0,2,1,2,0,0,0,0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0, 0,0,1,0,1,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0, 1,0,0,0,0,2,0,1,2,1,0,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,1, 
0,0,0,0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1, 2,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 1,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,0,0, 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 1,1,0,1,1,0,1,0,1,0,0,0,0,1,1,0,1,1,0,0,0,0,0,1,0,1,1,0,1,0,0,0, 0,1,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0, ) Koi8rModel = { 'charToOrderMap': KOI8R_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "KOI8-R" } Win1251CyrillicModel = { 'charToOrderMap': win1251_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "windows-1251" } Latin5CyrillicModel = { 'charToOrderMap': latin5_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "ISO-8859-5" } MacCyrillicModel = { 'charToOrderMap': macCyrillic_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "MacCyrillic" }; Ibm866Model = { 'charToOrderMap': IBM866_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "IBM866" } Ibm855Model = { 'charToOrderMap': IBM855_CharToOrderMap, 'precedenceMatrix': RussianLangModel, 'mTypicalPositiveRatio': 0.976601, 'keepEnglishLetter': False, 'charsetName': "IBM855" } # flake8: noqa
mit
heli522/scikit-learn
sklearn/datasets/tests/test_svmlight_format.py
228
11221
from bz2 import BZ2File import gzip from io import BytesIO import numpy as np import os import shutil from tempfile import NamedTemporaryFile from sklearn.externals.six import b from sklearn.utils.testing import assert_equal from sklearn.utils.testing import assert_array_equal from sklearn.utils.testing import assert_array_almost_equal from sklearn.utils.testing import assert_raises from sklearn.utils.testing import raises from sklearn.utils.testing import assert_in import sklearn from sklearn.datasets import (load_svmlight_file, load_svmlight_files, dump_svmlight_file) currdir = os.path.dirname(os.path.abspath(__file__)) datafile = os.path.join(currdir, "data", "svmlight_classification.txt") multifile = os.path.join(currdir, "data", "svmlight_multilabel.txt") invalidfile = os.path.join(currdir, "data", "svmlight_invalid.txt") invalidfile2 = os.path.join(currdir, "data", "svmlight_invalid_order.txt") def test_load_svmlight_file(): X, y = load_svmlight_file(datafile) # test X's shape assert_equal(X.indptr.shape[0], 7) assert_equal(X.shape[0], 6) assert_equal(X.shape[1], 21) assert_equal(y.shape[0], 6) # test X's non-zero values for i, j, val in ((0, 2, 2.5), (0, 10, -5.2), (0, 15, 1.5), (1, 5, 1.0), (1, 12, -3), (2, 20, 27)): assert_equal(X[i, j], val) # tests X's zero values assert_equal(X[0, 3], 0) assert_equal(X[0, 5], 0) assert_equal(X[1, 8], 0) assert_equal(X[1, 16], 0) assert_equal(X[2, 18], 0) # test can change X's values X[0, 2] *= 2 assert_equal(X[0, 2], 5) # test y assert_array_equal(y, [1, 2, 3, 4, 1, 2]) def test_load_svmlight_file_fd(): # test loading from file descriptor X1, y1 = load_svmlight_file(datafile) fd = os.open(datafile, os.O_RDONLY) try: X2, y2 = load_svmlight_file(fd) assert_array_equal(X1.data, X2.data) assert_array_equal(y1, y2) finally: os.close(fd) def test_load_svmlight_file_multilabel(): X, y = load_svmlight_file(multifile, multilabel=True) assert_equal(y, [(0, 1), (2,), (), (1, 2)]) def test_load_svmlight_files(): X_train, y_train, 
X_test, y_test = load_svmlight_files([datafile] * 2, dtype=np.float32) assert_array_equal(X_train.toarray(), X_test.toarray()) assert_array_equal(y_train, y_test) assert_equal(X_train.dtype, np.float32) assert_equal(X_test.dtype, np.float32) X1, y1, X2, y2, X3, y3 = load_svmlight_files([datafile] * 3, dtype=np.float64) assert_equal(X1.dtype, X2.dtype) assert_equal(X2.dtype, X3.dtype) assert_equal(X3.dtype, np.float64) def test_load_svmlight_file_n_features(): X, y = load_svmlight_file(datafile, n_features=22) # test X'shape assert_equal(X.indptr.shape[0], 7) assert_equal(X.shape[0], 6) assert_equal(X.shape[1], 22) # test X's non-zero values for i, j, val in ((0, 2, 2.5), (0, 10, -5.2), (1, 5, 1.0), (1, 12, -3)): assert_equal(X[i, j], val) # 21 features in file assert_raises(ValueError, load_svmlight_file, datafile, n_features=20) def test_load_compressed(): X, y = load_svmlight_file(datafile) with NamedTemporaryFile(prefix="sklearn-test", suffix=".gz") as tmp: tmp.close() # necessary under windows with open(datafile, "rb") as f: shutil.copyfileobj(f, gzip.open(tmp.name, "wb")) Xgz, ygz = load_svmlight_file(tmp.name) # because we "close" it manually and write to it, # we need to remove it manually. os.remove(tmp.name) assert_array_equal(X.toarray(), Xgz.toarray()) assert_array_equal(y, ygz) with NamedTemporaryFile(prefix="sklearn-test", suffix=".bz2") as tmp: tmp.close() # necessary under windows with open(datafile, "rb") as f: shutil.copyfileobj(f, BZ2File(tmp.name, "wb")) Xbz, ybz = load_svmlight_file(tmp.name) # because we "close" it manually and write to it, # we need to remove it manually. 
os.remove(tmp.name) assert_array_equal(X.toarray(), Xbz.toarray()) assert_array_equal(y, ybz) @raises(ValueError) def test_load_invalid_file(): load_svmlight_file(invalidfile) @raises(ValueError) def test_load_invalid_order_file(): load_svmlight_file(invalidfile2) @raises(ValueError) def test_load_zero_based(): f = BytesIO(b("-1 4:1.\n1 0:1\n")) load_svmlight_file(f, zero_based=False) def test_load_zero_based_auto(): data1 = b("-1 1:1 2:2 3:3\n") data2 = b("-1 0:0 1:1\n") f1 = BytesIO(data1) X, y = load_svmlight_file(f1, zero_based="auto") assert_equal(X.shape, (1, 3)) f1 = BytesIO(data1) f2 = BytesIO(data2) X1, y1, X2, y2 = load_svmlight_files([f1, f2], zero_based="auto") assert_equal(X1.shape, (1, 4)) assert_equal(X2.shape, (1, 4)) def test_load_with_qid(): # load svmfile with qid attribute data = b(""" 3 qid:1 1:0.53 2:0.12 2 qid:1 1:0.13 2:0.1 7 qid:2 1:0.87 2:0.12""") X, y = load_svmlight_file(BytesIO(data), query_id=False) assert_array_equal(y, [3, 2, 7]) assert_array_equal(X.toarray(), [[.53, .12], [.13, .1], [.87, .12]]) res1 = load_svmlight_files([BytesIO(data)], query_id=True) res2 = load_svmlight_file(BytesIO(data), query_id=True) for X, y, qid in (res1, res2): assert_array_equal(y, [3, 2, 7]) assert_array_equal(qid, [1, 1, 2]) assert_array_equal(X.toarray(), [[.53, .12], [.13, .1], [.87, .12]]) @raises(ValueError) def test_load_invalid_file2(): load_svmlight_files([datafile, invalidfile, datafile]) @raises(TypeError) def test_not_a_filename(): # in python 3 integers are valid file opening arguments (taken as unix # file descriptors) load_svmlight_file(.42) @raises(IOError) def test_invalid_filename(): load_svmlight_file("trou pic nic douille") def test_dump(): Xs, y = load_svmlight_file(datafile) Xd = Xs.toarray() # slicing a csr_matrix can unsort its .indices, so test that we sort # those correctly Xsliced = Xs[np.arange(Xs.shape[0])] for X in (Xs, Xd, Xsliced): for zero_based in (True, False): for dtype in [np.float32, np.float64, np.int32]: f = 
BytesIO() # we need to pass a comment to get the version info in; # LibSVM doesn't grok comments so they're not put in by # default anymore. dump_svmlight_file(X.astype(dtype), y, f, comment="test", zero_based=zero_based) f.seek(0) comment = f.readline() try: comment = str(comment, "utf-8") except TypeError: # fails in Python 2.x pass assert_in("scikit-learn %s" % sklearn.__version__, comment) comment = f.readline() try: comment = str(comment, "utf-8") except TypeError: # fails in Python 2.x pass assert_in(["one", "zero"][zero_based] + "-based", comment) X2, y2 = load_svmlight_file(f, dtype=dtype, zero_based=zero_based) assert_equal(X2.dtype, dtype) assert_array_equal(X2.sorted_indices().indices, X2.indices) if dtype == np.float32: assert_array_almost_equal( # allow a rounding error at the last decimal place Xd.astype(dtype), X2.toarray(), 4) else: assert_array_almost_equal( # allow a rounding error at the last decimal place Xd.astype(dtype), X2.toarray(), 15) assert_array_equal(y, y2) def test_dump_multilabel(): X = [[1, 0, 3, 0, 5], [0, 0, 0, 0, 0], [0, 5, 0, 1, 0]] y = [[0, 1, 0], [1, 0, 1], [1, 1, 0]] f = BytesIO() dump_svmlight_file(X, y, f, multilabel=True) f.seek(0) # make sure it dumps multilabel correctly assert_equal(f.readline(), b("1 0:1 2:3 4:5\n")) assert_equal(f.readline(), b("0,2 \n")) assert_equal(f.readline(), b("0,1 1:5 3:1\n")) def test_dump_concise(): one = 1 two = 2.1 three = 3.01 exact = 1.000000000000001 # loses the last decimal place almost = 1.0000000000000001 X = [[one, two, three, exact, almost], [1e9, 2e18, 3e27, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]] y = [one, two, three, exact, almost] f = BytesIO() dump_svmlight_file(X, y, f) f.seek(0) # make sure it's using the most concise format possible assert_equal(f.readline(), b("1 0:1 1:2.1 2:3.01 3:1.000000000000001 4:1\n")) assert_equal(f.readline(), b("2.1 0:1000000000 1:2e+18 2:3e+27\n")) assert_equal(f.readline(), b("3.01 \n")) assert_equal(f.readline(), 
b("1.000000000000001 \n")) assert_equal(f.readline(), b("1 \n")) f.seek(0) # make sure it's correct too :) X2, y2 = load_svmlight_file(f) assert_array_almost_equal(X, X2.toarray()) assert_array_equal(y, y2) def test_dump_comment(): X, y = load_svmlight_file(datafile) X = X.toarray() f = BytesIO() ascii_comment = "This is a comment\nspanning multiple lines." dump_svmlight_file(X, y, f, comment=ascii_comment, zero_based=False) f.seek(0) X2, y2 = load_svmlight_file(f, zero_based=False) assert_array_almost_equal(X, X2.toarray()) assert_array_equal(y, y2) # XXX we have to update this to support Python 3.x utf8_comment = b("It is true that\n\xc2\xbd\xc2\xb2 = \xc2\xbc") f = BytesIO() assert_raises(UnicodeDecodeError, dump_svmlight_file, X, y, f, comment=utf8_comment) unicode_comment = utf8_comment.decode("utf-8") f = BytesIO() dump_svmlight_file(X, y, f, comment=unicode_comment, zero_based=False) f.seek(0) X2, y2 = load_svmlight_file(f, zero_based=False) assert_array_almost_equal(X, X2.toarray()) assert_array_equal(y, y2) f = BytesIO() assert_raises(ValueError, dump_svmlight_file, X, y, f, comment="I've got a \0.") def test_dump_invalid(): X, y = load_svmlight_file(datafile) f = BytesIO() y2d = [y] assert_raises(ValueError, dump_svmlight_file, X, y2d, f) f = BytesIO() assert_raises(ValueError, dump_svmlight_file, X, y[:-1], f) def test_dump_query_id(): # test dumping a file with query_id X, y = load_svmlight_file(datafile) X = X.toarray() query_id = np.arange(X.shape[0]) // 2 f = BytesIO() dump_svmlight_file(X, y, f, query_id=query_id, zero_based=True) f.seek(0) X1, y1, query_id1 = load_svmlight_file(f, query_id=True, zero_based=True) assert_array_almost_equal(X, X1.toarray()) assert_array_almost_equal(y, y1) assert_array_almost_equal(query_id, query_id1)
bsd-3-clause
moutai/scikit-learn
examples/neural_networks/plot_mnist_filters.py
57
2195
""" ===================================== Visualization of MLP weights on MNIST ===================================== Sometimes looking at the learned coefficients of a neural network can provide insight into the learning behavior. For example if weights look unstructured, maybe some were not used at all, or if very large coefficients exist, maybe regularization was too low or the learning rate too high. This example shows how to plot some of the first layer weights in a MLPClassifier trained on the MNIST dataset. The input data consists of 28x28 pixel handwritten digits, leading to 784 features in the dataset. Therefore the first layer weight matrix have the shape (784, hidden_layer_sizes[0]). We can therefore visualize a single column of the weight matrix as a 28x28 pixel image. To make the example run faster, we use very few hidden units, and train only for a very short time. Training longer would result in weights with a much smoother spatial appearance. """ print(__doc__) import matplotlib.pyplot as plt from sklearn.datasets import fetch_mldata from sklearn.neural_network import MLPClassifier mnist = fetch_mldata("MNIST original") # rescale the data, use the traditional train/test split X, y = mnist.data / 255., mnist.target X_train, X_test = X[:60000], X[60000:] y_train, y_test = y[:60000], y[60000:] # mlp = MLPClassifier(hidden_layer_sizes=(100, 100), max_iter=400, alpha=1e-4, # algorithm='sgd', verbose=10, tol=1e-4, random_state=1) mlp = MLPClassifier(hidden_layer_sizes=(50,), max_iter=10, alpha=1e-4, algorithm='sgd', verbose=10, tol=1e-4, random_state=1, learning_rate_init=.1) mlp.fit(X_train, y_train) print("Training set score: %f" % mlp.score(X_train, y_train)) print("Test set score: %f" % mlp.score(X_test, y_test)) fig, axes = plt.subplots(4, 4) # use global min / max to ensure all weights are shown on the same scale vmin, vmax = mlp.coefs_[0].min(), mlp.coefs_[0].max() for coef, ax in zip(mlp.coefs_[0].T, axes.ravel()): ax.matshow(coef.reshape(28, 28), 
cmap=plt.cm.gray, vmin=.5 * vmin, vmax=.5 * vmax) ax.set_xticks(()) ax.set_yticks(()) plt.show()
bsd-3-clause
llluiop/skia
platform_tools/android/gyp_gen/gypd_parser.py
1
4367
#!/usr/bin/python

# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Functions for parsing the gypd output from gyp.
"""

import ast


def parse_dictionary(var_dict, d, current_target_name):
  """ Helper function to get the meaningful entries in a dictionary.
  @param var_dict VarsDict object for storing the results of the parsing.
  @param d Dictionary object to parse.
  @param current_target_name The current target being parsed. If this
                             dictionary is a target, this will be its entry
                             'target_name'. Otherwise, this will be the name
                             of the target which contains this dictionary.
  """
  for source in d.get('sources', []):
    # Compare against a lowercase version, in case files are named .H or .GYPI
    lowercase_source = source.lower()
    if lowercase_source.endswith('.h'):
      # Android.mk does not need the header files.
      continue
    if lowercase_source.endswith('gypi'):
      # The gypi files are included in sources, but the sources they included
      # are also included. No need to parse them again.
      continue
    # The path is relative to the gyp folder, but Android wants the path
    # relative to the root.
    source = source.replace('../src', 'src', 1)
    var_dict.LOCAL_SRC_FILES.add(source)

  for lib in d.get('libraries', []):
    if lib.endswith('.a'):
      # Remove the '.a'
      lib = lib[:-2]
      # Add 'lib', if necessary
      if not lib.startswith('lib'):
        lib = 'lib' + lib
      var_dict.LOCAL_STATIC_LIBRARIES.add(lib)
    else:
      # lib will be in the form of '-l<name>'. Change it to 'lib<name>'
      lib = lib.replace('-l', 'lib', 1)
      var_dict.LOCAL_SHARED_LIBRARIES.add(lib)

  for dependency in d.get('dependencies', []):
    # Each dependency is listed as
    #   <path_to_file>:<target>#target
    li = dependency.split(':')
    assert(len(li) <= 2 and len(li) >= 1)
    sub_targets = []
    if len(li) == 2 and li[1] != '*':
      sub_targets.append(li[1].split('#')[0])
    sub_path = li[0]
    assert(sub_path.endswith('.gyp'))
    # Although the original reference is to a .gyp, parse the corresponding
    # gypd file, which was constructed by gyp.
    sub_path = sub_path + 'd'
    parse_gypd(var_dict, sub_path, sub_targets)

  if 'default_configuration' in d:
    config_name = d['default_configuration']
    # default_configuration is meaningless without configurations
    assert('configurations' in d)
    config = d['configurations'][config_name]
    # Recurse so the default configuration's flags are folded in as well.
    parse_dictionary(var_dict, config, current_target_name)

  for flag in d.get('cflags', []):
    var_dict.LOCAL_CFLAGS.add(flag)
  for flag in d.get('cflags_cc', []):
    var_dict.LOCAL_CPPFLAGS.add(flag)

  for include in d.get('include_dirs', []):
    # The input path will be relative to gyp/, but Android wants relative to
    # LOCAL_PATH
    include = include.replace('..', '$(LOCAL_PATH)', 1)
    # Remove a trailing slash, if present.
    if include.endswith('/'):
      include = include[:-1]
    var_dict.LOCAL_C_INCLUDES.add(include)
    # For the top level, libskia, include directories should be exported.
    if current_target_name == 'libskia':
      var_dict.LOCAL_EXPORT_C_INCLUDE_DIRS.add(include)

  for define in d.get('defines', []):
    var_dict.DEFINES.add(define)


def parse_gypd(var_dict, path, desired_targets=None):
  """ Parse a gypd file.
  @param var_dict VarsDict object for storing the result of the parse.
  @param path Path to gypd file.
  @param desired_targets List of targets to be parsed from this file. If
                         empty, parse all targets.
  """
  with open(path, 'r') as f:
    # FIX: the gypd file is a pretty-printed Python literal (dicts, lists,
    # strings, numbers). Use ast.literal_eval instead of eval so that a
    # malicious or corrupted gypd file cannot execute arbitrary code.
    d = ast.literal_eval(f.read())

  # The gypd file is structured such that the top level dictionary has an
  # entry named 'targets'
  for target in d['targets']:
    target_name = target['target_name']
    if target_name in var_dict.KNOWN_TARGETS:
      # Avoid circular dependencies
      continue
    if desired_targets and target_name not in desired_targets:
      # Our caller does not depend on this one
      continue
    # Add it to our known targets so we don't parse it again
    var_dict.KNOWN_TARGETS.add(target_name)

    parse_dictionary(var_dict, target, target_name)
bsd-3-clause
drxos/python-social-auth
social/backends/open_id.py
66
14306
import datetime from calendar import timegm from jwt import InvalidTokenError, decode as jwt_decode from openid.consumer.consumer import Consumer, SUCCESS, CANCEL, FAILURE from openid.consumer.discover import DiscoveryFailure from openid.extensions import sreg, ax, pape from social.utils import url_add_parameters from social.exceptions import AuthException, AuthFailed, AuthCanceled, \ AuthUnknownError, AuthMissingParameter, \ AuthTokenError from social.backends.base import BaseAuth from social.backends.oauth import BaseOAuth2 # OpenID configuration OLD_AX_ATTRS = [ ('http://schema.openid.net/contact/email', 'old_email'), ('http://schema.openid.net/namePerson', 'old_fullname'), ('http://schema.openid.net/namePerson/friendly', 'old_nickname') ] AX_SCHEMA_ATTRS = [ # Request both the full name and first/last components since some # providers offer one but not the other. ('http://axschema.org/contact/email', 'email'), ('http://axschema.org/namePerson', 'fullname'), ('http://axschema.org/namePerson/first', 'first_name'), ('http://axschema.org/namePerson/last', 'last_name'), ('http://axschema.org/namePerson/friendly', 'nickname'), ] SREG_ATTR = [ ('email', 'email'), ('fullname', 'fullname'), ('nickname', 'nickname') ] OPENID_ID_FIELD = 'openid_identifier' SESSION_NAME = 'openid' class OpenIdAuth(BaseAuth): """Generic OpenID authentication backend""" name = 'openid' URL = None USERNAME_KEY = 'username' def get_user_id(self, details, response): """Return user unique id provided by service""" return response.identity_url def get_ax_attributes(self): attrs = self.setting('AX_SCHEMA_ATTRS', []) if attrs and self.setting('IGNORE_DEFAULT_AX_ATTRS', True): return attrs return attrs + AX_SCHEMA_ATTRS + OLD_AX_ATTRS def get_sreg_attributes(self): return self.setting('SREG_ATTR') or SREG_ATTR def values_from_response(self, response, sreg_names=None, ax_names=None): """Return values from SimpleRegistration response or AttributeExchange response if present. 
@sreg_names and @ax_names must be a list of name and aliases for such name. The alias will be used as mapping key. """ values = {} # Use Simple Registration attributes if provided if sreg_names: resp = sreg.SRegResponse.fromSuccessResponse(response) if resp: values.update((alias, resp.get(name) or '') for name, alias in sreg_names) # Use Attribute Exchange attributes if provided if ax_names: resp = ax.FetchResponse.fromSuccessResponse(response) if resp: for src, alias in ax_names: name = alias.replace('old_', '') values[name] = resp.getSingle(src, '') or values.get(name) return values def get_user_details(self, response): """Return user details from an OpenID request""" values = {'username': '', 'email': '', 'fullname': '', 'first_name': '', 'last_name': ''} # update values using SimpleRegistration or AttributeExchange # values values.update(self.values_from_response( response, self.get_sreg_attributes(), self.get_ax_attributes() )) fullname = values.get('fullname') or '' first_name = values.get('first_name') or '' last_name = values.get('last_name') or '' email = values.get('email') or '' if not fullname and first_name and last_name: fullname = first_name + ' ' + last_name elif fullname: try: first_name, last_name = fullname.rsplit(' ', 1) except ValueError: last_name = fullname username_key = self.setting('USERNAME_KEY') or self.USERNAME_KEY values.update({'fullname': fullname, 'first_name': first_name, 'last_name': last_name, 'username': values.get(username_key) or (first_name.title() + last_name.title()), 'email': email}) return values def extra_data(self, user, uid, response, details=None, *args, **kwargs): """Return defined extra data names to store in extra_data field. Settings will be inspected to get more values names that should be stored on extra_data field. Setting name is created from current backend name (all uppercase) plus _SREG_EXTRA_DATA and _AX_EXTRA_DATA because values can be returned by SimpleRegistration or AttributeExchange schemas. 
Both list must be a value name and an alias mapping similar to SREG_ATTR, OLD_AX_ATTRS or AX_SCHEMA_ATTRS """ sreg_names = self.setting('SREG_EXTRA_DATA') ax_names = self.setting('AX_EXTRA_DATA') values = self.values_from_response(response, sreg_names, ax_names) from_details = super(OpenIdAuth, self).extra_data( user, uid, {}, details, *args, **kwargs ) values.update(from_details) return values def auth_url(self): """Return auth URL returned by service""" openid_request = self.setup_request(self.auth_extra_arguments()) # Construct completion URL, including page we should redirect to return_to = self.strategy.absolute_uri(self.redirect_uri) return openid_request.redirectURL(self.trust_root(), return_to) def auth_html(self): """Return auth HTML returned by service""" openid_request = self.setup_request(self.auth_extra_arguments()) return_to = self.strategy.absolute_uri(self.redirect_uri) form_tag = {'id': 'openid_message'} return openid_request.htmlMarkup(self.trust_root(), return_to, form_tag_attrs=form_tag) def trust_root(self): """Return trust-root option""" return self.setting('OPENID_TRUST_ROOT') or \ self.strategy.absolute_uri('/') def continue_pipeline(self, *args, **kwargs): """Continue previous halted pipeline""" response = self.consumer().complete(dict(self.data.items()), self.strategy.absolute_uri( self.redirect_uri )) kwargs.update({'response': response, 'backend': self}) return self.strategy.authenticate(*args, **kwargs) def auth_complete(self, *args, **kwargs): """Complete auth process""" response = self.consumer().complete(dict(self.data.items()), self.strategy.absolute_uri( self.redirect_uri )) self.process_error(response) kwargs.update({'response': response, 'backend': self}) return self.strategy.authenticate(*args, **kwargs) def process_error(self, data): if not data: raise AuthException(self, 'OpenID relying party endpoint') elif data.status == FAILURE: raise AuthFailed(self, data.message) elif data.status == CANCEL: raise AuthCanceled(self) elif 
data.status != SUCCESS: raise AuthUnknownError(self, data.status) def setup_request(self, params=None): """Setup request""" request = self.openid_request(params) # Request some user details. Use attribute exchange if provider # advertises support. if request.endpoint.supportsType(ax.AXMessage.ns_uri): fetch_request = ax.FetchRequest() # Mark all attributes as required, Google ignores optional ones for attr, alias in self.get_ax_attributes(): fetch_request.add(ax.AttrInfo(attr, alias=alias, required=True)) else: fetch_request = sreg.SRegRequest( optional=list(dict(self.get_sreg_attributes()).keys()) ) request.addExtension(fetch_request) # Add PAPE Extension for if configured preferred_policies = self.setting( 'OPENID_PAPE_PREFERRED_AUTH_POLICIES' ) preferred_level_types = self.setting( 'OPENID_PAPE_PREFERRED_AUTH_LEVEL_TYPES' ) max_age = self.setting('OPENID_PAPE_MAX_AUTH_AGE') if max_age is not None: try: max_age = int(max_age) except (ValueError, TypeError): max_age = None if max_age is not None or preferred_policies or preferred_level_types: pape_request = pape.Request( max_auth_age=max_age, preferred_auth_policies=preferred_policies, preferred_auth_level_types=preferred_level_types ) request.addExtension(pape_request) return request def consumer(self): """Create an OpenID Consumer object for the given Django request.""" if not hasattr(self, '_consumer'): self._consumer = self.create_consumer(self.strategy.openid_store()) return self._consumer def create_consumer(self, store=None): return Consumer(self.strategy.openid_session_dict(SESSION_NAME), store) def uses_redirect(self): """Return true if openid request will be handled with redirect or HTML content will be returned. 
""" return self.openid_request().shouldSendRedirect() def openid_request(self, params=None): """Return openid request""" try: return self.consumer().begin(url_add_parameters(self.openid_url(), params)) except DiscoveryFailure as err: raise AuthException(self, 'OpenID discovery error: {0}'.format( err )) def openid_url(self): """Return service provider URL. This base class is generic accepting a POST parameter that specifies provider URL.""" if self.URL: return self.URL elif OPENID_ID_FIELD in self.data: return self.data[OPENID_ID_FIELD] else: raise AuthMissingParameter(self, OPENID_ID_FIELD) class OpenIdConnectAssociation(object): """ Use Association model to save the nonce by force. """ def __init__(self, handle, secret='', issued=0, lifetime=0, assoc_type=''): self.handle = handle # as nonce self.secret = secret.encode() # not use self.issued = issued # not use self.lifetime = lifetime # not use self.assoc_type = assoc_type # as state class OpenIdConnectAuth(BaseOAuth2): """ Base class for Open ID Connect backends. Currently only the code response type is supported. 
""" ID_TOKEN_ISSUER = None DEFAULT_SCOPE = ['openid'] EXTRA_DATA = ['id_token', 'refresh_token', ('sub', 'id')] # Set after access_token is retrieved id_token = None def auth_params(self, state=None): """Return extra arguments needed on auth process.""" params = super(OpenIdConnectAuth, self).auth_params(state) params['nonce'] = self.get_and_store_nonce( self.AUTHORIZATION_URL, state ) return params def auth_complete_params(self, state=None): params = super(OpenIdConnectAuth, self).auth_complete_params(state) # Add a nonce to the request so that to help counter CSRF params['nonce'] = self.get_and_store_nonce( self.ACCESS_TOKEN_URL, state ) return params def get_and_store_nonce(self, url, state): # Create a nonce nonce = self.strategy.random_string(64) # Store the nonce association = OpenIdConnectAssociation(nonce, assoc_type=state) self.strategy.storage.association.store(url, association) return nonce def get_nonce(self, nonce): try: return self.strategy.storage.association.get( server_url=self.ACCESS_TOKEN_URL, handle=nonce )[0] except IndexError: pass def remove_nonce(self, nonce_id): self.strategy.storage.association.remove([nonce_id]) def validate_and_return_id_token(self, id_token): """ Validates the id_token according to the steps at http://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation. """ client_id, _client_secret = self.get_key_and_secret() decryption_key = self.setting('ID_TOKEN_DECRYPTION_KEY') try: # Decode the JWT and raise an error if the secret is invalid or # the response has expired. 
id_token = jwt_decode(id_token, decryption_key, audience=client_id, issuer=self.ID_TOKEN_ISSUER, algorithms=['HS256']) except InvalidTokenError as err: raise AuthTokenError(self, err) # Verify the token was issued in the last 10 minutes utc_timestamp = timegm(datetime.datetime.utcnow().utctimetuple()) if id_token['iat'] < (utc_timestamp - 600): raise AuthTokenError(self, 'Incorrect id_token: iat') # Validate the nonce to ensure the request was not modified nonce = id_token.get('nonce') if not nonce: raise AuthTokenError(self, 'Incorrect id_token: nonce') nonce_obj = self.get_nonce(nonce) if nonce_obj: self.remove_nonce(nonce_obj.id) else: raise AuthTokenError(self, 'Incorrect id_token: nonce') return id_token def request_access_token(self, *args, **kwargs): """ Retrieve the access token. Also, validate the id_token and store it (temporarily). """ response = self.get_json(*args, **kwargs) self.id_token = self.validate_and_return_id_token(response['id_token']) return response
bsd-3-clause
JonathanStein/odoo
addons/membership/__openerp__.py
197
2207
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Membership Management', 'version': '0.1', 'category': 'Association', 'description': """ This module allows you to manage all operations for managing memberships. ========================================================================= It supports different kind of members: -------------------------------------- * Free member * Associated member (e.g.: a group subscribes to a membership for all subsidiaries) * Paid members * Special member prices It is integrated with sales and accounting to allow you to automatically invoice and send propositions for membership renewal. 
""", 'author': 'OpenERP SA', 'depends': ['base', 'product', 'account'], 'data': [ 'security/ir.model.access.csv', 'wizard/membership_invoice_view.xml', 'membership_data.xml', 'membership_view.xml', 'report/report_membership_view.xml', ], 'demo': [ 'membership_demo.xml', 'membership_demo.yml' ], 'website': 'https://www.odoo.com/page/community-builder', 'test': ['test/test_membership.yml'], 'installable': True, 'auto_install': False, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
Shimejing/jz2440
tools/perf/scripts/python/failed-syscalls-by-pid.py
1996
2233
# failed system call counts, by pid # (c) 2010, Tom Zanussi <tzanussi@gmail.com> # Licensed under the terms of the GNU GPL License version 2 # # Displays system-wide failed system call totals, broken down by pid. # If a [comm] arg is specified, only syscalls called by [comm] are displayed. import os import sys sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from Util import * usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n"; for_comm = None for_pid = None if len(sys.argv) > 2: sys.exit(usage) if len(sys.argv) > 1: try: for_pid = int(sys.argv[1]) except: for_comm = sys.argv[1] syscalls = autodict() def trace_begin(): print "Press control+C to stop and show the summary" def trace_end(): print_error_totals() def raw_syscalls__sys_exit(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, common_callchain, id, ret): if (for_comm and common_comm != for_comm) or \ (for_pid and common_pid != for_pid ): return if ret < 0: try: syscalls[common_comm][common_pid][id][ret] += 1 except TypeError: syscalls[common_comm][common_pid][id][ret] = 1 def syscalls__sys_exit(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, id, ret): raw_syscalls__sys_exit(**locals()) def print_error_totals(): if for_comm is not None: print "\nsyscall errors for %s:\n\n" % (for_comm), else: print "\nsyscall errors:\n\n", print "%-30s %10s\n" % ("comm [pid]", "count"), print "%-30s %10s\n" % ("------------------------------", \ "----------"), comm_keys = syscalls.keys() for comm in comm_keys: pid_keys = syscalls[comm].keys() for pid in pid_keys: print "\n%s [%d]\n" % (comm, pid), id_keys = syscalls[comm][pid].keys() for id in id_keys: print " syscall: %-16s\n" % syscall_name(id), ret_keys = syscalls[comm][pid][id].keys() for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True): 
print " err = %-20s %10d\n" % (strerror(ret), val),
gpl-2.0