repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
vnsofthe/odoo-dev | addons/web_linkedin/web_linkedin.py | 333 | 4485 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
import urllib2
from urlparse import urlparse, urlunparse
import openerp
import openerp.addons.web
from openerp.osv import fields, osv
class Binary(openerp.http.Controller):
    # HTTP controller used by the LinkedIn web widget: the browser cannot
    # fetch LinkedIn media cross-origin, so the server proxies the image.
    @openerp.http.route('/web_linkedin/binary/url2binary', type='json', auth='user')
    def url2binary(self, url):
        """Used exclusively to load images from LinkedIn profiles, must not be used for anything else."""
        _scheme, _netloc, path, params, query, fragment = urlparse(url)
        # media.linkedin.com is the master domain for LinkedIn media (replicated to CDNs),
        # so forcing it should always work and prevents abusing this method to load arbitrary URLs
        url = urlunparse(('http', 'media.licdn.com', path, params, query, fragment))
        # Fetch the rewritten URL and return its bytes base64-encoded so the
        # result can travel back through the JSON-RPC layer.
        bfile = urllib2.urlopen(url)
        return base64.b64encode(bfile.read())
class web_linkedin_settings(osv.osv_memory):
    """Transient settings model exposing the LinkedIn API key on the
    Sales configuration screen."""
    _inherit = 'sale.config.settings'
    _columns = {
        # Public LinkedIn API key used by the web client's JavaScript SDK.
        'api_key': fields.char(string="API Key", size=50),
        # Helper field shown in the UI (mirrors web.base.url) so the admin
        # can register the correct domain with LinkedIn.
        'server_domain': fields.char(),
    }

    def get_default_linkedin(self, cr, uid, fields, context=None):
        # Defaults come from ir.config_parameter so the key survives across
        # settings-wizard instantiations.
        key = self.pool.get("ir.config_parameter").get_param(cr, uid, "web.linkedin.apikey") or ""
        dom = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
        return {'api_key': key, 'server_domain': dom,}

    def set_linkedin(self, cr, uid, ids, context=None):
        # Persist the (possibly empty) key, readable by all employees.
        key = self.browse(cr, uid, ids[0], context)["api_key"] or ""
        self.pool.get("ir.config_parameter").set_param(cr, uid, "web.linkedin.apikey", key, groups=['base.group_user'])
class web_linkedin_fields(osv.Model):
    """Adds LinkedIn identification fields to res.partner."""
    _inherit = 'res.partner'

    def _get_url(self, cr, uid, ids, name, arg, context=None):
        # Function-field getter: simply mirrors the stored linkedin_url
        # (False when unset).
        res = dict((id, False) for id in ids)
        for partner in self.browse(cr, uid, ids, context=context):
            res[partner.id] = partner.linkedin_url
        return res

    def linkedin_check_similar_partner(self, cr, uid, linkedin_datas, context=None):
        """For each LinkedIn record, return the best-matching local partner.

        Matches either by stored linkedin_id or, for partners without one,
        by "first last" / "last first" name patterns. Returns one dict per
        input record ({} when nothing matches), stripped of falsy values.
        """
        res = []
        res_partner = self.pool.get('res.partner')
        for linkedin_data in linkedin_datas:
            partner_ids = res_partner.search(cr, uid, ["|", ("linkedin_id", "=", linkedin_data['id']),
                "&", ("linkedin_id", "=", False),
                "|", ("name", "ilike", linkedin_data['firstName'] + "%" + linkedin_data['lastName']), ("name", "ilike", linkedin_data['lastName'] + "%" + linkedin_data['firstName'])], context=context)
            if partner_ids:
                partner = res_partner.read(cr, uid, partner_ids[0], ["image", "mobile", "phone", "parent_id", "name", "email", "function", "linkedin_id"], context=context)
                # A partner already linked to a *different* LinkedIn id is
                # returned without its database id (treated as a suggestion
                # only, not an existing link).
                if partner['linkedin_id'] and partner['linkedin_id'] != linkedin_data['id']:
                    partner.pop('id')
                if partner['parent_id']:
                    # read() yields (id, name) tuples for many2one; keep the id.
                    partner['parent_id'] = partner['parent_id'][0]
                # Drop falsy values. Safe in Python 2: items() returns a
                # list copy, so popping during iteration is fine.
                for key, val in partner.items():
                    if not val:
                        partner.pop(key)
                res.append(partner)
            else:
                res.append({})
        return res

    _columns = {
        # LinkedIn member identifier, set when a partner is linked.
        'linkedin_id': fields.char(string="LinkedIn ID"),
        'linkedin_url': fields.char(string="LinkedIn url", store=True),
        'linkedin_public_url': fields.function(_get_url, type='text', string="LinkedIn url",
            help="This url is set automatically when you join the partner with a LinkedIn account."),
    }
| agpl-3.0 |
habibiefaried/ryu | ryu/contrib/tinyrpc/protocols/jsonrpc.py | 41 | 8589 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from .. import RPCBatchProtocol, RPCRequest, RPCResponse, RPCErrorResponse,\
InvalidRequestError, MethodNotFoundError, ServerError,\
InvalidReplyError, RPCError, RPCBatchRequest, RPCBatchResponse
import json
class FixedErrorMessageMixin(object):
    """Mixin for exceptions carrying a fixed JSON-RPC message and code.

    Subclasses define ``message`` and ``jsonrpc_error_code`` class
    attributes; instances raised without arguments fall back to the
    canonical message.
    """

    def __init__(self, *args, **kwargs):
        # Substitute the class-level message when no explicit args given.
        super(FixedErrorMessageMixin, self).__init__(
            *(args or (self.message,)), **kwargs)

    def error_respond(self):
        """Build a JSONRPCErrorResponse describing this error (id is None)."""
        resp = JSONRPCErrorResponse()
        resp.unique_id = None
        resp.error = self.message
        resp._jsonrpc_error_code = self.jsonrpc_error_code
        return resp
class JSONRPCParseError(FixedErrorMessageMixin, InvalidRequestError):
    # JSON-RPC 2.0 pre-defined error: invalid JSON was received.
    jsonrpc_error_code = -32700
    message = 'Parse error'
class JSONRPCInvalidRequestError(FixedErrorMessageMixin, InvalidRequestError):
    # JSON-RPC 2.0 pre-defined error: the JSON was not a valid Request object.
    jsonrpc_error_code = -32600
    message = 'Invalid Request'
class JSONRPCMethodNotFoundError(FixedErrorMessageMixin, MethodNotFoundError):
    # JSON-RPC 2.0 pre-defined error: method does not exist / is unavailable.
    jsonrpc_error_code = -32601
    message = 'Method not found'
class JSONRPCInvalidParamsError(FixedErrorMessageMixin, InvalidRequestError):
    # JSON-RPC 2.0 pre-defined error: invalid method parameter(s).
    jsonrpc_error_code = -32602
    message = 'Invalid params'
class JSONRPCInternalError(FixedErrorMessageMixin, InvalidRequestError):
    # JSON-RPC 2.0 pre-defined error: internal JSON-RPC error.
    jsonrpc_error_code = -32603
    message = 'Internal error'
class JSONRPCServerError(FixedErrorMessageMixin, InvalidRequestError):
    # Implementation-defined server error (-32000); message is filled in
    # from the wrapped exception at raise time.
    jsonrpc_error_code = -32000
    message = ''
class JSONRPCSuccessResponse(RPCResponse):
    """Successful JSON-RPC 2.0 reply: carries 'result', never 'error'."""

    def _to_dict(self):
        # Assemble the wire-format mapping mandated by the 2.0 spec.
        payload = dict(
            jsonrpc=JSONRPCProtocol.JSON_RPC_VERSION,
            id=self.unique_id,
            result=self.result,
        )
        return payload

    def serialize(self):
        """Return the reply encoded as a JSON string."""
        return json.dumps(self._to_dict())
class JSONRPCErrorResponse(RPCErrorResponse):
    """Error JSON-RPC 2.0 reply: carries an 'error' object, never 'result'."""

    def _to_dict(self):
        # The error member is itself an object with message and code,
        # per the 2.0 spec.
        error_obj = {
            'message': str(self.error),
            'code': self._jsonrpc_error_code,
        }
        return {
            'jsonrpc': JSONRPCProtocol.JSON_RPC_VERSION,
            'id': self.unique_id,
            'error': error_obj,
        }

    def serialize(self):
        """Return the reply encoded as a JSON string."""
        return json.dumps(self._to_dict())
def _get_code_and_message(error):
    """Map an exception or plain message to a (code, message) pair.

    Exceptions with their own ``jsonrpc_error_code`` win; otherwise the
    tinyrpc exception hierarchy is mapped onto the spec-defined codes,
    and anything else becomes a generic server error (-32000).
    """
    # basestring: Python 2 -- accepts both str and unicode messages.
    assert isinstance(error, (Exception, basestring))
    if isinstance(error, Exception):
        if hasattr(error, 'jsonrpc_error_code'):
            # Checked first so the Fixed* classes above keep their own code
            # even though they also subclass InvalidRequestError.
            code = error.jsonrpc_error_code
            msg = str(error)
        elif isinstance(error, InvalidRequestError):
            code = JSONRPCInvalidRequestError.jsonrpc_error_code
            msg = JSONRPCInvalidRequestError.message
        elif isinstance(error, MethodNotFoundError):
            code = JSONRPCMethodNotFoundError.jsonrpc_error_code
            msg = JSONRPCMethodNotFoundError.message
        else:
            # allow exception message to propagate
            code = JSONRPCServerError.jsonrpc_error_code
            msg = str(error)
    else:
        # Plain string: generic implementation-defined server error.
        code = -32000
        msg = error
    return code, msg
class JSONRPCRequest(RPCRequest):
    """A single JSON-RPC 2.0 request.

    ``unique_id`` is None for notifications (no reply expected); any other
    value -- including 0 or '' -- identifies a request that must be answered.
    """

    def error_respond(self, error):
        """Create an error reply for this request.

        :param error: exception or message describing the failure.
        :returns: a JSONRPCErrorResponse, or None for notifications.
        """
        # BUGFIX: only ``id is None`` marks a notification. The previous
        # ``not self.unique_id`` also swallowed replies for the perfectly
        # valid request ids 0 and '' (JSON-RPC 2.0 allows any id value).
        if self.unique_id is None:
            return None
        response = JSONRPCErrorResponse()
        code, msg = _get_code_and_message(error)
        response.error = msg
        response.unique_id = self.unique_id
        response._jsonrpc_error_code = code
        return response

    def respond(self, result):
        """Create a success reply carrying ``result``; None for notifications."""
        if self.unique_id is None:
            return None
        response = JSONRPCSuccessResponse()
        response.result = result
        response.unique_id = self.unique_id
        return response

    def _to_dict(self):
        # Wire representation. 'params' is either positional (args) or
        # keyword (kwargs); create_request forbids supplying both.
        jdata = {
            'jsonrpc': JSONRPCProtocol.JSON_RPC_VERSION,
            'method': self.method,
        }
        if self.args:
            jdata['params'] = self.args
        if self.kwargs:
            jdata['params'] = self.kwargs
        # Identity check (not truthiness) keeps id 0 / '' on the wire.
        if self.unique_id is not None:
            jdata['id'] = self.unique_id
        return jdata

    def serialize(self):
        """Return the request encoded as a JSON string."""
        return json.dumps(self._to_dict())
class JSONRPCBatchRequest(RPCBatchRequest):
    """A batch (list) of requests and/or parse-error placeholders."""

    def create_batch_response(self):
        """Return a JSONRPCBatchResponse if any member expects a reply, else None."""
        if self._expects_response():
            return JSONRPCBatchResponse()

    def _expects_response(self):
        # A response is owed if the batch holds any invalid sub-request
        # (stored as an Exception) or any non-notification request.
        # ``is not None`` (rather than ``!= None``) avoids calling __eq__
        # and keeps the valid request ids 0 / '' counted.
        for request in self:
            if isinstance(request, Exception):
                return True
            if request.unique_id is not None:
                return True
        return False

    def serialize(self):
        """Return the batch encoded as a JSON array string."""
        return json.dumps([req._to_dict() for req in self])
class JSONRPCBatchResponse(RPCBatchResponse):
    """Collects replies for a batch; None entries (notifications) are skipped."""

    def serialize(self):
        """Return the JSON-encoded array of the non-None member responses."""
        # ``is not None`` instead of ``!= None``: identity check, does not
        # invoke __eq__ on response objects.
        return json.dumps([resp._to_dict() for resp in self if resp is not None])
class JSONRPCProtocol(RPCBatchProtocol):
    """JSONRPC protocol implementation.
    Currently, only version 2.0 is supported."""
    JSON_RPC_VERSION = "2.0"
    # Whitelists used to reject replies/requests containing unknown members.
    _ALLOWED_REPLY_KEYS = sorted(['id', 'jsonrpc', 'error', 'result'])
    _ALLOWED_REQUEST_KEYS = sorted(['id', 'jsonrpc', 'method', 'params'])

    def __init__(self, *args, **kwargs):
        super(JSONRPCProtocol, self).__init__(*args, **kwargs)
        # Monotonic counter assigning ids to outgoing (non-notification)
        # requests.
        self._id_counter = 0

    def _get_unique_id(self):
        self._id_counter += 1
        return self._id_counter

    def create_batch_request(self, requests=None):
        return JSONRPCBatchRequest(requests or [])

    def create_request(self, method, args=None, kwargs=None, one_way=False):
        """Build an outgoing request; one_way omits the id (notification)."""
        # JSON-RPC 'params' is a single array OR object -- never both.
        if args and kwargs:
            raise InvalidRequestError('Does not support args and kwargs at '\
                'the same time')
        request = JSONRPCRequest()
        if not one_way:
            request.unique_id = self._get_unique_id()
        request.method = method
        request.args = args
        request.kwargs = kwargs
        return request

    def parse_reply(self, data):
        """Deserialize and validate a reply; raises InvalidReplyError on any
        structural violation of the 2.0 spec."""
        try:
            rep = json.loads(data)
        except Exception as e:
            raise InvalidReplyError(e)
        # iterkeys(): Python 2 dict API.
        for k in rep.iterkeys():
            if not k in self._ALLOWED_REPLY_KEYS:
                raise InvalidReplyError('Key not allowed: %s' % k)
        if not 'jsonrpc' in rep:
            raise InvalidReplyError('Missing jsonrpc (version) in response.')
        if rep['jsonrpc'] != self.JSON_RPC_VERSION:
            raise InvalidReplyError('Wrong JSONRPC version')
        if not 'id' in rep:
            raise InvalidReplyError('Missing id in response')
        # Exactly one of 'error'/'result' must be present (== is XNOR here).
        if ('error' in rep) == ('result' in rep):
            raise InvalidReplyError(
                'Reply must contain exactly one of result and error.'
            )
        if 'error' in rep:
            response = JSONRPCErrorResponse()
            error = rep['error']
            response.error = error['message']
            response._jsonrpc_error_code = error['code']
        else:
            response = JSONRPCSuccessResponse()
            response.result = rep.get('result', None)
        response.unique_id = rep['id']
        return response

    def parse_request(self, data):
        """Deserialize a request or batch of requests.

        Batch members that fail to parse are kept as exception placeholders
        so the caller can answer each sub-request individually.
        """
        try:
            req = json.loads(data)
        except Exception as e:
            raise JSONRPCParseError()
        if isinstance(req, list):
            # batch request
            requests = JSONRPCBatchRequest()
            for subreq in req:
                try:
                    requests.append(self._parse_subrequest(subreq))
                except RPCError as e:
                    requests.append(e)
                except Exception as e:
                    requests.append(JSONRPCInvalidRequestError())
            # An empty batch is itself an invalid request per the spec.
            if not requests:
                raise JSONRPCInvalidRequestError()
            return requests
        else:
            return self._parse_subrequest(req)

    def _parse_subrequest(self, req):
        # Validate one request object: known keys, right version, and a
        # string method name. iterkeys()/basestring: Python 2 APIs.
        for k in req.iterkeys():
            if not k in self._ALLOWED_REQUEST_KEYS:
                raise JSONRPCInvalidRequestError()
        if req.get('jsonrpc', None) != self.JSON_RPC_VERSION:
            raise JSONRPCInvalidRequestError()
        if not isinstance(req['method'], basestring):
            raise JSONRPCInvalidRequestError()
        request = JSONRPCRequest()
        request.method = str(req['method'])
        # Absent id means notification.
        request.unique_id = req.get('id', None)
        params = req.get('params', None)
        if params != None:
            # Array params become positional args, object params keyword args.
            if isinstance(params, list):
                request.args = req['params']
            elif isinstance(params, dict):
                request.kwargs = req['params']
            else:
                raise JSONRPCInvalidParamsError()
        return request
| apache-2.0 |
YAJATapps/FlickLauncher | fill_screens.py | 17 | 2574 | #!/usr/bin/env python2.5
import cgi
import os
import shutil
import sys
import sqlite3
# Grid geometry written into the launcher database.
SCREENS = 5
COLUMNS = 4
ROWS = 4
CELL_SIZE = 110  # NOTE(review): not referenced in this script.
# Scratch directory and database filename used for the adb round-trip.
DIR = "db_files"
AUTO_FILE = "launcher.db"
# Activities the generated folder shortcuts cycle through.
APPLICATION_COMPONENTS = [
    "com.android.calculator2/com.android.calculator2.Calculator",
    "com.android.providers.downloads.ui/com.android.providers.downloads.ui.DownloadList",
    "com.android.settings/com.android.settings.Settings",
    "com.android.mms/com.android.mms.ui.ConversationList",
    "com.android.contacts/com.android.contacts.activities.PeopleActivity",
    "com.android.dialer/com.android.dialer.DialtactsActivity"
]
def usage():
    # Python 2 print statement -- this script targets Python 2.5 (see shebang).
    print "usage: fill_screens.py -- fills up the launcher db"
def make_dir():
    """Recreate the scratch directory from a clean slate."""
    # Second argument True = ignore_errors: tolerate a missing directory.
    shutil.rmtree(DIR, True)
    os.makedirs(DIR)
def pull_file(fn):
    """Copy the launcher database off the device via `adb pull`; exit on failure."""
    print "pull_file: " + fn
    rv = os.system("adb pull"
                   + " /data/data/com.android.launcher/databases/launcher.db"
                   + " " + fn);
    if rv != 0:
        print "adb pull failed"
        sys.exit(1)
def push_file(fn):
    """Write the (modified) database back to the device via `adb push`; exit on failure."""
    print "push_file: " + fn
    rv = os.system("adb push"
                   + " " + fn
                   + " /data/data/com.android.launcher/databases/launcher.db")
    if rv != 0:
        print "adb push failed"
        sys.exit(1)
def process_file(fn):
    """Wipe the favorites table and refill it with SCREENS*ROWS*COLUMNS
    folders, each containing 15 shortcuts cycling through
    APPLICATION_COMPONENTS."""
    print "process_file: " + fn
    conn = sqlite3.connect(fn)
    c = conn.cursor()
    c.execute("DELETE FROM favorites")
    intentFormat = "#Intent;action=android.intent.action.MAIN;category=android.intent.category.LAUNCHER;launchFlags=0x10200000;component=%s;end"
    id = 0;
    for s in range(SCREENS):
        for x in range(ROWS):
            for y in range(COLUMNS):
                id += 1
                # Folder row: container=-100 (desktop), itemType=2 (folder).
                # NOTE(review): SQL is built with %-formatting rather than
                # parameter binding; acceptable here since every value is a
                # local constant, but don't copy this pattern for user input.
                insert = "INSERT into favorites (_id, title, intent, container, screen, cellX, cellY, spanX, spanY, itemType, appWidgetId, iconType) VALUES (%d, '%s', '%s', %d, %d, %d, %d, %d, %d, %d, %d, %d)"
                insert = insert % (id, "title", "", -100, s, x, y, 1, 1, 2, -1, 0)
                c.execute(insert)
                folder_id = id
                # 15 application shortcuts inside the folder just created.
                for z in range(15):
                    id += 1
                    intent = intentFormat % (APPLICATION_COMPONENTS[id % len(APPLICATION_COMPONENTS)])
                    insert = "INSERT into favorites (_id, title, intent, container, screen, cellX, cellY, spanX, spanY, itemType, appWidgetId, iconType) VALUES (%d, '%s', '%s', %d, %d, %d, %d, %d, %d, %d, %d, %d)"
                    insert = insert % (id, "title", intent, folder_id, 0, 0, 0, 1, 1, 0, -1, 0)
                    c.execute(insert)
    conn.commit()
    c.close()
def main(argv):
    # No extra arguments: pull the db off the device, rewrite it, push it back.
    if len(argv) == 1:
        make_dir()
        pull_file(AUTO_FILE)
        process_file(AUTO_FILE)
        push_file(AUTO_FILE)
    else:
        usage()

if __name__=="__main__":
    main(sys.argv)
| apache-2.0 |
arunsingh/selenium | py/selenium/webdriver/common/desired_capabilities.py | 54 | 3499 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
The Desired Capabilities implementation.
"""
class DesiredCapabilities(object):
    """
    Set of default supported desired capabilities.
    Use this as a starting point for creating a desired capabilities object for
    requesting remote webdrivers for connecting to selenium server or selenium grid.
    Usage Example:
        from selenium import webdriver
        selenium_grid_url = "http://198.0.0.1:4444/wd/hub"
        # Create a desired capabilities object as a starting point.
        capabilities = DesiredCapabilities.FIREFOX.copy()
        capabilities['platform'] = "WINDOWS"
        capabilities['version'] = "10"
        # Instantiate an instance of Remote WebDriver with the desired capabilities.
        driver = webdriver.Remote(desired_capabilities=capabilities,
                                  command_executor=selenium_grid_url)
    Note: Always use '.copy()' on the DesiredCapabilities object to avoid the side
    effects of altering the Global class instance.
    """

    # Each capability set below is a plain dict; callers copy and tweak it.
    FIREFOX = {
        "browserName": "firefox",
        "version": "",
        "platform": "ANY",
        "javascriptEnabled": True,
        # Legacy (non-geckodriver) protocol by default.
        "marionette": False,
    }
    INTERNETEXPLORER = {
        "browserName": "internet explorer",
        "version": "",
        "platform": "WINDOWS",
        "javascriptEnabled": True,
    }
    EDGE = {
        "browserName": "MicrosoftEdge",
        "version": "",
        "platform": "WINDOWS"
    }
    CHROME = {
        "browserName": "chrome",
        "version": "",
        "platform": "ANY",
        "javascriptEnabled": True,
    }
    OPERA = {
        "browserName": "opera",
        "version": "",
        "platform": "ANY",
        "javascriptEnabled": True,
    }
    SAFARI = {
        "browserName": "safari",
        "version": "",
        "platform": "ANY",
        "javascriptEnabled": True,
    }
    HTMLUNIT = {
        "browserName": "htmlunit",
        "version": "",
        "platform": "ANY",
    }
    HTMLUNITWITHJS = {
        "browserName": "htmlunit",
        # "firefox" selects HtmlUnit's Firefox browser emulation.
        "version": "firefox",
        "platform": "ANY",
        "javascriptEnabled": True,
    }
    IPHONE = {
        "browserName": "iPhone",
        "version": "",
        "platform": "MAC",
        "javascriptEnabled": True,
    }
    IPAD = {
        "browserName": "iPad",
        "version": "",
        "platform": "MAC",
        "javascriptEnabled": True,
    }
    ANDROID = {
        "browserName": "android",
        "version": "",
        "platform": "ANDROID",
        "javascriptEnabled": True,
    }
    PHANTOMJS = {
        "browserName":"phantomjs",
        "version": "",
        "platform": "ANY",
        "javascriptEnabled": True,
    }
| apache-2.0 |
def-/commandergenius | project/jni/python/src/Lib/sqlite3/test/factory.py | 51 | 7928 | #-*- coding: ISO-8859-1 -*-
# pysqlite2/test/factory.py: tests for the various factories in pysqlite
#
# Copyright (C) 2005-2007 Gerhard Häring <gh@ghaering.de>
#
# This file is part of pysqlite.
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
import unittest
import sqlite3 as sqlite
class MyConnection(sqlite.Connection):
    """Connection subclass used to exercise the ``factory=`` hook."""

    def __init__(self, *args, **kwargs):
        # Delegate straight to the base class; the subclass exists only so
        # the tests can verify the factory instantiated it.
        super(MyConnection, self).__init__(*args, **kwargs)
def dict_factory(cursor, row):
    """Row factory: map column names (from cursor.description) to values."""
    column_names = (info[0] for info in cursor.description)
    return dict(zip(column_names, row))
class MyCursor(sqlite.Cursor):
    """Cursor subclass whose fetched rows come back as dicts."""

    def __init__(self, *args, **kwargs):
        super(MyCursor, self).__init__(*args, **kwargs)
        # Route every fetched row through dict_factory.
        self.row_factory = dict_factory
class ConnectionFactoryTests(unittest.TestCase):
    """Verify connect(factory=...) instantiates the custom Connection class."""
    # Test methods are named Check* and collected via makeSuite(..., "Check");
    # failUnless is the pre-2.7 spelling of assertTrue.
    def setUp(self):
        self.con = sqlite.connect(":memory:", factory=MyConnection)

    def tearDown(self):
        self.con.close()

    def CheckIsInstance(self):
        self.failUnless(isinstance(self.con,
                                   MyConnection),
                        "connection is not instance of MyConnection")
class CursorFactoryTests(unittest.TestCase):
    """Verify cursor(factory=...) instantiates the custom Cursor class."""
    def setUp(self):
        self.con = sqlite.connect(":memory:")

    def tearDown(self):
        self.con.close()

    def CheckIsInstance(self):
        cur = self.con.cursor(factory=MyCursor)
        self.failUnless(isinstance(cur,
                                   MyCursor),
                        "cursor is not instance of MyCursor")
class RowFactoryTestsBackwardsCompat(unittest.TestCase):
    """Verify a cursor factory's row_factory still takes effect (legacy path)."""
    def setUp(self):
        self.con = sqlite.connect(":memory:")

    def CheckIsProducedByFactory(self):
        # MyCursor installs dict_factory, so rows must come back as dicts.
        cur = self.con.cursor(factory=MyCursor)
        cur.execute("select 4+5 as foo")
        row = cur.fetchone()
        self.failUnless(isinstance(row,
                                   dict),
                        "row is not instance of dict")
        cur.close()

    def tearDown(self):
        self.con.close()
class RowFactoryTests(unittest.TestCase):
    """Exercise Connection.row_factory, including the sqlite.Row type."""
    def setUp(self):
        self.con = sqlite.connect(":memory:")

    def CheckCustomFactory(self):
        # Any callable (cursor, row) -> object may serve as a row factory.
        self.con.row_factory = lambda cur, row: list(row)
        row = self.con.execute("select 1, 2").fetchone()
        self.failUnless(isinstance(row,
                                   list),
                        "row is not instance of list")

    def CheckSqliteRowIndex(self):
        # sqlite.Row supports case-insensitive name access and int indexing.
        self.con.row_factory = sqlite.Row
        row = self.con.execute("select 1 as a, 2 as b").fetchone()
        self.failUnless(isinstance(row,
                                   sqlite.Row),
                        "row is not instance of sqlite.Row")
        col1, col2 = row["a"], row["b"]
        self.failUnless(col1 == 1, "by name: wrong result for column 'a'")
        self.failUnless(col2 == 2, "by name: wrong result for column 'a'")
        col1, col2 = row["A"], row["B"]
        self.failUnless(col1 == 1, "by name: wrong result for column 'A'")
        self.failUnless(col2 == 2, "by name: wrong result for column 'B'")
        col1, col2 = row[0], row[1]
        self.failUnless(col1 == 1, "by index: wrong result for column 0")
        self.failUnless(col2 == 2, "by index: wrong result for column 1")

    def CheckSqliteRowIter(self):
        """Checks if the row object is iterable"""
        self.con.row_factory = sqlite.Row
        row = self.con.execute("select 1 as a, 2 as b").fetchone()
        for col in row:
            pass

    def CheckSqliteRowAsTuple(self):
        """Checks if the row object can be converted to a tuple"""
        self.con.row_factory = sqlite.Row
        row = self.con.execute("select 1 as a, 2 as b").fetchone()
        t = tuple(row)

    def CheckSqliteRowAsDict(self):
        """Checks if the row object can be correctly converted to a dictionary"""
        self.con.row_factory = sqlite.Row
        row = self.con.execute("select 1 as a, 2 as b").fetchone()
        d = dict(row)
        self.failUnlessEqual(d["a"], row["a"])
        self.failUnlessEqual(d["b"], row["b"])

    def CheckSqliteRowHashCmp(self):
        """Checks if the row object compares and hashes correctly"""
        # Equal rows must compare equal and share a hash; differing values
        # must break both equality and (with high likelihood) the hash.
        self.con.row_factory = sqlite.Row
        row_1 = self.con.execute("select 1 as a, 2 as b").fetchone()
        row_2 = self.con.execute("select 1 as a, 2 as b").fetchone()
        row_3 = self.con.execute("select 1 as a, 3 as b").fetchone()
        self.failUnless(row_1 == row_1)
        self.failUnless(row_1 == row_2)
        self.failUnless(row_2 != row_3)
        self.failIf(row_1 != row_1)
        self.failIf(row_1 != row_2)
        self.failIf(row_2 == row_3)
        self.failUnlessEqual(row_1, row_2)
        self.failUnlessEqual(hash(row_1), hash(row_2))
        self.failIfEqual(row_1, row_3)
        self.failIfEqual(hash(row_1), hash(row_3))

    def tearDown(self):
        self.con.close()
class TextFactoryTests(unittest.TestCase):
    """Exercise Connection.text_factory (Python 2: unicode vs str returns)."""
    def setUp(self):
        self.con = sqlite.connect(":memory:")

    def CheckUnicode(self):
        # Default text_factory returns unicode (Python 2 builtin).
        austria = unicode("Österreich", "latin1")
        row = self.con.execute("select ?", (austria,)).fetchone()
        self.failUnless(type(row[0]) == unicode, "type of row[0] must be unicode")

    def CheckString(self):
        # text_factory=str yields UTF-8 encoded byte strings.
        self.con.text_factory = str
        austria = unicode("Österreich", "latin1")
        row = self.con.execute("select ?", (austria,)).fetchone()
        self.failUnless(type(row[0]) == str, "type of row[0] must be str")
        self.failUnless(row[0] == austria.encode("utf-8"), "column must equal original data in UTF-8")

    def CheckCustom(self):
        # Arbitrary callables may decode the raw bytes themselves.
        self.con.text_factory = lambda x: unicode(x, "utf-8", "ignore")
        austria = unicode("Österreich", "latin1")
        row = self.con.execute("select ?", (austria.encode("latin1"),)).fetchone()
        self.failUnless(type(row[0]) == unicode, "type of row[0] must be unicode")
        self.failUnless(row[0].endswith(u"reich"), "column must contain original data")

    def CheckOptimizedUnicode(self):
        # OptimizedUnicode: str for pure-ASCII text, unicode otherwise.
        # (The "Deutchland" literal is just ASCII test data.)
        self.con.text_factory = sqlite.OptimizedUnicode
        austria = unicode("Österreich", "latin1")
        germany = unicode("Deutchland")
        a_row = self.con.execute("select ?", (austria,)).fetchone()
        d_row = self.con.execute("select ?", (germany,)).fetchone()
        self.failUnless(type(a_row[0]) == unicode, "type of non-ASCII row must be unicode")
        self.failUnless(type(d_row[0]) == str, "type of ASCII-only row must be str")

    def tearDown(self):
        self.con.close()
def suite():
    """Collect every Check* method of the factory test cases into one suite."""
    connection_suite = unittest.makeSuite(ConnectionFactoryTests, "Check")
    cursor_suite = unittest.makeSuite(CursorFactoryTests, "Check")
    row_suite_compat = unittest.makeSuite(RowFactoryTestsBackwardsCompat, "Check")
    row_suite = unittest.makeSuite(RowFactoryTests, "Check")
    text_suite = unittest.makeSuite(TextFactoryTests, "Check")
    return unittest.TestSuite((connection_suite, cursor_suite, row_suite_compat, row_suite, text_suite))
def test():
    """Run the full factory test suite with the plain text runner."""
    runner = unittest.TextTestRunner()
    runner.run(suite())

if __name__ == "__main__":
    test()
| lgpl-2.1 |
kanagasabapathi/python-for-android | python3-alpha/python3-src/Lib/lib2to3/fixes/fix_apply.py | 161 | 1901 | # Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for apply().
This converts apply(func, v, k) into (func)(*v, **k)."""
# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Call, Comma, parenthesize
class FixApply(fixer_base.BaseFix):
    # lib2to3 fixer: rewrites apply(func, v, k) as (func)(*v, **k).
    BM_compatible = True
    # Pattern matches apply(...) with 2 or 3 positional (non-keyword)
    # arguments, binding func/args/kwds.
    PATTERN = """
    power< 'apply'
        trailer<
            '('
            arglist<
                (not argument<NAME '=' any>) func=any ','
                (not argument<NAME '=' any>) args=any [','
                (not argument<NAME '=' any>) kwds=any] [',']
            >
            ')'
        >
    >
    """

    def transform(self, node, results):
        """Replace the matched apply(...) node with a direct call."""
        syms = self.syms
        assert results
        func = results["func"]
        args = results["args"]
        kwds = results.get("kwds")
        # Preserve the whitespace/comments that preceded the apply() call.
        prefix = node.prefix
        func = func.clone()
        if (func.type not in (token.NAME, syms.atom) and
            (func.type != syms.power or
             func.children[-2].type == token.DOUBLESTAR)):
            # Need to parenthesize
            func = parenthesize(func)
        func.prefix = ""
        args = args.clone()
        args.prefix = ""
        if kwds is not None:
            kwds = kwds.clone()
            kwds.prefix = ""
        # Build the new argument list: *args [, **kwds].
        l_newargs = [pytree.Leaf(token.STAR, "*"), args]
        if kwds is not None:
            l_newargs.extend([Comma(),
                              pytree.Leaf(token.DOUBLESTAR, "**"),
                              kwds])
            l_newargs[-2].prefix = " " # that's the ** token
        # XXX Sometimes we could be cleverer, e.g. apply(f, (x, y) + t)
        # can be translated into f(x, y, *t) instead of f(*(x, y) + t)
        #new = pytree.Node(syms.power, (func, ArgList(l_newargs)))
        return Call(func, l_newargs, prefix=prefix)
| apache-2.0 |
ashishnitinpatil/dotawp_crawler | DotaWp/spiders/dotawp.py | 1 | 1275 | # This is the scrapy spider that is run.
# The parse_item method of the DotawpSpider extracts the image data
# from the site's pages which are then downloaded & stored by ImagePipeline
from scrapy.selector import Selector
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.contrib.spiders import CrawlSpider, Rule
from DotaWp.items import DotawpItem
from scrapy.log import ScrapyFileLogObserver, logging
from urlparse import urlparse
class DotawpSpider(CrawlSpider):
    """Crawls dotawp.com wallpaper pages and yields image items for the
    image pipeline to download."""
    name = 'dotawp'
    allowed_domains = ['www.dotawp.com']
    start_urls = ['http://www.dotawp.com/']
    rules = (
        # Follow every internal page and run parse_item on each.
        Rule(
            SgmlLinkExtractor(
                allow = ('dotawp.com/[\w-]+'),
                unique = True
            ),
            callback = 'parse_item',
            follow = True
        ),
    )

    def parse_item(self, response):
        """Extract title + first image URL from a wallpaper detail page.

        Listing pages (tag/page/category URLs) are skipped, so those
        responses yield an item with no fields set.
        """
        sel = Selector(response)
        item = DotawpItem()
        if not 'tag' in response.url and \
           not 'page' in response.url and \
           not 'category' in response.url:
            title = sel.xpath('//h1/text()').extract()[0]
            item['title'] = title.strip()
            item['image_urls'] = [sel.xpath('//p/a/img/@src').extract()[0]]
        return item
| bsd-3-clause |
resmo/ansible | lib/ansible/module_utils/network/ios/argspec/lacp/lacp.py | 21 | 1284 | #
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#############################################
# WARNING #
#############################################
#
# This file is auto generated by the resource
# module builder playbook.
#
# Do not edit this file manually.
#
# Changes to this file will be over written
# by the resource module builder.
#
# Changes should be made in the model used to
# generate this file or in the resource module
# builder template.
#
#############################################
"""
The arg spec for the ios_lacp module
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
class LacpArgs(object):
    """The arg spec for the ios_lacp module."""

    def __init__(self, **kwargs):
        pass

    # Nested spec for the LACP 'system' block: a single mandatory priority.
    _system_options = {
        'options': {'priority': {'required': True, 'type': 'int'}},
        'type': 'dict',
    }

    # Top-level module options: 'config' carries the LACP system settings,
    # 'state' selects the resource-module operation to perform.
    argument_spec = {
        'config': {
            'options': {'system': _system_options},
            'type': 'dict',
        },
        'state': {
            'choices': ['merged', 'replaced', 'deleted'],
            'default': 'merged',
            'type': 'str',
        },
    }
| gpl-3.0 |
jettisonjoe/openhtf | openhtf/output/callbacks/mfg_inspector.py | 1 | 8230 | """Output and/or upload a TestRun or MfgEvent proto for mfg-inspector.com.
"""
import json
import logging
import threading
import time
import zlib
import httplib2
import oauth2client.client
from openhtf.output import callbacks
from openhtf.output.proto import test_runs_converter
from openhtf.output.proto import guzzle_pb2
class UploadFailedError(Exception):
    """Raised when an upload to mfg-inspector fails.

    Raised with (http_response, content) for non-JSON replies, or
    (error_message, parsed_result) for non-200 statuses.
    """
class InvalidTestRunError(Exception):
    """Raised if test run is invalid."""
def _send_mfg_inspector_request(envelope_data, credentials, destination_url):
    """Send upload http request. Intended to be run in retry loop.

    Args:
      envelope_data: serialized TestRunEnvelope bytes used as the POST body.
      credentials: oauth2client credentials; refreshed here if expired.
      destination_url: the guzzle upload endpoint.
    Returns:
      The parsed JSON response dict on success.
    Raises:
      UploadFailedError: if the body is not JSON or the status is not 200.
    """
    logging.info('Uploading result...')
    http = httplib2.Http()
    if credentials.access_token_expired:
        credentials.refresh(http)
    credentials.authorize(http)
    resp, content = http.request(destination_url, 'POST', envelope_data)
    try:
        result = json.loads(content)
    except Exception:
        # A non-JSON body means the server rejected the request outright.
        logging.debug('Upload failed with response %s: %s', resp, content)
        raise UploadFailedError(resp, content)
    if resp.status != 200:
        logging.debug('Upload failed: %s', result)
        raise UploadFailedError(result['error'], result)
    return result
def send_mfg_inspector_data(inspector_proto, credentials, destination_url):
    """Upload MfgEvent to steam_engine.

    The proto is zlib-compressed into a TestRunEnvelope and POSTed. Up to
    5 attempts are made, one second apart, before giving up.

    Returns:
      The parsed server response dict, or {} if all attempts failed.
    """
    envelope = guzzle_pb2.TestRunEnvelope()
    envelope.payload = zlib.compress(inspector_proto.SerializeToString())
    envelope.payload_type = guzzle_pb2.COMPRESSED_MFG_EVENT
    envelope_data = envelope.SerializeToString()
    for _ in xrange(5):  # xrange: Python 2 builtin.
        try:
            result = _send_mfg_inspector_request(
                envelope_data, credentials, destination_url)
            return result
        except UploadFailedError:
            time.sleep(1)
    logging.critical(
        'Could not upload to mfg-inspector after 5 attempts. Giving up.')
    return {}
class _MemStorage(oauth2client.client.Storage):
    # pylint: disable=invalid-name
    """Helper Storage subclass that keeps credentials purely in memory."""

    def __init__(self):
        self._mutex = threading.Lock()
        self._stored_credentials = None

    def acquire_lock(self):
        # Block until the lock is free, per the Storage base-class contract.
        self._mutex.acquire(True)

    def release_lock(self):
        self._mutex.release()

    def locked_get(self):
        # Base class guarantees the lock is held when this runs.
        return self._stored_credentials

    def locked_put(self, credentials):
        self._stored_credentials = credentials
class MfgInspector(object):
"""Interface to convert a TestRun to a mfg-inspector compatible proto.
Instances of this class are typically used to create callbacks that are
compatible with the OpenHTF output callbacks.
Typical usage:
interface = mfg_inspector.MfgInspector.from_json().set_converter(
my_custom_converter)
my_tester.add_output_callbacks(interface.save_to_disk(), interface.upload())
**Important** the conversion of the TestRecord to protofbuf as specified in
the _converter callable attribute only occurs once and the resulting protobuf
is cached in memory on the instance.
The upload callback will upload to mfg-inspector.com using the given
username and authentication key (which should be the key data itself, not a
filename or file).
In typical productin setups, we *first* save the protobuf to disk then attempt
to upload the protobuf to mfg-inspector. In the event of a network outage,
the result of the test run is available on disk and a separate process can
retry the upload when network is available.
"""
TOKEN_URI = 'https://accounts.google.com/o/oauth2/token'
SCOPE_CODE_URI = 'https://www.googleapis.com/auth/glass.infra.quantum_upload'
DESTINATION_URL = ('https://clients2.google.com/factoryfactory/'
'uploads/quantum_upload/?json')
# These attributes control format of callback and what actions are undertaken
# when called. These should either be set by a subclass or via configure.
# _converter is a callable that can be set either via set_converter method
# or by defining a _converter @staticmethod on subclasses.
_converter = None
# A default filename pattern can be specified on subclasses for use when
# saving to disk via save_to_disk.
_default_filename_pattern = None
def __init__(self, user=None, keydata=None,
token_uri=TOKEN_URI, destination_url=DESTINATION_URL):
self.user = user
self.keydata = keydata
self.token_uri = token_uri
self.destination_url = destination_url
if user and keydata:
self.credentials = oauth2client.client.SignedJwtAssertionCredentials(
service_account_name=self.user,
private_key=self.keydata,
scope=self.SCOPE_CODE_URI,
user_agent='OpenHTF Guzzle Upload Client',
token_uri=self.token_uri)
self.credentials.set_store(_MemStorage())
else:
self.credentials = None
self.upload_result = None
self._cached_proto = None
@classmethod
def from_json(cls, json_data):
"""Create an uploader given (parsed) JSON data.
Note that this is a JSON-formatted key file downloaded from Google when
the service account key is created, *NOT* a json-encoded
oauth2client.client.SignedJwtAssertionCredentials object.
Args:
json_data: Dict containing the loaded JSON key data.
Returns:
a MfgInspectorCallback with credentials.
"""
return cls(user=json_data['client_email'],
keydata=json_data['private_key'],
token_uri=json_data['token_uri'])
def _convert(self, test_record_obj):
"""Convert and cache a test record to a mfg-inspector proto."""
if self._cached_proto is None:
self._cached_proto = self._converter(test_record_obj)
return self._cached_proto
def save_to_disk(self, filename_pattern=None):
"""Returns a callback to convert test record to proto and save to disk."""
if not self._converter:
raise RuntimeError(
'Must set _converter on subclass or via set_converter before calling '
'save_to_disk.')
pattern = filename_pattern or self._default_filename_pattern
if not pattern:
raise RuntimeError(
'Must specify provide a filename_pattern or set a '
'_default_filename_pattern on subclass.')
def save_to_disk_callback(test_record_obj):
proto = self._convert(test_record_obj)
output_to_file = callbacks.OutputToFile(pattern)
with output_to_file.open_output_file(test_record_obj) as outfile:
outfile.write(proto.SerializeToString())
return save_to_disk_callback
def upload(self):
"""Returns a callback to convert a test record to a proto and upload."""
if not self._converter:
raise RuntimeError(
'Must set _converter on subclass or via set_converter before calling '
'upload.')
if not self.credentials:
raise RuntimeError('Must provide credentials to use upload callback.')
def upload_callback(test_record_obj):
proto = self._convert(test_record_obj)
self.upload_result = send_mfg_inspector_data(
proto, self.credentials, self.destination_url)
return upload_callback
  def set_converter(self, converter):
    """Set converter callable to convert a OpenHTF tester_record to a proto.

    Args:
      converter: a callable that accepts an OpenHTF TestRecord and returns a
        manufacturing-inspector compatible protobuf.

    Returns:
      self to make this call chainable.
    """
    # Fail fast on misconfiguration rather than at callback time.
    assert callable(converter), 'Converter must be callable.'
    self._converter = converter
    return self
# LEGACY / DEPRECATED
class UploadToMfgInspector(MfgInspector):
  """Generate a mfg-inspector TestRun proto and upload it.

  LEGACY / DEPRECATED

  Kept only for backwards compatibility and may be deleted in future.
  Prefer configuring a MfgInspectorCallback directly, e.g.:

    test.add_output_callbacks(
        mfg_inspector.MfgInspectorCallback.from_json(**json_data).set_converter(
            test_runs_converter.test_run_from_test_record).upload()
    )
  """

  @staticmethod
  def _converter(test_record_obj):
    # Fixed converter: always the TestRun converter.
    return test_runs_converter.test_run_from_test_record(test_record_obj)

  def __call__(self, test_record_obj):  # pylint: disable=invalid-name
    # Behave as an output callback: build the upload callback and invoke it.
    self.upload()(test_record_obj)
| apache-2.0 |
defionscode/ansible | test/units/plugins/lookup/test_aws_ssm.py | 40 | 6486 | #
# (c) 2017 Michael De La Rue
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
import pytest
from copy import copy
from ansible.errors import AnsibleError
import ansible.plugins.lookup.aws_ssm as aws_ssm
try:
import boto3
from botocore.exceptions import ClientError
except ImportError:
pytestmark = pytest.mark.skip("This test requires the boto3 and botocore Python libraries")
# Canned boto3 get_parameters() response: one plain String parameter resolves.
simple_variable_success_response = {
    'Parameters': [
        {
            'Name': 'simple_variable',
            'Type': 'String',
            'Value': 'simplevalue',
            'Version': 1
        }
    ],
    'InvalidParameters': [],
    'ResponseMetadata': {
        'RequestId': '12121212-3434-5656-7878-9a9a9a9a9a9a',
        'HTTPStatusCode': 200,
        'HTTPHeaders': {
            'x-amzn-requestid': '12121212-3434-5656-7878-9a9a9a9a9a9a',
            'content-type': 'application/x-amz-json-1.1',
            'content-length': '116',
            'date': 'Tue, 23 Jan 2018 11:04:27 GMT'
        },
        'RetryAttempts': 0
    }
}
# The derived fixtures use a *shallow* copy and then reassign top-level keys
# only, so the shared nested ResponseMetadata dict is never mutated.
# get_parameters_by_path() response: two parameters under /testpath.
path_success_response = copy(simple_variable_success_response)
path_success_response['Parameters'] = [
    {'Name': '/testpath/too', 'Type': 'String', 'Value': 'simple_value_too', 'Version': 1},
    {'Name': '/testpath/won', 'Type': 'String', 'Value': 'simple_value_won', 'Version': 1}
]
# Response for a parameter name that does not exist.
missing_variable_response = copy(simple_variable_success_response)
missing_variable_response['Parameters'] = []
missing_variable_response['InvalidParameters'] = ['missing_variable']
# Mixed response: two names resolve, one is invalid.
some_missing_variable_response = copy(simple_variable_success_response)
some_missing_variable_response['Parameters'] = [
    {'Name': 'simple', 'Type': 'String', 'Value': 'simple_value', 'Version': 1},
    {'Name': '/testpath/won', 'Type': 'String', 'Value': 'simple_value_won', 'Version': 1}
]
some_missing_variable_response['InvalidParameters'] = ['missing_variable']
# Fake AWS credentials passed as keyword arguments to LookupModule.run().
dummy_credentials = {}
dummy_credentials['boto_profile'] = None
dummy_credentials['aws_secret_key'] = "notasecret"
dummy_credentials['aws_access_key'] = "notakey"
dummy_credentials['aws_security_token'] = None
dummy_credentials['region'] = 'eu-west-1'
def test_lookup_variable(mocker):
    """A single-name lookup returns the value and calls get_parameters correctly."""
    lookup = aws_ssm.LookupModule()
    lookup._load_name = "aws_ssm"
    session_double = mocker.MagicMock()
    client_factory = session_double.Session.return_value.client
    client_factory.return_value.get_parameters.return_value = simple_variable_success_response
    with mocker.patch.object(boto3, 'session', session_double):
        result = lookup.run(["simple_variable"], {}, **dummy_credentials)
    assert result[0] == "simplevalue"
    # The SSM client must be built with the dummy credentials and region.
    client_factory.assert_called_with('ssm', 'eu-west-1', aws_access_key_id='notakey',
                                      aws_secret_access_key="notasecret", aws_session_token=None)
def test_path_lookup_variable(mocker):
    # With bypath=true the lookup should call get_parameters_by_path and
    # return a dict keyed by full parameter path.
    lookup = aws_ssm.LookupModule()
    lookup._load_name = "aws_ssm"
    boto3_double = mocker.MagicMock()
    get_path_fn = boto3_double.Session.return_value.client.return_value.get_parameters_by_path
    get_path_fn.return_value = path_success_response
    boto3_client_double = boto3_double.Session.return_value.client
    with mocker.patch.object(boto3, 'session', boto3_double):
        args = copy(dummy_credentials)
        args["bypath"] = 'true'
        retval = lookup.run(["/testpath"], {}, **args)
    assert(retval[0]["/testpath/won"] == "simple_value_won")
    assert(retval[0]["/testpath/too"] == "simple_value_too")
    boto3_client_double.assert_called_with('ssm', 'eu-west-1', aws_access_key_id='notakey',
                                           aws_secret_access_key="notasecret", aws_session_token=None)
    # Non-recursive by-path query with decryption enabled.
    get_path_fn.assert_called_with(Path="/testpath", Recursive=False, WithDecryption=True)
def test_return_none_for_missing_variable(mocker):
    """
    During Jinja2 templating we shouldn't normally raise exceptions, since doing
    so blocks the ability to use defaults; for this reason we return ``None``
    for missing variables.
    """
    lookup = aws_ssm.LookupModule()
    lookup._load_name = "aws_ssm"
    boto3_double = mocker.MagicMock()
    boto3_double.Session.return_value.client.return_value.get_parameters.return_value = missing_variable_response
    with mocker.patch.object(boto3, 'session', boto3_double):
        retval = lookup.run(["missing_variable"], {}, **dummy_credentials)
    assert(retval[0] is None)
def test_match_retvals_to_call_params_even_with_some_missing_variables(mocker):
    """
    If we get a complex list of variables with some missing and some not, we
    still have to return a list which matches the original variable list.
    """
    lookup = aws_ssm.LookupModule()
    lookup._load_name = "aws_ssm"
    boto3_double = mocker.MagicMock()
    boto3_double.Session.return_value.client.return_value.get_parameters.return_value = some_missing_variable_response
    with mocker.patch.object(boto3, 'session', boto3_double):
        retval = lookup.run(["simple", "missing_variable", "/testpath/won", "simple"], {}, **dummy_credentials)
    # Position i of the result corresponds to position i of the request, with
    # None holding the place of each unresolvable name (duplicates included).
    assert(retval == ["simple_value", None, "simple_value_won", "simple_value"])
# Fixtures for the error-path test below: a fake botocore ClientError payload.
error_response = {'Error': {'Code': 'ResourceNotFoundException', 'Message': 'Fake Testing Error'}}
operation_name = 'FakeOperation'
def test_warn_denied_variable(mocker):
    # A ClientError from SSM (e.g. access denied) must surface as an
    # AnsibleError rather than being silently swallowed.
    lookup = aws_ssm.LookupModule()
    lookup._load_name = "aws_ssm"
    boto3_double = mocker.MagicMock()
    boto3_double.Session.return_value.client.return_value.get_parameters.side_effect = ClientError(error_response, operation_name)
    with pytest.raises(AnsibleError):
        with mocker.patch.object(boto3, 'session', boto3_double):
            lookup.run(["denied_variable"], {}, **dummy_credentials)
| gpl-3.0 |
minhphung171093/OpenERP_V7 | openerp/addons/point_of_sale/wizard/pos_box_entries.py | 54 | 6577 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv, fields
from openerp.tools.translate import _
def get_journal(self, cr, uid, context=None):
    """
    Build the selection list of cash journals with an open statement.
    @param self: The object pointer.
    @param cr: A database cursor
    @param uid: ID of the user currently logged in
    @param context: A standard dictionary
    @return: list of (id, name) journal entries
    """
    journal_obj = self.pool.get('account.journal')
    statement_obj = self.pool.get('account.bank.statement')
    journal_ids = journal_obj.search(cr, uid, [('journal_user', '=', 1)], context=context)
    statement_ids = statement_obj.search(
        cr, uid,
        [('state', '=', 'open'), ('user_id', '=', uid), ('journal_id', 'in', journal_ids)],
        context=context)
    records = statement_obj.read(cr, uid, statement_ids, ['journal_id'], context=context)
    # read() on a many2one yields (id, name) pairs -- exactly the selection format.
    journals = [record['journal_id'] for record in records]
    if not journals and context:
        raise osv.except_osv(_('Error!'), _('You do not have any open cash register. You must create a payment method or open a cash register.'))
    return journals
class pos_box_entries(osv.osv_memory):
    # Transient wizard ("put money in"): records a cash-in operation as a new
    # line on the user's currently open cash statement.
    _name = 'pos.box.entries'
    _description = 'Pos Box Entries'
    def _get_income_product(self, cr, uid, context=None):
        """
        Make the selection list of purchasing products.
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param context: A standard dictionary
        @return: list of (id, name) pairs of income products
        """
        product_obj = self.pool.get('product.product')
        ids = product_obj.search(cr, uid, [('income_pdt', '=', True)], context=context)
        res = product_obj.read(cr, uid, ids, ['id', 'name'], context=context)
        res = [(r['id'], r['name']) for r in res]
        # Empty first entry so the selection widget can start blank.
        res.insert(0, ('', ''))
        return res
    _columns = {
        'name': fields.char('Reason', size=32, required=True),
        # NOTE(review): the domain filters on 'journal_id.type' although this
        # field *is* the journal -- presumably it should be
        # [('type', '=', 'cash')]; confirm before changing.
        'journal_id': fields.many2one('account.journal', 'Cash Register', required=True, domain="[('journal_id.type', '=', 'cash')]"),
        'product_id': fields.selection(_get_income_product, "Operation", required=True, size=-1),
        'amount': fields.float('Amount', digits=(16, 2), required=True),
        'ref': fields.char('Ref', size=32),
        'session_id' : fields.many2one('pos.session', 'Session'),
        'user_id' : fields.many2one('res.users', 'User'),
    }
    def _default_session_id(self, cr, uid, context=None):
        # Wizard is launched from a POS session; its id arrives as active_id.
        return context and context.get('active_id', False) or False
    def _default_cash_register(self, cr, uid, context=None):
        #import pdb
        #pdb.set_trace()
        if not context:
            context = {}
        result = context.get('active_id', False) or False
        return result
    _defaults = {
        #'session_id' : _default_session_id,
        #'journal_id': _default_cash_register,
        #'product_id': 1,
        'user_id' : lambda obj, cr, uid, context: uid,
    }
    def get_in(self, cr, uid, ids, context=None):
        """
        Create a bank statement line for each wizard record.
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param context: A standard dictionary
        @return: empty dict (closes the wizard)
        """
        statement_obj = self.pool.get('account.bank.statement')
        res_obj = self.pool.get('res.users')
        product_obj = self.pool.get('product.product')
        bank_statement = self.pool.get('account.bank.statement.line')
        for data in self.read(cr, uid, ids, context=context):
            vals = {}
            curr_company = res_obj.browse(cr, uid, uid, context=context).company_id.id
            # NOTE(review): int(data['journal_id']) assumes read() returned a
            # raw id here, not the usual (id, name) many2one pair -- verify.
            statement_id = statement_obj.search(cr, uid, [('journal_id', '=', int(data['journal_id'])), ('company_id', '=', curr_company), ('user_id', '=', uid), ('state', '=', 'open')], context=context)
            if not statement_id:
                raise osv.except_osv(_('Error!'), _('You have to open at least one cashbox.'))
            product = product_obj.browse(cr, uid, int(data['product_id']))
            # Income account: product-level setting, falling back to category.
            acc_id = product.property_account_income or product.categ_id.property_account_income_categ
            if not acc_id:
                raise osv.except_osv(_('Error!'), _('Please check that income account is set to %s.')%(product_obj.browse(cr, uid, data['product_id']).name))
            if statement_id:
                statement_id = statement_id[0]
            # Fallback: create a fresh statement if none was found (dead code
            # in practice, since the search miss already raised above).
            if not statement_id:
                statement_id = statement_obj.create(cr, uid, {
                    'date': time.strftime('%Y-%m-%d 00:00:00'),
                    'journal_id': data['journal_id'],
                    'company_id': curr_company,
                    'user_id': uid,
                }, context=context)
            vals['statement_id'] = statement_id
            vals['journal_id'] = data['journal_id']
            if acc_id:
                vals['account_id'] = acc_id.id
            vals['amount'] = data['amount'] or 0.0
            vals['ref'] = "%s" % (data['ref'] or '')
            # Python 2: .decode('utf8') turns the byte-string reason into unicode.
            vals['name'] = "%s: %s " % (product_obj.browse(cr, uid, data['product_id'], context=context).name, data['name'].decode('utf8'))
            bank_statement.create(cr, uid, vals, context=context)
        return {}
pos_box_entries()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
laborautonomo/pip | pip/_vendor/cachecontrol/serialize.py | 24 | 3151 | import io
from pip._vendor.requests.structures import CaseInsensitiveDict
from .compat import HTTPResponse, pickle
class Serializer(object):
    """Serialize/deserialize HTTP responses for the cache.

    Payloads are prefixed with a version marker (b"cc=N,") so the on-disk
    format can evolve; entries with an unknown version are treated as misses.
    NOTE(review): the body uses pickle, which is only safe because the cache
    is written locally -- never load cache data from an untrusted source.
    """
    def dumps(self, request, response, body=None):
        # Case-insensitive view so the "vary" lookup below is header-case safe.
        response_headers = CaseInsensitiveDict(response.headers)
        if body is None:
            body = response.read(decode_content=False)
            # Reading consumed the raw stream; re-seed the response's file
            # object so the caller can still read the body afterwards.
            response._fp = io.BytesIO(body)
        data = {
            "response": {
                "body": body,
                "headers": response.headers,
                "status": response.status,
                "version": response.version,
                "reason": response.reason,
                "strict": response.strict,
                "decode_content": response.decode_content,
            },
        }
        # Construct our vary headers
        data["vary"] = {}
        if "vary" in response_headers:
            varied_headers = response_headers['vary'].split(',')
            for header in varied_headers:
                header = header.strip()
                data["vary"][header] = request.headers.get(header, None)
        return b"cc=1," + pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
    def loads(self, request, data):
        # Short circuit if we've been given an empty set of data
        if not data:
            return
        # Determine what version of the serializer the data was serialized
        # with
        try:
            ver, data = data.split(b",", 1)
        except ValueError:
            ver = b"cc=0"
        # Make sure that our "ver" is actually a version and isn't a false
        # positive from a , being in the data stream.
        if ver[:3] != b"cc=":
            data = ver + data
            ver = b"cc=0"
        # Get the version number out of the cc=N
        ver = ver.split(b"=", 1)[-1].decode("ascii")
        # Dispatch to the actual load method for the given version
        try:
            return getattr(self, "_loads_v{0}".format(ver))(request, data)
        except AttributeError:
            # This is a version we don't have a loads function for, so we'll
            # just treat it as a miss and return None
            return
    def _loads_v0(self, request, data):
        # The original legacy cache data. This doesn't contain enough
        # information to construct everything we need, so we'll treat this as
        # a miss.
        return
    def _loads_v1(self, request, data):
        try:
            cached = pickle.loads(data)
        except ValueError:
            return
        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        if "*" in cached.get("vary", {}):
            return
        # Ensure that the Vary headers for the cached response match our
        # request
        for header, value in cached.get("vary", {}).items():
            if request.headers.get(header, None) != value:
                return
        # Rebuild the urllib3 response around the stored body bytes.
        body = io.BytesIO(cached["response"].pop("body"))
        return HTTPResponse(
            body=body,
            preload_content=False,
            **cached["response"]
        )
| mit |
fausecteam/ctf-gameserver | examples/checker/example_checker.py | 1 | 2527 | #!/usr/bin/env python3
import logging
import socket
from ctf_gameserver import checkerlib
class ExampleChecker(checkerlib.BaseChecker):
    """Checker for the example service: stores flags with SET, reads them with GET.

    Bug fix: the ``except UnicodeDecodeError`` handlers in place_flag() and
    check_flag() previously logged ``repr(resp)`` although ``resp`` is unbound
    when recv_line() raises, turning a service fault into a checker crash
    (NameError).  The handlers now log the exception itself.
    NOTE(review): connections are not closed on the early FAULTY returns;
    consider contextlib.closing() if that matters operationally.
    """

    def place_flag(self, tick):
        conn = connect(self.ip)
        flag = checkerlib.get_flag(tick)
        conn.sendall('SET {} {}\n'.format(tick, flag).encode('utf-8'))
        logging.info('Sent SET command: Flag %s', flag)
        try:
            resp = recv_line(conn)
        except UnicodeDecodeError as e:
            logging.warning('Received non-UTF-8 data: %s', e)
            return checkerlib.CheckResult.FAULTY
        logging.info('Received response to SET command: %s', repr(resp))
        if resp != 'OK':
            logging.warning('Received wrong response to SET command')
            return checkerlib.CheckResult.FAULTY
        conn.close()
        return checkerlib.CheckResult.OK

    def check_service(self):
        conn = connect(self.ip)
        conn.sendall(b'XXX\n')
        logging.info('Sent dummy command')
        try:
            recv_line(conn)
            logging.info('Received response to dummy command')
        except UnicodeDecodeError:
            logging.warning('Received non-UTF-8 data')
            return checkerlib.CheckResult.FAULTY
        conn.close()
        return checkerlib.CheckResult.OK

    def check_flag(self, tick):
        flag = checkerlib.get_flag(tick)
        conn = connect(self.ip)
        conn.sendall('GET {}\n'.format(tick).encode('utf-8'))
        logging.info('Sent GET command')
        try:
            resp = recv_line(conn)
        except UnicodeDecodeError as e:
            logging.warning('Received non-UTF-8 data: %s', e)
            return checkerlib.CheckResult.FAULTY
        logging.info('Received response to GET command: %s', repr(resp))
        if resp != flag:
            logging.warning('Received wrong response to GET command')
            return checkerlib.CheckResult.FLAG_NOT_FOUND
        conn.close()
        return checkerlib.CheckResult.OK
def connect(ip):
    """Open a TCP connection to the example service on port 9999."""
    service_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    service_socket.connect((ip, 9999))
    return service_socket
def recv_line(conn):
    """Receive from *conn* until a newline arrives; return the decoded, stripped line.

    Raises:
        EOFError: if the peer closes the connection before a full line arrived.
    """
    received = b''
    while not received.endswith(b'\n'):
        new = conn.recv(1024)
        if not new:
            # The loop condition guarantees the line is still incomplete here,
            # so the original re-check of endswith(b'\n') (and the unreachable
            # `break` behind the raise) was dead code: EOF before a newline is
            # always an error.
            raise EOFError('Unexpected EOF')
        received += new
    return received.decode('utf-8').rstrip()
# Entry point: hand control to the checkerlib runner, which drives the
# checker's place_flag/check_service/check_flag methods.
if __name__ == '__main__':
    checkerlib.run_check(ExampleChecker)
| isc |
barnone/EigenD | pigui/mathutils.py | 3 | 2709 |
#
# Copyright 2009 Eigenlabs Ltd. http://www.eigenlabs.com
#
# This file is part of EigenD.
#
# EigenD is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# EigenD is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EigenD. If not, see <http://www.gnu.org/licenses/>.
#
import math,types
class vector:
    """Directed segment from p1 to p2 with length/direction/interpolation helpers."""

    def __init__(self, p1, p2):
        self.p1 = p1
        self.p2 = p2
        # Displacement from p1 to p2, cached once.
        self.delta = p2 - p1

    def getP1(self):
        return self.p1

    def getP2(self):
        return self.p2

    def isHorizontal(self):
        return self.delta.getY() == 0.0

    def isVertical(self):
        return self.delta.getX() == 0.0

    def getLength(self):
        return self.delta.getLength()

    def getUnitVector(self):
        length = self.getLength()
        return point(self.delta.getX() / length, self.delta.getY() / length)

    def getNormalUnitVector(self):
        # Perpendicular unit direction: rotate the unit vector by 90 degrees.
        length = self.getLength()
        return point(-1 * self.delta.getY() / length, self.delta.getX() / length)

    def getX(self):
        return self.delta.getX()

    def getY(self):
        return self.delta.getY()

    def getMidPoint(self):
        return self.getPoint(0.5)

    def getPoint(self, proportion):
        # Linear interpolation: p1 + proportion * (p2 - p1).
        return self.p1 + self.delta * proportion

    def isZero(self):
        return self.p1 == self.p2

    def asTuple(self):
        return self.p1.asTuple() + self.p2.asTuple()
class point:
    """2-D point supporting +, -, scalar *, and equality comparison."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __add__(self, other):
        return point(self.x + other.x, self.y + other.y)

    def __sub__(self, other):
        return point(self.x - other.x, self.y - other.y)

    def __mul__(self, other):
        # Fix: the original tested types.IntType/FloatType (Python 2 only) and
        # silently fell through to an implicit None for unsupported operands.
        # isinstance(other, (int, float)) is equivalent on Python 2 and also
        # works on Python 3; returning NotImplemented lets Python produce a
        # proper TypeError instead of propagating None.
        if isinstance(other, (int, float)):
            return point(self.x * other, self.y * other)
        return NotImplemented

    def __eq__(self, other):
        return self.x == other.x and self.y == other.y

    def getX(self):
        return self.x

    def getY(self):
        return self.y

    def getPosition(self):
        return (self.x, self.y)

    def getLength(self):
        # Euclidean distance from the origin.
        return math.pow((self.x * self.x) + (self.y * self.y), 0.5)

    def asTuple(self):
        return (self.getX(), self.getY())
class point_t(point):
    """point built from a 2-sequence position (x, y)."""

    def __init__(self, pos):
        # Explicit base-class call (old-style class in Python 2, so no super()).
        x, y = pos[0], pos[1]
        point.__init__(self, x, y)
| gpl-3.0 |
pdellaert/ansible | lib/ansible/modules/network/aci/aci_l3out.py | 13 | 11042 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: aci_l3out
short_description: Manage Layer 3 Outside (L3Out) objects (l3ext:Out)
description:
- Manage Layer 3 Outside (L3Out) on Cisco ACI fabrics.
version_added: '2.6'
options:
tenant:
description:
- Name of an existing tenant.
type: str
required: yes
aliases: [ tenant_name ]
l3out:
description:
- Name of L3Out being created.
type: str
required: yes
aliases: [ l3out_name, name ]
vrf:
description:
- Name of the VRF being associated with the L3Out.
type: str
required: yes
aliases: [ vrf_name ]
domain:
description:
- Name of the external L3 domain being associated with the L3Out.
type: str
required: yes
aliases: [ ext_routed_domain_name, routed_domain ]
dscp:
description:
- The target Differentiated Service (DSCP) value.
- The APIC defaults to C(unspecified) when unset during creation.
type: str
choices: [ AF11, AF12, AF13, AF21, AF22, AF23, AF31, AF32, AF33, AF41, AF42, AF43, CS0, CS1, CS2, CS3, CS4, CS5, CS6, CS7, EF, VA, unspecified ]
aliases: [ target ]
route_control:
description:
- Route Control enforcement direction. The only allowed values are export or import,export.
type: list
choices: [ export, import ]
aliases: [ route_control_enforcement ]
l3protocol:
description:
- Routing protocol for the L3Out
type: list
choices: [ bgp, eigrp, ospf, pim, static ]
asn:
description:
- The AS number for the L3Out.
- Only applicable when using 'eigrp' as the l3protocol
type: int
aliases: [ as_number ]
version_added: '2.8'
description:
description:
- Description for the L3Out.
type: str
aliases: [ descr ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
type: str
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
notes:
- The C(tenant) and C(domain) and C(vrf) used must exist before using this module in your playbook.
The M(aci_tenant) and M(aci_domain) and M(aci_vrf) modules can be used for this.
seealso:
- module: aci_tenant
- module: aci_domain
- module: aci_vrf
- name: APIC Management Information Model reference
description: More information about the internal APIC class B(l3ext:Out).
link: https://developer.cisco.com/docs/apic-mim-ref/
author:
- Rostyslav Davydenko (@rost-d)
'''
EXAMPLES = r'''
- name: Add a new L3Out
aci_l3out:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
name: prod_l3out
description: L3Out for Production tenant
domain: l3dom_prod
vrf: prod
l3protocol: ospf
state: present
delegate_to: localhost
- name: Delete L3Out
aci_l3out:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
name: prod_l3out
state: absent
delegate_to: localhost
- name: Query L3Out information
aci_l3out:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
name: prod_l3out
state: query
delegate_to: localhost
register: query_result
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: str
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: str
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: str
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: str
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
def main():
    # Build the module's argument spec on top of the shared ACI connection args.
    argument_spec = aci_argument_spec()
    argument_spec.update(
        tenant=dict(type='str', aliases=['tenant_name']),  # Not required for querying all objects
        l3out=dict(type='str', aliases=['l3out_name', 'name']),  # Not required for querying all objects
        domain=dict(type='str', aliases=['ext_routed_domain_name', 'routed_domain']),
        vrf=dict(type='str', aliases=['vrf_name']),
        description=dict(type='str', aliases=['descr']),
        route_control=dict(type='list', choices=['export', 'import'], aliases=['route_control_enforcement']),
        dscp=dict(type='str',
                  choices=['AF11', 'AF12', 'AF13', 'AF21', 'AF22', 'AF23', 'AF31', 'AF32', 'AF33', 'AF41', 'AF42',
                           'AF43', 'CS0', 'CS1', 'CS2', 'CS3', 'CS4', 'CS5', 'CS6', 'CS7', 'EF', 'VA', 'unspecified'],
                  aliases=['target']),
        l3protocol=dict(type='list', choices=['bgp', 'eigrp', 'ospf', 'pim', 'static']),
        asn=dict(type='int', aliases=['as_number']),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query'])
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=[
            ['state', 'absent', ['l3out', 'tenant']],
            ['state', 'present', ['l3out', 'tenant', 'domain', 'vrf']],
        ],
    )
    aci = ACIModule(module)
    l3out = module.params['l3out']
    domain = module.params['domain']
    dscp = module.params['dscp']
    description = module.params['description']
    enforceRtctrl = module.params['route_control']
    vrf = module.params['vrf']
    l3protocol = module.params['l3protocol']
    asn = module.params['asn']
    state = module.params['state']
    tenant = module.params['tenant']
    # Cross-parameter validation: the AS number only makes sense for EIGRP.
    if l3protocol:
        if 'eigrp' in l3protocol and asn is None:
            module.fail_json(msg="Parameter 'asn' is required when l3protocol is 'eigrp'")
        if 'eigrp' not in l3protocol and asn is not None:
            module.warn("Parameter 'asn' is only applicable when l3protocol is 'eigrp'. The ASN will be ignored")
    # Translate the route_control list into the APIC's comma-joined attribute.
    # 'import' alone is rejected by the APIC, hence the explicit failure.
    enforce_ctrl = ''
    if enforceRtctrl is not None:
        if len(enforceRtctrl) == 1 and enforceRtctrl[0] == 'import':
            aci.fail_json(
                "The route_control parameter is invalid: allowed options are export or import,export only")
        elif len(enforceRtctrl) == 1 and enforceRtctrl[0] == 'export':
            enforce_ctrl = 'export'
        else:
            enforce_ctrl = 'export,import'
    child_classes = ['l3extRsL3DomAtt', 'l3extRsEctx', 'bgpExtP', 'ospfExtP', 'eigrpExtP', 'pimExtP']
    # REST URL: tenant (fvTenant) -> L3Out (l3extOut), including child classes.
    aci.construct_url(
        root_class=dict(
            aci_class='fvTenant',
            aci_rn='tn-{0}'.format(tenant),
            module_object=tenant,
            target_filter={'name': tenant},
        ),
        subclass_1=dict(
            aci_class='l3extOut',
            aci_rn='out-{0}'.format(l3out),
            module_object=l3out,
            target_filter={'name': l3out},
        ),
        child_classes=child_classes,
    )
    aci.get_existing()
    # Mandatory children: the routed domain binding and the VRF association.
    child_configs = [
        dict(l3extRsL3DomAtt=dict(attributes=dict(
            tDn='uni/l3dom-{0}'.format(domain)))),
        dict(l3extRsEctx=dict(attributes=dict(tnFvCtxName=vrf))),
    ]
    # One protocol child object per requested routing protocol ('static' has
    # no child object and so is intentionally skipped here).
    if l3protocol is not None:
        for protocol in l3protocol:
            if protocol == 'bgp':
                child_configs.append(
                    dict(bgpExtP=dict(attributes=dict(descr='', nameAlias=''))))
            elif protocol == 'eigrp':
                child_configs.append(
                    dict(eigrpExtP=dict(attributes=dict(descr='', nameAlias='', asn=asn))))
            elif protocol == 'ospf':
                child_configs.append(
                    dict(ospfExtP=dict(attributes=dict(descr='', nameAlias=''))))
            elif protocol == 'pim':
                child_configs.append(
                    dict(pimExtP=dict(attributes=dict(descr='', nameAlias=''))))
    if state == 'present':
        aci.payload(
            aci_class='l3extOut',
            class_config=dict(
                name=l3out,
                descr=description,
                dn='uni/tn-{0}/out-{1}'.format(tenant, l3out),
                enforceRtctrl=enforce_ctrl,
                targetDscp=dscp
            ),
            child_configs=child_configs,
        )
        aci.get_diff(aci_class='l3extOut')
        aci.post_config()
    elif state == 'absent':
        aci.delete_config()
    aci.exit_json()
| gpl-3.0 |
michaelbramwell/sms-tools | lectures/03-Fourier-properties/plots-code/fft-zero-phase.py | 24 | 1140 | import matplotlib.pyplot as plt
import numpy as np
from scipy.fftpack import fft, fftshift
import sys
sys.path.append('../../../software/models/')
import utilFunctions as UF
(fs, x) = UF.wavread('../../../sounds/oboe-A4.wav')
N = 512
M = 401
hN = N/2
hM = (M+1)/2
start = .8*fs
xw = x[start-hM:start+hM-1] * np.hamming(M)
plt.figure(1, figsize=(9.5, 6.5))
plt.subplot(411)
plt.plot(np.arange(-hM, hM-1), xw, lw=1.5)
plt.axis([-hN, hN-1, min(xw), max(xw)])
plt.title('x (oboe-A4.wav), M = 401')
fftbuffer = np.zeros(N)
fftbuffer[:hM] = xw[hM-1:]
fftbuffer[N-hM+1:] = xw[:hM-1]
plt.subplot(412)
plt.plot(np.arange(0, N), fftbuffer, lw=1.5)
plt.axis([0, N, min(xw), max(xw)])
plt.title('fftbuffer: N = 512')
X = fftshift(fft(fftbuffer))
mX = 20 * np.log10(abs(X)/N)
pX = np.unwrap(np.angle(X))
plt.subplot(413)
plt.plot(np.arange(-hN, hN), mX, 'r', lw=1.5)
plt.axis([-hN,hN-1,-100,max(mX)])
plt.title('mX')
plt.subplot(414)
plt.plot(np.arange(-hN, hN), pX, 'c', lw=1.5)
plt.axis([-hN,hN-1,min(pX),max(pX)])
plt.title('pX')
plt.tight_layout()
plt.savefig('fft-zero-phase.png')
plt.show()
| agpl-3.0 |
ThinkOpen-Solutions/odoo | addons/bus/bus.py | 325 | 7324 | # -*- coding: utf-8 -*-
import datetime
import json
import logging
import select
import threading
import time
import random
import simplejson
import openerp
from openerp.osv import osv, fields
from openerp.http import request
from openerp.tools.misc import DEFAULT_SERVER_DATETIME_FORMAT
_logger = logging.getLogger(__name__)
TIMEOUT = 50
#----------------------------------------------------------
# Bus
#----------------------------------------------------------
def json_dump(v):
    # Compact JSON (no whitespace after separators) so channel keys compare
    # byte-for-byte between sendmany() and poll().
    return simplejson.dumps(v, separators=(',', ':'))
def hashable(key):
    """Return *key* unchanged, except lists become (hashable) tuples."""
    return tuple(key) if isinstance(key, list) else key
class ImBus(osv.Model):
    """Persistent message bus.

    Notifications are stored in the ``bus.bus`` table; other server
    processes are woken up through a PostgreSQL NOTIFY on the shared
    'postgres' database (consumed by ImDispatch.loop below).
    """
    _name = 'bus.bus'
    _columns = {
        'id' : fields.integer('Id'),
        'create_date' : fields.datetime('Create date'),
        'channel' : fields.char('Channel'),  # JSON-encoded channel key
        'message' : fields.char('Message'),  # JSON-encoded payload
    }
    def gc(self, cr, uid):
        # Drop notifications older than twice the long-polling timeout:
        # poll() can no longer return them (it only looks back TIMEOUT secs).
        timeout_ago = datetime.datetime.utcnow()-datetime.timedelta(seconds=TIMEOUT*2)
        domain = [('create_date', '<', timeout_ago.strftime(DEFAULT_SERVER_DATETIME_FORMAT))]
        ids = self.search(cr, openerp.SUPERUSER_ID, domain)
        self.unlink(cr, openerp.SUPERUSER_ID, ids)
    def sendmany(self, cr, uid, notifications):
        # notifications: iterable of (channel, message) pairs.
        channels = set()
        for channel, message in notifications:
            channels.add(channel)
            values = {
                "channel" : json_dump(channel),
                "message" : json_dump(message)
            }
            self.pool['bus.bus'].create(cr, openerp.SUPERUSER_ID, values)
            # commit per message so concurrent pollers see it immediately
            cr.commit()
            if random.random() < 0.01:
                # probabilistic GC: roughly one send in a hundred pays the cost
                self.gc(cr, uid)
        if channels:
            # wake up dispatchers in every server process via postgres NOTIFY
            with openerp.sql_db.db_connect('postgres').cursor() as cr2:
                cr2.execute("notify imbus, %s", (json_dump(list(channels)),))
    def sendone(self, cr, uid, channel, message):
        # Convenience wrapper around sendmany() for a single notification.
        self.sendmany(cr, uid, [[channel, message]])
    def poll(self, cr, uid, channels, last=0):
        # first poll return the notification in the 'buffer'
        if last == 0:
            timeout_ago = datetime.datetime.utcnow()-datetime.timedelta(seconds=TIMEOUT)
            domain = [('create_date', '>', timeout_ago.strftime(DEFAULT_SERVER_DATETIME_FORMAT))]
        else:
            # else returns the unread notifications
            domain = [('id','>',last)]
        # channels are stored JSON-encoded, so encode before the 'in' lookup
        channels = [json_dump(c) for c in channels]
        domain.append(('channel','in',channels))
        notifications = self.search_read(cr, openerp.SUPERUSER_ID, domain)
        return [{"id":notif["id"], "channel": simplejson.loads(notif["channel"]), "message":simplejson.loads(notif["message"])} for notif in notifications]
class ImDispatch(object):
    """Dispatches postgres NOTIFY wake-ups to waiting poll() threads/greenlets.

    self.channels maps a hashable channel key to the list of Events of the
    requests currently long-polling on that channel.
    """
    def __init__(self):
        # channel key -> [Event, ...] of waiting pollers
        self.channels = {}
    def poll(self, dbname, channels, last, timeout=TIMEOUT):
        # Dont hang ctrl-c for a poll request, we need to bypass private
        # attribute access because we dont know before starting the thread that
        # it will handle a longpolling request
        if not openerp.evented:
            current = threading.current_thread()
            current._Thread__daemonic = True
            # rename the thread to avoid tests waiting for a longpolling
            current.setName("openerp.longpolling.request.%s" % current.ident)
        registry = openerp.registry(dbname)
        # immediatly returns if past notifications exist
        with registry.cursor() as cr:
            notifications = registry['bus.bus'].poll(cr, openerp.SUPERUSER_ID, channels, last)
        # or wait for future ones
        if not notifications:
            # self.Event is set by start(): threading.Event or gevent's Event
            event = self.Event()
            for c in channels:
                self.channels.setdefault(hashable(c), []).append(event)
            try:
                # block until loop() sets the event or the timeout elapses,
                # then re-read whatever arrived meanwhile
                event.wait(timeout=timeout)
                with registry.cursor() as cr:
                    notifications = registry['bus.bus'].poll(cr, openerp.SUPERUSER_ID, channels, last)
            except Exception:
                # timeout
                pass
        return notifications
    def loop(self):
        """ Dispatch postgres notifications to the relevant polling threads/greenlets """
        _logger.info("Bus.loop listen imbus on db postgres")
        with openerp.sql_db.db_connect('postgres').cursor() as cr:
            conn = cr._cnx
            cr.execute("listen imbus")
            cr.commit();
            while True:
                # select() wakes up as soon as the connection has data;
                # an empty triple means the TIMEOUT elapsed with no NOTIFY
                if select.select([conn], [], [], TIMEOUT) == ([],[],[]):
                    pass
                else:
                    conn.poll()
                    channels = []
                    while conn.notifies:
                        channels.extend(json.loads(conn.notifies.pop().payload))
                    # dispatch to local threads/greenlets
                    events = set()
                    for c in channels:
                        events.update(self.channels.pop(hashable(c),[]))
                    for e in events:
                        e.set()
    def run(self):
        # Keep the listener alive forever; back off TIMEOUT seconds on error.
        while True:
            try:
                self.loop()
            except Exception, e:
                _logger.exception("Bus.loop error, sleep and retry")
                time.sleep(TIMEOUT)
    def start(self):
        # Returns self, or None in prefork mode (dispatching disabled there).
        if openerp.evented:
            # gevent mode
            import gevent
            self.Event = gevent.event.Event
            gevent.spawn(self.run)
        elif openerp.multi_process:
            # disabled in prefork mode
            return
        else:
            # threaded mode
            self.Event = threading.Event
            t = threading.Thread(name="%s.Bus" % __name__, target=self.run)
            t.daemon = True
            t.start()
        return self
# Module-level singleton, started at import time.  Note: this is None in
# prefork mode, because ImDispatch.start() returns early without `self` there.
dispatch = ImDispatch().start()
#----------------------------------------------------------
# Controller
#----------------------------------------------------------
class Controller(openerp.http.Controller):
    """ Examples:
    openerp.jsonRpc('/longpolling/poll','call',{"channels":["c1"],last:0}).then(function(r){console.log(r)});
    openerp.jsonRpc('/longpolling/send','call',{"channel":"c1","message":"m1"});
    openerp.jsonRpc('/longpolling/send','call',{"channel":"c2","message":"m2"});
    """
    @openerp.http.route('/longpolling/send', type="json", auth="public")
    def send(self, channel, message):
        # Publish `message` on `channel`; channels must be plain strings.
        if not isinstance(channel, basestring):
            raise Exception("bus.Bus only string channels are allowed.")
        registry, cr, uid, context = request.registry, request.cr, request.session.uid, request.context
        return registry['bus.bus'].sendone(cr, uid, channel, message)
    # override to add channels
    def _poll(self, dbname, channels, last, options):
        # Release the request cursor before blocking in dispatch.poll();
        # otherwise the DB connection would be held for the whole long poll.
        request.cr.close()
        request._cr = None
        return dispatch.poll(dbname, channels, last)
    @openerp.http.route('/longpolling/poll', type="json", auth="public")
    def poll(self, channels, last, options=None):
        if options is None:
            options = {}
        if not dispatch:
            # dispatcher is disabled in prefork mode (ImDispatch.start -> None)
            raise Exception("bus.Bus unavailable")
        if [c for c in channels if not isinstance(c, basestring)]:
            # (removed a leftover debug `print channels` that polluted stdout)
            raise Exception("bus.Bus only string channels are allowed.")
        return self._poll(request.db, channels, last, options)
# vim:et:
| agpl-3.0 |
MinimalOS/android_kernel_moto_shamu | tools/perf/scripts/python/syscall-counts-by-pid.py | 11180 | 1927 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts-by-pid.py [comm]\n"
# Optional single argument: a pid (numeric) or a command name to filter on.
for_comm = None
for_pid = None
if len(sys.argv) > 2:
    sys.exit(usage)
if len(sys.argv) > 1:
    try:
        for_pid = int(sys.argv[1])
    except ValueError:
        # not a number: treat the argument as a command name instead.
        # (was a bare `except:`, which would also have swallowed
        # SystemExit/KeyboardInterrupt; int() on a string can only
        # raise ValueError here)
        for_comm = sys.argv[1]
# comm -> pid -> syscall id -> count
syscalls = autodict()
def trace_begin():
    # Called once by perf before any events are processed.
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called once by perf after the last event; dump the aggregated counts.
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, args):
    # Per-event hook called by perf for every raw_syscalls:sys_enter.
    # Skip events that do not match the requested comm/pid filter.  The pid
    # test uses an explicit `is not None` so that filtering on pid 0
    # (the swapper task) works too — a bare truth test would silently
    # disable the filter for for_pid == 0.
    if (for_comm and common_comm != for_comm) or \
       (for_pid is not None and common_pid != for_pid):
        return
    try:
        syscalls[common_comm][common_pid][id] += 1
    except TypeError:
        # first hit for this (comm, pid, id): initialise the counter
        syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
    # Emit a per-comm/per-pid table of syscall counts, most frequent first.
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events by comm/pid:\n\n",
    print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
        "----------"),
    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            # sort by count, descending (Python-2 tuple-unpacking lambda)
            for id, val in sorted(syscalls[comm][pid].iteritems(), \
                key = lambda(k, v): (v, k), reverse = True):
                print " %-38s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
klusark/android_external_chromium_org | third_party/re2/re2/make_unicode_casefold.py | 218 | 3591 | #!/usr/bin/python
# coding=utf-8
#
# Copyright 2008 The RE2 Authors. All Rights Reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
# See unicode_casefold.h for description of case folding tables.
"""Generate C++ table for Unicode case folding."""
import unicode, sys
_header = """
// GENERATED BY make_unicode_casefold.py; DO NOT EDIT.
// make_unicode_casefold.py >unicode_casefold.cc
#include "re2/unicode_casefold.h"
namespace re2 {
"""
_trailer = """
} // namespace re2
"""
def _Delta(a, b):
"""Compute the delta for b - a. Even/odd and odd/even
are handled specially, as described above."""
if a+1 == b:
if a%2 == 0:
return 'EvenOdd'
else:
return 'OddEven'
if a == b+1:
if a%2 == 0:
return 'OddEven'
else:
return 'EvenOdd'
return b - a
def _AddDelta(a, delta):
"""Return a + delta, handling EvenOdd and OddEven specially."""
if type(delta) == int:
return a+delta
if delta == 'EvenOdd':
if a%2 == 0:
return a+1
else:
return a-1
if delta == 'OddEven':
if a%2 == 1:
return a+1
else:
return a-1
print >>sys.stderr, "Bad Delta: ", delta
raise "Bad Delta"
def _MakeRanges(pairs):
  """Turn a list like [(65,97), (66, 98), ..., (90,122)]
  into [(65, 90, +32)]."""
  ranges = []
  last = -100
  # Try to extend the most recent range r = [lo, hi, delta] with (a, b):
  # consecutive code points sharing the same delta grow the range in place.
  def evenodd(last, a, b, r):
    if a != last+1 or b != _AddDelta(a, r[2]):
      return False
    r[1] = a
    return True
  # Same, but for ranges advancing two code points at a time; these use the
  # 'EvenOddSkip'/'OddEvenSkip' symbolic deltas.
  def evenoddpair(last, a, b, r):
    if a != last+2:
      return False
    delta = r[2]
    d = delta
    if type(delta) is not str:
      return False
    if delta.endswith('Skip'):
      d = delta[:-4]
    else:
      # first extension of this range: upgrade to the 'Skip' variant
      delta = d + 'Skip'
    if b != _AddDelta(a, d):
      return False
    r[1] = a
    r[2] = delta
    return True
  for a, b in pairs:
    if ranges and evenodd(last, a, b, ranges[-1]):
      pass
    elif ranges and evenoddpair(last, a, b, ranges[-1]):
      pass
    else:
      # cannot extend the previous range: start a fresh single-element one
      ranges.append([a, a, _Delta(a, b)])
    last = a
  return ranges
# The maximum size of a case-folding group.
# Case folding is implemented in parse.cc by a recursive process
# with a recursion depth equal to the size of the largest
# case-folding group, so it is important that this bound be small.
# The current tables have no group bigger than 4.
# If there are ever groups bigger than 10 or so, it will be
# time to rework the code in parse.cc.
MaxCasefoldGroup = 4
def main():
  # Build the case-folding orbit pairs and the simple tolower pairs from the
  # Unicode data, then print them as C++ arrays (framed by _header/_trailer).
  lowergroups, casegroups = unicode.CaseGroups()
  foldpairs = []
  seen = {}
  for c in casegroups:
    if len(c) > MaxCasefoldGroup:
      raise unicode.Error("casefold group too long: %s" % (c,))
    # Chain each group into a cycle: c[i-1] -> c[i] (i == 0 links the last
    # element back to the first).  Each code point may appear in one group only.
    for i in range(len(c)):
      if c[i-1] in seen:
        raise unicode.Error("bad casegroups %d -> %d" % (c[i-1], c[i]))
      seen[c[i-1]] = True
      foldpairs.append([c[i-1], c[i]])
  lowerpairs = []
  for lower, group in lowergroups.iteritems():
    for g in group:
      if g != lower:
        lowerpairs.append([g, lower])
  # Print one range-compressed CaseFold table plus its length.
  def printpairs(name, foldpairs):
    foldpairs.sort()
    foldranges = _MakeRanges(foldpairs)
    print "// %d groups, %d pairs, %d ranges" % (len(casegroups), len(foldpairs), len(foldranges))
    print "CaseFold unicode_%s[] = {" % (name,)
    for lo, hi, delta in foldranges:
      print "\t{ %d, %d, %s }," % (lo, hi, delta)
    print "};"
    print "int num_unicode_%s = %d;" % (name, len(foldranges),)
    print ""
  print _header
  printpairs("casefold", foldpairs)
  printpairs("tolower", lowerpairs)
  print _trailer
# script entry point
if __name__ == '__main__':
  main()
| bsd-3-clause |
linked67/p2pool-leaguecoin | p2pool/util/expiring_dict.py | 237 | 5233 | from __future__ import division
import time
import weakref
from p2pool.util import deferral
class Node(object):
    """A doubly-linked-list cell: `contents` plus `prev`/`next` pointers."""
    def __init__(self, contents, prev=None, next=None):
        self.contents = contents
        self.prev = prev
        self.next = next
    def insert_before(self, contents):
        """Link a new node holding `contents` just before this one; return it."""
        fresh = Node(contents, self.prev, self)
        self.prev.next = fresh
        self.prev = fresh
        return fresh
    def insert_after(self, contents):
        """Link a new node holding `contents` just after this one; return it."""
        fresh = Node(contents, self, self.next)
        self.next.prev = fresh
        self.next = fresh
        return fresh
    @staticmethod
    def connect(prev, next):
        """Join two endpoint nodes; both must currently be unconnected."""
        if prev.next is not None or next.prev is not None:
            raise ValueError('node already connected')
        prev.next = next
        next.prev = prev
    def replace(self, contents):
        """Swap in new contents without touching the links."""
        self.contents = contents
    def delete(self):
        """Unlink this node from its neighbours (it must be connected)."""
        if self.prev.next is None or self.next.prev is None:
            raise ValueError('node not connected')
        before, after = self.prev, self.next
        before.next = after
        after.prev = before
        self.next = self.prev = None
class LinkedList(object):
    """Doubly linked list of Node objects between two sentinel nodes.

    Iteration yields the *nodes* (not their contents) so callers may delete
    the yielded node while walking; append/appendleft return the new Node.
    """
    def __init__(self, iterable=()):
        # () rather than the mutable-default-[] pitfall; the argument is only
        # iterated, so any iterable works and callers are unaffected.
        self.start, self.end = Node(None), Node(None)
        Node.connect(self.start, self.end)
        for item in iterable:
            self.append(item)
    def __repr__(self):
        return 'LinkedList(%r)' % (list(self),)
    def __len__(self):
        # O(n): walks the whole list
        return sum(1 for x in self)
    def __iter__(self):
        cur = self.start.next
        while cur is not self.end:
            cur2 = cur
            cur = cur.next
            yield cur2 # in case cur is deleted, but items inserted after are ignored
    def __reversed__(self):
        cur = self.end.prev
        while cur is not self.start:
            cur2 = cur
            cur = cur.prev
            yield cur2
    def __getitem__(self, index):
        # O(index) walk; negative indexes count from the end.  Uses range()
        # instead of the Python-2-only xrange() — identical behaviour here
        # and keeps the class importable on Python 3.
        if index < 0:
            cur = self.end
            for i in range(-index):
                cur = cur.prev
                if cur is self.start:
                    raise IndexError('index out of range')
        else:
            cur = self.start
            for i in range(index + 1):
                cur = cur.next
                if cur is self.end:
                    raise IndexError('index out of range')
        return cur
    def appendleft(self, item):
        """Insert `item` at the head; return the new Node."""
        return self.start.insert_after(item)
    def append(self, item):
        """Insert `item` at the tail; return the new Node."""
        return self.end.insert_before(item)
    def popleft(self):
        """Remove the head node and return its contents."""
        node = self.start.next
        if node is self.end:
            raise IndexError('popleft from empty')
        node.delete()
        return node.contents
    def pop(self):
        """Remove the tail node and return its contents."""
        node = self.end.prev
        if node is self.start:
            raise IndexError('pop from empty')
        node.delete()
        return node.contents
class ExpiringDict(object):
    """Dict whose entries disappear `expiry_time` seconds after they were
    last touched.  Expiry order is tracked in a LinkedList of
    (timestamp, key) nodes; a background RobustLoopingCall runs expire()
    once per second."""
    def __init__(self, expiry_time, get_touches=True):
        self.expiry_time = expiry_time
        # get_touches: when True, plain reads also refresh the entry's expiry
        self.get_touches = get_touches
        self.expiry_deque = LinkedList()
        self.d = dict() # key -> node, value
        # Only a weak reference to self is captured by the looping call, so
        # the dict can be garbage-collected while the loop is scheduled; the
        # weakref callback then stops the loop.
        self_ref = weakref.ref(self, lambda _: expire_loop.stop() if expire_loop.running else None)
        self._expire_loop = expire_loop = deferral.RobustLoopingCall(lambda: self_ref().expire())
        expire_loop.start(1)
    def stop(self):
        self._expire_loop.stop()
    def __repr__(self):
        return 'ExpiringDict' + repr(self.__dict__)
    def __len__(self):
        return len(self.d)
    # sentinel distinguishing "no value supplied" from an explicit None
    _nothing = object()
    def touch(self, key, value=_nothing):
        'Updates expiry node, optionally replacing value, returning new value'
        # For an existing key (or a touch without a value, which raises
        # KeyError on a missing key), unlink the old expiry node first.
        if value is self._nothing or key in self.d:
            node, old_value = self.d[key]
            node.delete()
        new_value = old_value if value is self._nothing else value
        # re-append at the tail: the deque stays sorted by expiry timestamp
        self.d[key] = self.expiry_deque.append((time.time() + self.expiry_time, key)), new_value
        return new_value
    def expire(self):
        t = time.time()
        # Nodes are in expiry order, so stop at the first future timestamp.
        # Deleting the yielded node is safe: LinkedList iteration advances
        # before yielding.
        for node in self.expiry_deque:
            timestamp, key = node.contents
            if timestamp > t:
                break
            del self.d[key]
            node.delete()
    def __contains__(self, key):
        return key in self.d
    def __getitem__(self, key):
        if self.get_touches:
            # reading refreshes the expiry timer
            value = self.touch(key)
        else:
            node, value = self.d[key]
        return value
    def __setitem__(self, key, value):
        self.touch(key, value)
    def __delitem__(self, key):
        node, value = self.d.pop(key)
        node.delete()
    def get(self, key, default_value=None):
        if key in self.d:
            res = self[key]
        else:
            res = default_value
        return res
    def setdefault(self, key, default_value):
        if key in self.d:
            return self[key]
        else:
            self[key] = default_value
            return default_value
    def keys(self):
        return self.d.keys()
    def values(self):
        return [value for node, value in self.d.itervalues()]
    def itervalues(self):
        for node, value in self.d.itervalues():
            yield value
| gpl-3.0 |
zachmullen/boto | tests/integration/ec2/test_connection.py | 100 | 9106 | # Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2009, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Some unit tests for the EC2Connection
"""
import unittest
import time
import telnetlib
import socket
from nose.plugins.attrib import attr
from boto.ec2.connection import EC2Connection
from boto.exception import EC2ResponseError
import boto.ec2
class EC2ConnectionTest(unittest.TestCase):
    """Live integration tests: every test talks to the real EC2 API, so valid
    AWS credentials (and billable resources) are required to run them."""
    ec2 = True
    @attr('notdefault')
    def test_launch_permissions(self):
        # this is my user_id, if you want to run these tests you should
        # replace this with yours or they won't work
        user_id = '963068290131'
        print('--- running EC2Connection tests ---')
        c = EC2Connection()
        # get list of private AMI's
        rs = c.get_all_images(owners=[user_id])
        assert len(rs) > 0
        # now pick the first one
        image = rs[0]
        # temporarily make this image runnable by everyone
        status = image.set_launch_permissions(group_names=['all'])
        assert status
        d = image.get_launch_permissions()
        assert 'groups' in d
        assert len(d['groups']) > 0
        # now remove that permission
        status = image.remove_launch_permissions(group_names=['all'])
        assert status
        # give AWS time to propagate the permission change
        time.sleep(10)
        d = image.get_launch_permissions()
        assert 'groups' not in d
    def test_1_basic(self):
        # End-to-end smoke test: security groups, group-to-group auth, an
        # actual instance launch, port 80 reachability, and key pairs.
        # create 2 new security groups
        c = EC2Connection()
        group1_name = 'test-%d' % int(time.time())
        group_desc = 'This is a security group created during unit testing'
        group1 = c.create_security_group(group1_name, group_desc)
        time.sleep(2)
        group2_name = 'test-%d' % int(time.time())
        group_desc = 'This is a security group created during unit testing'
        group2 = c.create_security_group(group2_name, group_desc)
        # now get a listing of all security groups and look for our new one
        rs = c.get_all_security_groups()
        found = False
        for g in rs:
            if g.name == group1_name:
                found = True
        assert found
        # now pass arg to filter results to only our new group
        rs = c.get_all_security_groups([group1_name])
        assert len(rs) == 1
        # try some group to group authorizations/revocations
        # first try the old style
        status = c.authorize_security_group(group1.name,
                                            group2.name,
                                            group2.owner_id)
        assert status
        status = c.revoke_security_group(group1.name,
                                         group2.name,
                                         group2.owner_id)
        assert status
        # now try specifying a specific port
        status = c.authorize_security_group(group1.name,
                                            group2.name,
                                            group2.owner_id,
                                            'tcp', 22, 22)
        assert status
        status = c.revoke_security_group(group1.name,
                                         group2.name,
                                         group2.owner_id,
                                         'tcp', 22, 22)
        assert status
        # now delete the second security group
        status = c.delete_security_group(group2_name)
        # now make sure it's really gone
        rs = c.get_all_security_groups()
        found = False
        for g in rs:
            if g.name == group2_name:
                found = True
        assert not found
        group = group1
        # now try to launch apache image with our new security group
        rs = c.get_all_images()
        img_loc = 'ec2-public-images/fedora-core4-apache.manifest.xml'
        for image in rs:
            if image.location == img_loc:
                break
        reservation = image.run(security_groups=[group.name])
        instance = reservation.instances[0]
        while instance.state != 'running':
            print('\tinstance is %s' % instance.state)
            time.sleep(30)
            instance.update()
        # instance in now running, try to telnet to port 80
        t = telnetlib.Telnet()
        try:
            t.open(instance.dns_name, 80)
        except socket.error:
            pass
        # now open up port 80 and try again, it should work
        group.authorize('tcp', 80, 80, '0.0.0.0/0')
        t.open(instance.dns_name, 80)
        t.close()
        # now revoke authorization and try again
        group.revoke('tcp', 80, 80, '0.0.0.0/0')
        try:
            t.open(instance.dns_name, 80)
        except socket.error:
            pass
        # now kill the instance and delete the security group
        instance.terminate()
        # check that state and previous_state have updated
        assert instance.state == 'shutting-down'
        assert instance.state_code == 32
        assert instance.previous_state == 'running'
        assert instance.previous_state_code == 16
        # unfortunately, I can't delete the sg within this script
        #sg.delete()
        # create a new key pair
        key_name = 'test-%d' % int(time.time())
        status = c.create_key_pair(key_name)
        assert status
        # now get a listing of all key pairs and look for our new one
        rs = c.get_all_key_pairs()
        found = False
        for k in rs:
            if k.name == key_name:
                found = True
        assert found
        # now pass arg to filter results to only our new key pair
        rs = c.get_all_key_pairs([key_name])
        assert len(rs) == 1
        key_pair = rs[0]
        # now delete the key pair
        status = c.delete_key_pair(key_name)
        # now make sure it's really gone
        rs = c.get_all_key_pairs()
        found = False
        for k in rs:
            if k.name == key_name:
                found = True
        assert not found
        # short test around Paid AMI capability
        demo_paid_ami_id = 'ami-bd9d78d4'
        demo_paid_ami_product_code = 'A79EC0DB'
        l = c.get_all_images([demo_paid_ami_id])
        assert len(l) == 1
        assert len(l[0].product_codes) == 1
        assert l[0].product_codes[0] == demo_paid_ami_product_code
        print('--- tests completed ---')
    def test_dry_run(self):
        # DryRun=True must make every call fail with the standard message.
        c = EC2Connection()
        dry_run_msg = 'Request would have succeeded, but DryRun flag is set.'
        try:
            rs = c.get_all_images(dry_run=True)
            self.fail("Should have gotten an exception")
        except EC2ResponseError as e:
            self.assertTrue(dry_run_msg in str(e))
        try:
            rs = c.run_instances(
                image_id='ami-a0cd60c9',
                instance_type='m1.small',
                dry_run=True
            )
            self.fail("Should have gotten an exception")
        except EC2ResponseError as e:
            self.assertTrue(dry_run_msg in str(e))
        # Need an actual instance for the rest of this...
        rs = c.run_instances(
            image_id='ami-a0cd60c9',
            instance_type='m1.small'
        )
        time.sleep(120)
        try:
            rs = c.stop_instances(
                instance_ids=[rs.instances[0].id],
                dry_run=True
            )
            self.fail("Should have gotten an exception")
        except EC2ResponseError as e:
            self.assertTrue(dry_run_msg in str(e))
        try:
            rs = c.terminate_instances(
                instance_ids=[rs.instances[0].id],
                dry_run=True
            )
            self.fail("Should have gotten an exception")
        except EC2ResponseError as e:
            self.assertTrue(dry_run_msg in str(e))
        # And kill it.
        rs.instances[0].terminate()
    def test_can_get_all_instances_sigv4(self):
        # eu-central-1 only supports SigV4 signing, exercising that code path.
        connection = boto.ec2.connect_to_region('eu-central-1')
        self.assertTrue(isinstance(connection.get_all_instances(), list))
| mit |
khkaminska/djangoproject.com | aggregator/migrations/0001_initial.py | 9 | 2609 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial schema for the aggregator app: Feed, FeedItem and FeedType
    # tables, with the Feed -> FeedType and Feed -> user FKs added last.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Feed',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=500)),
                ('feed_url', models.URLField(unique=True, max_length=500)),
                ('public_url', models.URLField(max_length=500)),
                ('approval_status', models.CharField(default='P', max_length=1, choices=[('P', 'Pending'), ('D', 'Denied'), ('A', 'Approved')])),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='FeedItem',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=500)),
                ('link', models.URLField(max_length=500)),
                ('summary', models.TextField(blank=True)),
                ('date_modified', models.DateTimeField()),
                ('guid', models.CharField(unique=True, max_length=500, db_index=True)),
                ('feed', models.ForeignKey(to='aggregator.Feed')),
            ],
            options={
                'ordering': ('-date_modified',),
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='FeedType',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=250)),
                ('slug', models.SlugField(max_length=250)),
                ('can_self_add', models.BooleanField(default=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # FKs are added after all three models exist.
        migrations.AddField(
            model_name='feed',
            name='feed_type',
            field=models.ForeignKey(to='aggregator.FeedType'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='feed',
            name='owner',
            field=models.ForeignKey(related_name='owned_feeds', blank=True, to=settings.AUTH_USER_MODEL, null=True),
            preserve_default=True,
        ),
    ]
| bsd-3-clause |
How2Compute/SmartHome | cli/py3env/lib/python3.4/site-packages/requests/models.py | 59 | 33067 | # -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
This module contains the primary objects that power Requests.
"""
import collections
import datetime
import sys
# Import encoding now, to avoid implicit import later.
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
# such as in Embedded Python. See https://github.com/kennethreitz/requests/issues/3578.
import encodings.idna
from io import BytesIO, UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url
from .packages.urllib3.exceptions import (
DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError)
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, check_header_validity)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, builtin_str, basestring)
from .compat import json as complexjson
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
codes.moved, # 301
codes.found, # 302
codes.other, # 303
codes.temporary_redirect, # 307
codes.permanent_redirect, # 308
)
DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
class RequestEncodingMixin(object):
    # Mixin shared by Request/PreparedRequest: URL path extraction plus
    # form-encoded and multipart body construction helpers.
    @property
    def path_url(self):
        """Build the path URL to use."""
        url = []
        p = urlsplit(self.url)
        path = p.path
        if not path:
            path = '/'
        url.append(path)
        query = p.query
        if query:
            url.append('?')
            url.append(query)
        return ''.join(url)
    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.
        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """
        # Strings/bytes and file-like objects pass through untouched; only
        # iterable key/value structures are urlencoded.
        if isinstance(data, (str, bytes)):
            return data
        elif hasattr(data, 'read'):
            return data
        elif hasattr(data, '__iter__'):
            result = []
            for k, vs in to_key_val_list(data):
                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
                    # scalar value: wrap so multi-valued keys share one path
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (k.encode('utf-8') if isinstance(k, str) else k,
                             v.encode('utf-8') if isinstance(v, str) else v))
            return urlencode(result, doseq=True)
        else:
            return data
    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.
        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
        or 4-tuples (filename, fileobj, contentype, custom_headers).
        """
        if (not files):
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")
        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})
        # plain form fields first, so they precede the file parts in the body
        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)
                    new_fields.append(
                        (field.decode('utf-8') if isinstance(field, bytes) else field,
                         v.encode('utf-8') if isinstance(v, str) else v))
        for (k, v) in files:
            # support for explicit filename
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v
            if isinstance(fp, (str, bytes, bytearray)):
                fdata = fp
            else:
                fdata = fp.read()
            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)
        body, content_type = encode_multipart_formdata(new_fields)
        return body, content_type
class RequestHooksMixin(object):
    def register_hook(self, event, hook):
        """Properly register a hook.

        `hook` may be a single callable or an iterable of callables; in the
        iterable case, non-callable items are silently skipped.
        Raises ValueError for an unknown `event`.
        """
        if event not in self.hooks:
            raise ValueError('Unsupported event specified, with event name "%s"' % (event))
        # callable() has the same semantics as the old
        # isinstance(hook, collections.Callable), but the latter alias is
        # deprecated and removed from the collections namespace in
        # Python 3.10+.
        if callable(hook):
            self.hooks[event].append(hook)
        elif hasattr(hook, '__iter__'):
            self.hooks[event].extend(h for h in hook if callable(h))
    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """
        try:
            self.hooks[event].remove(hook)
            return True
        except ValueError:
            return False
class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is
    sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: dictionary of URL parameters to append to the URL.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """
    def __init__(self, method=None, url=None, headers=None, files=None,
        data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
        # Normalise the collection-valued arguments: None means "empty".
        self.method = method
        self.url = url
        self.headers = {} if headers is None else headers
        self.files = [] if files is None else files
        self.data = [] if data is None else data
        self.json = json
        self.params = {} if params is None else params
        self.auth = auth
        self.cookies = cookies
        # Start from the default hook table, then register caller-supplied ones.
        self.hooks = default_hooks()
        for event, hook in list((hooks or {}).items()):
            self.register_hook(event=event, hook=hook)
    def __repr__(self):
        return '<Request [%s]>' % (self.method)
    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        prepared = PreparedRequest()
        prepared.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            json=self.json,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return prepared
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
"""The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
containing the exact bytes that will be sent to the server.
Generated from either a :class:`Request <Request>` object or manually.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> r = req.prepare()
<PreparedRequest [GET]>
>>> s = requests.Session()
>>> s.send(r)
<Response [200]>
"""
    def __init__(self):
        """Create an empty prepared request; :meth:`prepare` populates it."""
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()
        #: integer denoting starting position of a readable file-like body.
        self._body_position = None
    def prepare(self, method=None, url=None, headers=None, files=None,
            data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
        """Prepares the entire request with the given parameters."""
        # Order matters: each prepare_* step may rely on state produced by
        # the previous ones (e.g. prepare_body uses the finalised URL/headers).
        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files, json)
        self.prepare_auth(auth, url)
        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.
        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)
def __repr__(self):
return '<PreparedRequest [%s]>' % (self.method)
def copy(self):
p = PreparedRequest()
p.method = self.method
p.url = self.url
p.headers = self.headers.copy() if self.headers is not None else None
p._cookies = _copy_cookie_jar(self._cookies)
p.body = self.body
p.hooks = self.hooks
p._body_position = self._body_position
return p
def prepare_method(self, method):
"""Prepares the given HTTP method."""
self.method = method
if self.method is not None:
self.method = to_native_string(self.method.upper())
@staticmethod
def _get_idna_encoded_host(host):
try:
from .packages import idna
except ImportError:
# tolerate the possibility of downstream repackagers unvendoring `requests`
# For more information, read: packages/__init__.py
import idna
sys.modules['requests.packages.idna'] = idna
try:
host = idna.encode(host, uts46=True).decode('utf-8')
except idna.IDNAError:
raise UnicodeError
return host
def prepare_url(self, url, params):
"""Prepares the given HTTP URL."""
#: Accept objects that have string representations.
#: We're unable to blindly call unicode/str functions
#: as this will include the bytestring indicator (b'')
#: on python 3.x.
#: https://github.com/kennethreitz/requests/pull/2238
if isinstance(url, bytes):
url = url.decode('utf8')
else:
url = unicode(url) if is_py2 else str(url)
# Remove leading whitespaces from url
url = url.lstrip()
# Don't do any URL preparation for non-HTTP schemes like `mailto`,
# `data` etc to work around exceptions from `url_parse`, which
# handles RFC 3986 only.
if ':' in url and not url.lower().startswith('http'):
self.url = url
return
# Support for unicode domain names and paths.
try:
scheme, auth, host, port, path, query, fragment = parse_url(url)
except LocationParseError as e:
raise InvalidURL(*e.args)
if not scheme:
error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
error = error.format(to_native_string(url, 'utf8'))
raise MissingSchema(error)
if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url)
# In general, we want to try IDNA encoding the hostname if the string contains
# non-ASCII characters. This allows users to automatically get the correct IDNA
# behaviour. For strings containing only ASCII characters, we need to also verify
# it doesn't start with a wildcard (*), before allowing the unencoded hostname.
if not unicode_is_ascii(host):
try:
host = self._get_idna_encoded_host(host)
except UnicodeError:
raise InvalidURL('URL has an invalid label.')
elif host.startswith(u'*'):
raise InvalidURL('URL has an invalid label.')
# Carefully reconstruct the network location
netloc = auth or ''
if netloc:
netloc += '@'
netloc += host
if port:
netloc += ':' + str(port)
# Bare domains aren't valid URLs.
if not path:
path = '/'
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode('utf-8')
if isinstance(netloc, str):
netloc = netloc.encode('utf-8')
if isinstance(path, str):
path = path.encode('utf-8')
if isinstance(query, str):
query = query.encode('utf-8')
if isinstance(fragment, str):
fragment = fragment.encode('utf-8')
if isinstance(params, (str, bytes)):
params = to_native_string(params)
enc_params = self._encode_params(params)
if enc_params:
if query:
query = '%s&%s' % (query, enc_params)
else:
query = enc_params
url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
self.url = url
def prepare_headers(self, headers):
"""Prepares the given HTTP headers."""
self.headers = CaseInsensitiveDict()
if headers:
for header in headers.items():
# Raise exception on invalid header value.
check_header_validity(header)
name, value = header
self.headers[to_native_string(name)] = value
def prepare_body(self, data, files, json=None):
"""Prepares the given HTTP body data."""
# Check if file, fo, generator, iterator.
# If not, run through normal process.
# Nottin' on you.
body = None
content_type = None
if not data and json is not None:
# urllib3 requires a bytes-like body. Python 2's json.dumps
# provides this natively, but Python 3 gives a Unicode string.
content_type = 'application/json'
body = complexjson.dumps(json)
if not isinstance(body, bytes):
body = body.encode('utf-8')
is_stream = all([
hasattr(data, '__iter__'),
not isinstance(data, (basestring, list, tuple, collections.Mapping))
])
try:
length = super_len(data)
except (TypeError, AttributeError, UnsupportedOperation):
length = None
if is_stream:
body = data
if getattr(body, 'tell', None) is not None:
# Record the current file position before reading.
# This will allow us to rewind a file in the event
# of a redirect.
try:
self._body_position = body.tell()
except (IOError, OSError):
# This differentiates from None, allowing us to catch
# a failed `tell()` later when trying to rewind the body
self._body_position = object()
if files:
raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
if length:
self.headers['Content-Length'] = builtin_str(length)
else:
self.headers['Transfer-Encoding'] = 'chunked'
else:
# Multi-part file uploads.
if files:
(body, content_type) = self._encode_files(files, data)
else:
if data:
body = self._encode_params(data)
if isinstance(data, basestring) or hasattr(data, 'read'):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
self.prepare_content_length(body)
# Add content-type if it wasn't explicitly provided.
if content_type and ('content-type' not in self.headers):
self.headers['Content-Type'] = content_type
self.body = body
def prepare_content_length(self, body):
"""Prepare Content-Length header based on request method and body"""
if body is not None:
length = super_len(body)
if length:
# If length exists, set it. Otherwise, we fallback
# to Transfer-Encoding: chunked.
self.headers['Content-Length'] = builtin_str(length)
elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:
# Set Content-Length to 0 for methods that can have a body
# but don't provide one. (i.e. not GET or HEAD)
self.headers['Content-Length'] = '0'
def prepare_auth(self, auth, url=''):
"""Prepares the given HTTP auth data."""
# If no Auth is explicitly provided, extract it from the URL first.
if auth is None:
url_auth = get_auth_from_url(self.url)
auth = url_auth if any(url_auth) else None
if auth:
if isinstance(auth, tuple) and len(auth) == 2:
# special-case basic HTTP auth
auth = HTTPBasicAuth(*auth)
# Allow auth to make its changes.
r = auth(self)
# Update self to reflect the auth changes.
self.__dict__.update(r.__dict__)
# Recompute Content-Length
self.prepare_content_length(self.body)
def prepare_cookies(self, cookies):
"""Prepares the given HTTP cookie data.
This function eventually generates a ``Cookie`` header from the
given cookies using cookielib. Due to cookielib's design, the header
will not be regenerated if it already exists, meaning this function
can only be called once for the life of the
:class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
header is removed beforehand.
"""
if isinstance(cookies, cookielib.CookieJar):
self._cookies = cookies
else:
self._cookies = cookiejar_from_dict(cookies)
cookie_header = get_cookie_header(self._cookies, self)
if cookie_header is not None:
self.headers['Cookie'] = cookie_header
def prepare_hooks(self, hooks):
"""Prepares the given hooks."""
# hooks can be passed as None to the prepare method and to this
# method. To prevent iterating over None, simply use an empty list
# if hooks is False-y
hooks = hooks or []
for event in hooks:
self.register_hook(event, hooks[event])
class Response(object):
    """The :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    """
    # Attribute names preserved across pickling (see __getstate__/__setstate__).
    __attrs__ = [
        '_content', 'status_code', 'headers', 'url', 'history',
        'encoding', 'reason', 'cookies', 'elapsed', 'request'
    ]
    def __init__(self):
        super(Response, self).__init__()
        # False means "body not read yet"; becomes bytes (or None) once loaded.
        self._content = False
        self._content_consumed = False
        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
        self.status_code = None
        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()
        #: File-like object representation of response (for advanced usage).
        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
        # This requirement does not apply for use internally to Requests.
        self.raw = None
        #: Final URL location of Response.
        self.url = None
        #: Encoding to decode with when accessing r.text.
        self.encoding = None
        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here. The list is sorted from the oldest to the most recent request.
        self.history = []
        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
        self.reason = None
        #: A CookieJar of Cookies the server sent back.
        self.cookies = cookiejar_from_dict({})
        #: The amount of time elapsed between sending the request
        #: and the arrival of the response (as a timedelta).
        #: This property specifically measures the time taken between sending
        #: the first byte of the request and finishing parsing the headers. It
        #: is therefore unaffected by consuming the response content or the
        #: value of the ``stream`` keyword argument.
        self.elapsed = datetime.timedelta(0)
        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
        #: is a response.
        self.request = None
    def __getstate__(self):
        # Consume everything; accessing the content attribute makes
        # sure the content has been fully read.
        if not self._content_consumed:
            self.content
        return dict(
            (attr, getattr(self, attr, None))
            for attr in self.__attrs__
        )
    def __setstate__(self, state):
        for name, value in state.items():
            setattr(self, name, value)
        # pickled objects do not have .raw
        setattr(self, '_content_consumed', True)
        setattr(self, 'raw', None)
    def __repr__(self):
        return '<Response [%s]>' % (self.status_code)
    def __bool__(self):
        """Returns true if :attr:`status_code` is 'OK'."""
        return self.ok
    def __nonzero__(self):
        """Returns true if :attr:`status_code` is 'OK'."""
        # Python 2 name for __bool__.
        return self.ok
    def __iter__(self):
        """Allows you to use a response as an iterator."""
        return self.iter_content(128)
    @property
    def ok(self):
        """True unless :meth:`raise_for_status` raises an :class:`HTTPError`."""
        try:
            self.raise_for_status()
        except HTTPError:
            return False
        return True
    @property
    def is_redirect(self):
        """True if this Response is a well-formed HTTP redirect that could have
        been processed automatically (by :meth:`Session.resolve_redirects`).
        """
        return ('location' in self.headers and self.status_code in REDIRECT_STATI)
    @property
    def is_permanent_redirect(self):
        """True if this Response one of the permanent versions of redirect"""
        return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
    @property
    def apparent_encoding(self):
        """The apparent encoding, provided by the chardet library"""
        return chardet.detect(self.content)['encoding']
    def iter_content(self, chunk_size=1, decode_unicode=False):
        """Iterates over the response data. When stream=True is set on the
        request, this avoids reading the content at once into memory for
        large responses. The chunk size is the number of bytes it should
        read into memory. This is not necessarily the length of each item
        returned as decoding can take place.

        chunk_size must be of type int or None. A value of None will
        function differently depending on the value of `stream`.
        stream=True will read data as it arrives in whatever size the
        chunks are received. If stream=False, data is returned as
        a single chunk.

        If decode_unicode is True, content will be decoded using the best
        available encoding based on the response.
        """
        def generate():
            # Special case for urllib3.
            if hasattr(self.raw, 'stream'):
                try:
                    for chunk in self.raw.stream(chunk_size, decode_content=True):
                        yield chunk
                except ProtocolError as e:
                    raise ChunkedEncodingError(e)
                except DecodeError as e:
                    raise ContentDecodingError(e)
                except ReadTimeoutError as e:
                    raise ConnectionError(e)
            else:
                # Standard file-like object.
                while True:
                    chunk = self.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk
            self._content_consumed = True
        # A bool _content with the stream already consumed means iter_content
        # was (or is being) used on a streamed response: nothing left to read.
        if self._content_consumed and isinstance(self._content, bool):
            raise StreamConsumedError()
        elif chunk_size is not None and not isinstance(chunk_size, int):
            raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))
        # simulate reading small chunks of the content
        reused_chunks = iter_slices(self._content, chunk_size)
        stream_chunks = generate()
        chunks = reused_chunks if self._content_consumed else stream_chunks
        if decode_unicode:
            chunks = stream_decode_response_unicode(chunks, self)
        return chunks
    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
        """Iterates over the response data, one line at a time. When
        stream=True is set on the request, this avoids reading the
        content at once into memory for large responses.

        .. note:: This method is not reentrant safe.
        """
        pending = None
        for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
            if pending is not None:
                chunk = pending + chunk
            if delimiter:
                lines = chunk.split(delimiter)
            else:
                lines = chunk.splitlines()
            # A trailing partial line (no terminator at the chunk boundary) is
            # held back and prepended to the next chunk.
            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                pending = lines.pop()
            else:
                pending = None
            for line in lines:
                yield line
        if pending is not None:
            yield pending
    @property
    def content(self):
        """Content of the response, in bytes."""
        if self._content is False:
            # Read the contents.
            if self._content_consumed:
                raise RuntimeError(
                    'The content for this response was already consumed')
            if self.status_code == 0 or self.raw is None:
                self._content = None
            else:
                self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
        self._content_consumed = True
        # don't need to release the connection; that's been handled by urllib3
        # since we exhausted the data.
        return self._content
    @property
    def text(self):
        """Content of the response, in unicode.

        If Response.encoding is None, encoding will be guessed using
        ``chardet``.

        The encoding of the response content is determined based solely on HTTP
        headers, following RFC 2616 to the letter. If you can take advantage of
        non-HTTP knowledge to make a better guess at the encoding, you should
        set ``r.encoding`` appropriately before accessing this property.
        """
        # Try charset from content-type
        content = None
        encoding = self.encoding
        if not self.content:
            return str('')
        # Fallback to auto-detected encoding.
        if self.encoding is None:
            encoding = self.apparent_encoding
        # Decode unicode from given encoding.
        try:
            content = str(self.content, encoding, errors='replace')
        except (LookupError, TypeError):
            # A LookupError is raised if the encoding was not found which could
            # indicate a misspelling or similar mistake.
            #
            # A TypeError can be raised if encoding is None
            #
            # So we try blindly encoding.
            content = str(self.content, errors='replace')
        return content
    def json(self, **kwargs):
        r"""Returns the json-encoded content of a response, if any.

        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        :raises ValueError: If the response body does not contain valid json.
        """
        if not self.encoding and self.content and len(self.content) > 3:
            # No encoding set. JSON RFC 4627 section 3 states we should expect
            # UTF-8, -16 or -32. Detect which one to use; If the detection or
            # decoding fails, fall back to `self.text` (using chardet to make
            # a best guess).
            encoding = guess_json_utf(self.content)
            if encoding is not None:
                try:
                    return complexjson.loads(
                        self.content.decode(encoding), **kwargs
                    )
                except UnicodeDecodeError:
                    # Wrong UTF codec detected; usually because it's not UTF-8
                    # but some other 8-bit codec. This is an RFC violation,
                    # and the server didn't bother to tell us what codec *was*
                    # used.
                    pass
        return complexjson.loads(self.text, **kwargs)
    @property
    def links(self):
        """Returns the parsed header links of the response, if any."""
        header = self.headers.get('link')
        # l = MultiDict()
        l = {}
        if header:
            links = parse_header_links(header)
            for link in links:
                # Prefer the 'rel' attribute as key, falling back to the URL.
                key = link.get('rel') or link.get('url')
                l[key] = link
        return l
    def raise_for_status(self):
        """Raises stored :class:`HTTPError`, if one occurred."""
        http_error_msg = ''
        if isinstance(self.reason, bytes):
            # We attempt to decode utf-8 first because some servers
            # choose to localize their reason strings. If the string
            # isn't utf-8, we fall back to iso-8859-1 for all other
            # encodings. (See PR #3538)
            try:
                reason = self.reason.decode('utf-8')
            except UnicodeDecodeError:
                reason = self.reason.decode('iso-8859-1')
        else:
            reason = self.reason
        if 400 <= self.status_code < 500:
            http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)
        elif 500 <= self.status_code < 600:
            http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)
        if http_error_msg:
            raise HTTPError(http_error_msg, response=self)
    def close(self):
        """Releases the connection back to the pool. Once this method has been
        called the underlying ``raw`` object must not be accessed again.

        *Note: Should not normally need to be called explicitly.*
        """
        if not self._content_consumed:
            self.raw.close()
        release_conn = getattr(self.raw, 'release_conn', None)
        if release_conn is not None:
            release_conn()
| mit |
kwyoung11/ciml | labs/lab2-KNN/HighD.py | 4 | 1526 | from math import *
import random
from numpy import *
import matplotlib.pyplot as plt
waitForEnter=False
def generateUniformExample(numDim):
    """Draw one point uniformly at random from the unit hypercube of
    dimension *numDim* (a list of floats in [0, 1))."""
    point = []
    for _ in range(numDim):
        point.append(random.random())
    return point
def generateUniformDataset(numDim, numEx):
    """Generate *numEx* uniformly random points in *numDim* dimensions."""
    examples = []
    for _ in range(numEx):
        examples.append(generateUniformExample(numDim))
    return examples
def computeExampleDistance(x1, x2):
    """Euclidean distance between two vectors of the same length."""
    total = 0.0
    for i in range(len(x1)):
        diff = x1[i] - x2[i]
        total += diff * diff
    return sqrt(total)
def computeDistances(data):
    """Return all pairwise distances between examples in *data*,
    each normalized by sqrt(dimensionality)."""
    N = len(data)
    D = len(data[0])
    norm = sqrt(D)
    dists = []
    for n in range(N):
        for m in range(n):
            dists.append(computeExampleDistance(data[n], data[m]) / norm)
    return dists
# Experiment: show that in high dimensions, pairwise distances between
# uniformly random points concentrate (curse of dimensionality).
# NOTE: Python 2 script (print statements, raw_input).
N = 200 # number of examples
Dims = [2, 8, 32, 128, 512] # dimensionalities to try
Cols = ['#FF0000', '#880000', '#000000', '#000088', '#0000FF']
# Histogram bins over the normalized distance range [0, 1).
Bins = arange(0, 1, 0.02)
plt.xlabel('distance / sqrt(dimensionality)')
plt.ylabel('# of pairs of points at that distance')
plt.title('dimensionality versus uniform point distances')
for i,d in enumerate(Dims):
    # One histogram of normalized pairwise distances per dimensionality.
    distances = computeDistances(generateUniformDataset(d, N))
    print "D=%d, average distance=%g" % (d, mean(distances) * sqrt(d))
    plt.hist(distances,
             Bins,
             histtype='step',
             color=Cols[i])
if waitForEnter:
    # Interactive mode: show the figure and block on a keypress.
    plt.legend(['%d dims' % d for d in Dims])
    plt.show(False)
    x = raw_input('Press enter to continue...')
plt.legend(['%d dims' % d for d in Dims])
plt.savefig('fig.pdf')
plt.show()
| gpl-2.0 |
jounex/hue | desktop/core/ext-py/tablib-0.10.0/tablib/packages/odf/xforms.py | 96 | 1231 | # -*- coding: utf-8 -*-
# Copyright (C) 2006-2007 Søren Roug, European Environment Agency
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Contributor(s):
#
from namespaces import XFORMSNS
from element import Element
# ODF 1.0 section 11.2
# XForms is designed to be embedded in another XML format.
# Autogenerated
def Model(**args):
    """Create an ``<xforms:model>`` element (ODF 1.0 section 11.2)."""
    return Element(qname=(XFORMSNS, 'model'), **args)
def Instance(**args):
    """Create an ``<xforms:instance>`` element (ODF 1.0 section 11.2)."""
    return Element(qname=(XFORMSNS, 'instance'), **args)
def Bind(**args):
    """Create an ``<xforms:bind>`` element (ODF 1.0 section 11.2)."""
    return Element(qname=(XFORMSNS, 'bind'), **args)
| apache-2.0 |
tdudz/elements | contrib/zmq/zmq_sub.py | 38 | 1411 | #!/usr/bin/env python2
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import binascii
import zmq
import struct
# Subscribe to bitcoind's ZMQ notification feed and print each message.
# NOTE: Python 2 script (print statements).
port = 28332
zmqContext = zmq.Context()
zmqSubSocket = zmqContext.socket(zmq.SUB)
# Subscribe to all four notification topics published by the node.
zmqSubSocket.setsockopt(zmq.SUBSCRIBE, "hashblock")
zmqSubSocket.setsockopt(zmq.SUBSCRIBE, "hashtx")
zmqSubSocket.setsockopt(zmq.SUBSCRIBE, "rawblock")
zmqSubSocket.setsockopt(zmq.SUBSCRIBE, "rawtx")
zmqSubSocket.connect("tcp://127.0.0.1:%i" % port)
try:
    while True:
        # Multipart message: [topic, body, (optional) 4-byte sequence number].
        msg = zmqSubSocket.recv_multipart()
        topic = str(msg[0])
        body = msg[1]
        sequence = "Unknown"
        if len(msg[-1]) == 4:
            # Little-endian uint32 sequence counter.
            msgSequence = struct.unpack('<I', msg[-1])[-1]
            sequence = str(msgSequence)
        if topic == "hashblock":
            print '- HASH BLOCK ('+sequence+') -'
            print binascii.hexlify(body)
        elif topic == "hashtx":
            print '- HASH TX ('+sequence+') -'
            print binascii.hexlify(body)
        elif topic == "rawblock":
            # Only the 80-byte block header is printed for raw blocks.
            print '- RAW BLOCK HEADER ('+sequence+') -'
            print binascii.hexlify(body[:80])
        elif topic == "rawtx":
            print '- RAW TX ('+sequence+') -'
            print binascii.hexlify(body)
except KeyboardInterrupt:
    # Ctrl-C: tear down the ZMQ context (closes the socket).
    zmqContext.destroy()
leiferikb/bitpop-private | chrome/common/extensions/docs/examples/apps/hello-python/oauth2/__init__.py | 257 | 25629 | """
The MIT License
Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import urllib
import time
import random
import urlparse
import hmac
import binascii
import httplib2
try:
from urlparse import parse_qs, parse_qsl
except ImportError:
from cgi import parse_qs, parse_qsl
VERSION = '1.0' # Hi Blaine!
HTTP_METHOD = 'GET'
SIGNATURE_METHOD = 'PLAINTEXT'
class Error(RuntimeError):
    """Base exception for OAuth failures."""

    def __init__(self, message='OAuth error occurred.'):
        self._message = message

    @property
    def message(self):
        """A hack to get around the deprecation errors in 2.6."""
        return self._message

    def __str__(self):
        return self.message
class MissingSignature(Error):
    """Error subclass raised when an OAuth signature is missing."""
    pass
def build_authenticate_header(realm=''):
    """Optional WWW-Authenticate header (401 error)."""
    value = 'OAuth realm="%s"' % realm
    return {'WWW-Authenticate': value}
def build_xoauth_string(url, consumer, token=None):
    """Build an XOAUTH string for use in SMTP/IMAP authentication."""
    # Sign a GET request for *url* with HMAC-SHA1, then serialize the
    # oauth_* parameters as comma-separated key="value" pairs.
    request = Request.from_consumer_and_token(consumer, token,
                                              "GET", url)
    signing_method = SignatureMethod_HMAC_SHA1()
    request.sign_request(signing_method, consumer, token)
    params = []
    for k, v in sorted(request.iteritems()):
        if v is not None:
            params.append('%s="%s"' % (k, escape(v)))
    return "%s %s %s" % ("GET", url, ','.join(params))
def escape(s):
    """Escape a URL including any /."""
    # Percent-encode everything except '~'; unlike urllib.quote's default,
    # '/' is NOT left unescaped here. (Python 2 urllib API.)
    return urllib.quote(s, safe='~')
def generate_timestamp():
    """Get seconds since epoch (UTC)."""
    now = time.time()
    return int(now)
def generate_nonce(length=8):
    """Generate a pseudorandom string of *length* decimal digits."""
    digits = [str(random.randint(0, 9)) for _ in range(length)]
    return ''.join(digits)
def generate_verifier(length=8):
    """Generate a pseudorandom verifier string of *length* decimal digits."""
    digits = [str(random.randint(0, 9)) for _ in range(length)]
    return ''.join(digits)
class Consumer(object):
    """A consumer of OAuth-protected services (the OAuth client).

    A consumer is a third-party service that accesses protected resources
    from an OAuth service provider on behalf of an end user. At
    registration time the provider hands the consumer a *key* and a
    *secret*: the key identifies the consumer on every request, while the
    secret is used only when signing requests, proving the request really
    comes from that registered consumer. With these credentials the
    consumer can ask the provider for a request token, kicking off the
    OAuth authorization process.
    """

    key = None
    secret = None

    def __init__(self, key, secret):
        self.key, self.secret = key, secret
        if self.key is None or self.secret is None:
            raise ValueError("Key and secret must be set.")

    def __str__(self):
        # Serialize the credentials as a urlencoded query string.
        return urllib.urlencode({
            'oauth_consumer_key': self.key,
            'oauth_consumer_secret': self.secret,
        })
class Token(object):
    """An OAuth credential used to request authorization or a protected
    resource.

    Tokens in OAuth comprise a *key* and a *secret*. The key is included in
    requests to identify the token being used, but the secret is used only in
    the signature, to prove that the requester is who the server gave the
    token to.

    When first negotiating the authorization, the consumer asks for a *request
    token* that the live user authorizes with the service provider. The
    consumer then exchanges the request token for an *access token* that can
    be used to access protected resources.
    """
    key = None
    secret = None
    callback = None
    callback_confirmed = None
    verifier = None
    def __init__(self, key, secret):
        self.key = key
        self.secret = secret
        if self.key is None or self.secret is None:
            raise ValueError("Key and secret must be set.")
    def set_callback(self, callback):
        """Store the callback URL and flag it as confirmed ('true')."""
        self.callback = callback
        self.callback_confirmed = 'true'
    def set_verifier(self, verifier=None):
        """Store *verifier*, generating a fresh one when omitted."""
        if verifier is not None:
            self.verifier = verifier
        else:
            self.verifier = generate_verifier()
    def get_callback_url(self):
        """Return the callback URL with ``oauth_verifier`` merged into its
        query string; requires both callback and verifier to be set."""
        if self.callback and self.verifier:
            # Append the oauth_verifier.
            parts = urlparse.urlparse(self.callback)
            scheme, netloc, path, params, query, fragment = parts[:6]
            if query:
                query = '%s&oauth_verifier=%s' % (query, self.verifier)
            else:
                query = 'oauth_verifier=%s' % self.verifier
            return urlparse.urlunparse((scheme, netloc, path, params,
                query, fragment))
        return self.callback
    def to_string(self):
        """Returns this token as a plain string, suitable for storage.

        The resulting string includes the token's secret, so you should never
        send or store this string where a third party can read it.
        """
        data = {
            'oauth_token': self.key,
            'oauth_token_secret': self.secret,
        }
        if self.callback_confirmed is not None:
            data['oauth_callback_confirmed'] = self.callback_confirmed
        return urllib.urlencode(data)
    @staticmethod
    def from_string(s):
        """Deserializes a token from a string like one returned by
        `to_string()`."""
        if not len(s):
            raise ValueError("Invalid parameter string.")
        params = parse_qs(s, keep_blank_values=False)
        if not len(params):
            raise ValueError("Invalid parameter string.")
        try:
            key = params['oauth_token'][0]
        except Exception:
            raise ValueError("'oauth_token' not found in OAuth request.")
        try:
            secret = params['oauth_token_secret'][0]
        except Exception:
            raise ValueError("'oauth_token_secret' not found in "
                "OAuth request.")
        token = Token(key, secret)
        try:
            token.callback_confirmed = params['oauth_callback_confirmed'][0]
        except KeyError:
            pass # 1.0, no callback confirmed.
        return token
    def __str__(self):
        return self.to_string()
def setter(attr):
    """Property factory: use as ``@setter`` on a method to turn it into a
    property whose setter is the decorated function and whose getter and
    deleter operate on ``self.__dict__`` under the function's name."""
    key = attr.__name__

    def _get(self):
        try:
            return self.__dict__[key]
        except KeyError:
            raise AttributeError(key)

    def _del(self):
        del self.__dict__[key]

    return property(_get, attr, _del)
class Request(dict):
"""The parameters and information for an HTTP request, suitable for
authorizing with OAuth credentials.
When a consumer wants to access a service's protected resources, it does
so using a signed HTTP request identifying itself (the consumer) with its
key, and providing an access token authorized by the end user to access
those resources.
"""
version = VERSION
    def __init__(self, method=HTTP_METHOD, url=None, parameters=None):
        """Create a request; *parameters*, if given, seed the underlying dict."""
        self.method = method
        # Assigning .url invokes the @setter property defined on this class,
        # which also derives self.normalized_url.
        self.url = url
        if parameters is not None:
            self.update(parameters)
    @setter
    def url(self, value):
        """Property setter: store *value* and derive ``normalized_url``
        (scheme + host + path, without params/query/fragment)."""
        self.__dict__['url'] = value
        if value is not None:
            scheme, netloc, path, params, query, fragment = urlparse.urlparse(value)
            # Exclude default port numbers.
            if scheme == 'http' and netloc[-3:] == ':80':
                netloc = netloc[:-3]
            elif scheme == 'https' and netloc[-4:] == ':443':
                netloc = netloc[:-4]
            if scheme not in ('http', 'https'):
                raise ValueError("Unsupported URL %s (%s)." % (value, scheme))
            # Normalized URL excludes params, query, and fragment.
            self.normalized_url = urlparse.urlunparse((scheme, netloc, path, None, None, None))
        else:
            # No URL: clear both the stored and normalized forms.
            self.normalized_url = None
            self.__dict__['url'] = None
    @setter
    def method(self, value):
        # Store the HTTP verb uppercased; writes to __dict__ directly so the
        # property setter is not re-entered.
        self.__dict__['method'] = value.upper()
    def _get_timestamp_nonce(self):
        """Return the (oauth_timestamp, oauth_nonce) pair stored on this request."""
        return self['oauth_timestamp'], self['oauth_nonce']
    def get_nonoauth_parameters(self):
        """Get any non-OAuth parameters."""
        # Keep every key that does not use the reserved 'oauth_' prefix.
        return dict([(k, v) for k, v in self.iteritems()
                    if not k.startswith('oauth_')])
def to_header(self, realm=''):
"""Serialize as a header for an HTTPAuth request."""
oauth_params = ((k, v) for k, v in self.items()
if k.startswith('oauth_'))
stringy_params = ((k, escape(str(v))) for k, v in oauth_params)
header_params = ('%s="%s"' % (k, v) for k, v in stringy_params)
params_header = ', '.join(header_params)
auth_header = 'OAuth realm="%s"' % realm
if params_header:
auth_header = "%s, %s" % (auth_header, params_header)
return {'Authorization': auth_header}
    def to_postdata(self):
        """Serialize as post data for a POST request."""
        # tell urlencode to deal with sequence values and map them correctly
        # to resulting querystring. for example self["k"] = ["v1", "v2"] will
        # result in 'k=v1&k=v2' and not k=%5B%27v1%27%2C+%27v2%27%5D
        # Spaces are emitted as %20 to match the encoding used when signing
        # (see get_normalized_parameters).
        return urllib.urlencode(self, True).replace('+', '%20')
    def to_url(self):
        """Serialize as a URL for a GET request."""
        base_url = urlparse.urlparse(self.url)
        try:
            query = base_url.query
        except AttributeError:
            # must be python <2.5
            query = base_url[4]
        query = parse_qs(query)
        # Merge this request's parameters into the URL's existing query.
        for k, v in self.items():
            query.setdefault(k, []).append(v)
        try:
            scheme = base_url.scheme
            netloc = base_url.netloc
            path = base_url.path
            params = base_url.params
            fragment = base_url.fragment
        except AttributeError:
            # must be python <2.5
            scheme = base_url[0]
            netloc = base_url[1]
            path = base_url[2]
            params = base_url[3]
            fragment = base_url[5]
        url = (scheme, netloc, path, params,
               urllib.urlencode(query, True), fragment)
        return urlparse.urlunparse(url)
    def get_parameter(self, parameter):
        """Return the value of *parameter*, raising Error when absent."""
        ret = self.get(parameter)
        # NOTE(review): a parameter explicitly set to None is indistinguishable
        # from a missing one here, so it also raises.
        if ret is None:
            raise Error('Parameter not found: %s' % parameter)
        return ret
    def get_normalized_parameters(self):
        """Return a string that contains the parameters that must be signed."""
        items = []
        for key, value in self.iteritems():
            # The signature itself is never part of its own base string.
            if key == 'oauth_signature':
                continue
            # 1.0a/9.1.1 states that kvp must be sorted by key, then by value,
            # so we unpack sequence values into multiple items for sorting.
            if hasattr(value, '__iter__'):
                items.extend((key, item) for item in value)
            else:
                items.append((key, value))
        # Include any query string parameters from the provided URL
        query = urlparse.urlparse(self.url)[4]
        url_items = self._split_url_string(query).items()
        non_oauth_url_items = list([(k, v) for k, v in url_items if not k.startswith('oauth_')])
        items.extend(non_oauth_url_items)
        encoded_str = urllib.urlencode(sorted(items))
        # Encode signature parameters per Oauth Core 1.0 protocol
        # spec draft 7, section 3.6
        # (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6)
        # Spaces must be encoded with "%20" instead of "+"
        return encoded_str.replace('+', '%20').replace('%7E', '~')
def sign_request(self, signature_method, consumer, token):
"""Set the signature parameter to the result of sign."""
if 'oauth_consumer_key' not in self:
self['oauth_consumer_key'] = consumer.key
if token and 'oauth_token' not in self:
self['oauth_token'] = token.key
self['oauth_signature_method'] = signature_method.name
self['oauth_signature'] = signature_method.sign(self, consumer, token)
@classmethod
def make_timestamp(cls):
"""Get seconds since epoch (UTC)."""
return str(int(time.time()))
@classmethod
def make_nonce(cls):
"""Generate pseudorandom number."""
return str(random.randint(0, 100000000))
    @classmethod
    def from_request(cls, http_method, http_url, headers=None, parameters=None,
            query_string=None):
        """Combines multiple parameter sources.

        Precedence (lowest to highest): explicit *parameters*, the OAuth
        Authorization header, *query_string*, then the URL's own query.
        Returns a new request instance, or None when no parameters were found.
        """
        if parameters is None:
            parameters = {}
        # Headers
        if headers and 'Authorization' in headers:
            auth_header = headers['Authorization']
            # Check that the authorization header is OAuth.
            if auth_header[:6] == 'OAuth ':
                auth_header = auth_header[6:]
                try:
                    # Get the parameters from the header.
                    header_params = cls._split_header(auth_header)
                    parameters.update(header_params)
                except:
                    # NOTE(review): bare except swallows all errors, including
                    # KeyboardInterrupt -- consider narrowing to Exception.
                    raise Error('Unable to parse OAuth parameters from '
                        'Authorization header.')
        # GET or POST query string.
        if query_string:
            query_params = cls._split_url_string(query_string)
            parameters.update(query_params)
        # URL parameters.
        param_str = urlparse.urlparse(http_url)[4] # query
        url_params = cls._split_url_string(param_str)
        parameters.update(url_params)
        if parameters:
            return cls(http_method, http_url, parameters)
        return None
@classmethod
def from_consumer_and_token(cls, consumer, token=None,
http_method=HTTP_METHOD, http_url=None, parameters=None):
if not parameters:
parameters = {}
defaults = {
'oauth_consumer_key': consumer.key,
'oauth_timestamp': cls.make_timestamp(),
'oauth_nonce': cls.make_nonce(),
'oauth_version': cls.version,
}
defaults.update(parameters)
parameters = defaults
if token:
parameters['oauth_token'] = token.key
if token.verifier:
parameters['oauth_verifier'] = token.verifier
return Request(http_method, http_url, parameters)
    @classmethod
    def from_token_and_callback(cls, token, callback=None,
            http_method=HTTP_METHOD, http_url=None, parameters=None):
        """Build a request carrying *token* (and *callback*, when given).

        Note: a caller-supplied *parameters* dict is mutated in place.
        """
        if not parameters:
            parameters = {}
        parameters['oauth_token'] = token.key
        if callback:
            parameters['oauth_callback'] = callback
        return cls(http_method, http_url, parameters)
@staticmethod
def _split_header(header):
"""Turn Authorization: header into parameters."""
params = {}
parts = header.split(',')
for param in parts:
# Ignore realm parameter.
if param.find('realm') > -1:
continue
# Remove whitespace.
param = param.strip()
# Split key-value.
param_parts = param.split('=', 1)
# Remove quotes and unescape the value.
params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
return params
    @staticmethod
    def _split_url_string(param_str):
        """Turn URL string into parameters.

        Only the first value of each key is kept; blank values are dropped.
        """
        parameters = parse_qs(param_str, keep_blank_values=False)
        # parse_qs yields lists; flatten to the (decoded) first value per key.
        for k, v in parameters.iteritems():
            parameters[k] = urllib.unquote(v[0])
        return parameters
class Client(httplib2.Http):
    """OAuthClient is a worker to attempt to execute a request.

    An httplib2.Http subclass that signs every request with the configured
    consumer/token before sending it.
    """
    def __init__(self, consumer, token=None, cache=None, timeout=None,
        proxy_info=None):
        if consumer is not None and not isinstance(consumer, Consumer):
            raise ValueError("Invalid consumer.")
        if token is not None and not isinstance(token, Token):
            raise ValueError("Invalid token.")
        self.consumer = consumer
        self.token = token
        # Default signing method; override via set_signature_method().
        self.method = SignatureMethod_HMAC_SHA1()
        httplib2.Http.__init__(self, cache=cache, timeout=timeout,
            proxy_info=proxy_info)
    def set_signature_method(self, method):
        """Replace the signature method used for subsequent requests."""
        if not isinstance(method, SignatureMethod):
            raise ValueError("Invalid signature method.")
        self.method = method
    def request(self, uri, method="GET", body=None, headers=None,
        redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None):
        DEFAULT_CONTENT_TYPE = 'application/x-www-form-urlencoded'
        if not isinstance(headers, dict):
            headers = {}
        # Multipart POST bodies cannot carry OAuth parameters, so those go
        # into the Authorization header instead (see below).
        is_multipart = method == 'POST' and headers.get('Content-Type',
            DEFAULT_CONTENT_TYPE) != DEFAULT_CONTENT_TYPE
        if body and method == "POST" and not is_multipart:
            # Form-encoded body parameters take part in the signature.
            parameters = dict(parse_qsl(body))
        else:
            parameters = None
        req = Request.from_consumer_and_token(self.consumer,
            token=self.token, http_method=method, http_url=uri,
            parameters=parameters)
        req.sign_request(self.method, self.consumer, self.token)
        if method == "POST":
            headers['Content-Type'] = headers.get('Content-Type',
                DEFAULT_CONTENT_TYPE)
            if is_multipart:
                headers.update(req.to_header())
            else:
                body = req.to_postdata()
        elif method == "GET":
            # OAuth parameters travel in the query string for GET.
            uri = req.to_url()
        else:
            headers.update(req.to_header())
        return httplib2.Http.request(self, uri, method=method, body=body,
            headers=headers, redirections=redirections,
            connection_type=connection_type)
class Server(object):
    """A skeletal implementation of a service provider, providing protected
    resources to requests from authorized consumers.
    This class implements the logic to check requests for authorization. You
    can use it with your web server or web framework to protect certain
    resources with OAuth.
    """
    timestamp_threshold = 300 # In seconds, five minutes.
    version = VERSION
    signature_methods = None
    def __init__(self, signature_methods=None):
        self.signature_methods = signature_methods or {}
    def add_signature_method(self, signature_method):
        """Register a signature method under its name; return the registry."""
        self.signature_methods[signature_method.name] = signature_method
        return self.signature_methods
    def verify_request(self, request, consumer, token):
        """Verifies an api call and checks all the parameters."""
        # Called for its validation side effect: raises on a bad version.
        self._get_version(request)
        self._check_signature(request, consumer, token)
        parameters = request.get_nonoauth_parameters()
        return parameters
    def build_authenticate_header(self, realm=''):
        """Optional support for the authenticate header."""
        return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}
    def _get_version(self, request):
        """Verify the correct version request for this server."""
        try:
            version = request.get_parameter('oauth_version')
        except Exception:
            # oauth_version is optional; fall back to the default.
            version = VERSION
        if version and version != self.version:
            raise Error('OAuth version %s not supported.' % str(version))
        return version
    def _get_signature_method(self, request):
        """Figure out the signature with some defaults."""
        try:
            signature_method = request.get_parameter('oauth_signature_method')
        except Exception:
            signature_method = SIGNATURE_METHOD
        try:
            # Get the signature method object.
            signature_method = self.signature_methods[signature_method]
        except Exception:
            signature_method_names = ', '.join(self.signature_methods.keys())
            raise Error('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names))
        return signature_method
    def _get_verifier(self, request):
        return request.get_parameter('oauth_verifier')
    def _check_signature(self, request, consumer, token):
        """Raise unless the request carries a fresh, valid signature."""
        timestamp, nonce = request._get_timestamp_nonce()
        self._check_timestamp(timestamp)
        signature_method = self._get_signature_method(request)
        try:
            signature = request.get_parameter('oauth_signature')
        except Exception:
            raise MissingSignature('Missing oauth_signature.')
        # Validate the signature.
        valid = signature_method.check(request, consumer, token, signature)
        if not valid:
            key, base = signature_method.signing_base(request, consumer, token)
            raise Error('Invalid signature. Expected signature base '
                'string: %s' % base)
        # (Previously the signature was re-computed here and discarded --
        # dead work, now removed.)
    def _check_timestamp(self, timestamp):
        """Verify that timestamp is recentish."""
        timestamp = int(timestamp)
        now = int(time.time())
        lapsed = now - timestamp
        if lapsed > self.timestamp_threshold:
            raise Error('Expired timestamp: given %d and now %s has a '
                'greater difference than threshold %d' % (timestamp, now,
                self.timestamp_threshold))
class SignatureMethod(object):
    """Abstract strategy for signing OAuth requests.

    The OAuth protocol lets consumers and service providers pick a way to
    sign requests. Subclasses implement signing_base() and sign();
    check() is provided here in terms of sign().
    """
    def signing_base(self, request, consumer, token):
        """Return a 2-tuple ``(key, message)`` used to produce the signature.

        The message part may also be surfaced in error output to help
        clients debug signature mismatches.
        """
        raise NotImplementedError
    def sign(self, request, consumer, token):
        """Return the signature string for *request*.

        Implementations should build the message via signing_base() so the
        base string is available for debugging.
        """
        raise NotImplementedError
    def check(self, request, consumer, token, signature):
        """Return True when *signature* matches the one we would produce."""
        expected = self.sign(request, consumer, token)
        return expected == signature
class SignatureMethod_HMAC_SHA1(SignatureMethod):
    """HMAC-SHA1 signature method (OAuth Core 1.0, section 9.2)."""
    name = 'HMAC-SHA1'
    def signing_base(self, request, consumer, token):
        """Return (key, base string): METHOD&URL&normalized-params, escaped."""
        if request.normalized_url is None:
            raise ValueError("Base URL for request is not set.")
        sig = (
            escape(request.method),
            escape(request.normalized_url),
            escape(request.get_normalized_parameters()),
        )
        # Key is "<consumer secret>&<token secret>"; token part is empty
        # when no token is present.
        key = '%s&' % escape(consumer.secret)
        if token:
            key += escape(token.secret)
        raw = '&'.join(sig)
        return key, raw
    def sign(self, request, consumer, token):
        """Return the base64-encoded HMAC-SHA1 signature of the base string."""
        key, raw = self.signing_base(request, consumer, token)
        # HMAC object.
        try:
            from hashlib import sha1 as sha
        except ImportError:
            import sha # Deprecated
        # NOTE: str key/message is Python 2 usage; Python 3's hmac requires
        # bytes here.
        hashed = hmac.new(key, raw, sha)
        # Calculate the digest base 64.
        return binascii.b2a_base64(hashed.digest())[:-1]
class SignatureMethod_PLAINTEXT(SignatureMethod):
    """Trivial signature method: the signature is the shared secrets."""
    name = 'PLAINTEXT'
    def signing_base(self, request, consumer, token):
        """Return the '<consumer secret>&<token secret>' pair (key == raw)."""
        token_secret = escape(token.secret) if token else ''
        sig = escape(consumer.secret) + '&' + token_secret
        return sig, sig
    def sign(self, request, consumer, token):
        """Return the plaintext signature (the raw signing base itself)."""
        key, raw = self.signing_base(request, consumer, token)
        return raw
| bsd-3-clause |
tanium/pytan | lib/taniumpy/object_types/action.py | 1 | 2109 |
# Copyright (c) 2015 Tanium Inc
#
# Generated from console.wsdl version 0.0.1
#
#
from .base import BaseType
class Action(BaseType):
    """Auto-generated binding for the Tanium SOAP ``action`` object.

    Declares the simple (scalar) and complex (nested object) properties of
    the console.wsdl ``action`` type; every attribute defaults to None
    until populated from a SOAP response.
    """
    _soap_tag = 'action'
    def __init__(self):
        BaseType.__init__(
            self,
            simple_properties={'id': int,
                        'name': str,
                        'comment': str,
                        'start_time': str,
                        'expiration_time': str,
                        'status': str,
                        'skip_lock_flag': int,
                        'expire_seconds': int,
                        'distribute_seconds': int,
                        'creation_time': str,
                        'stopped_flag': int,
                        'cache_row_id': int},
            complex_properties={'target_group': Group,
                        'action_group': Group,
                        'package_spec': PackageSpec,
                        'user': User,
                        'approver': User,
                        'history_saved_question': SavedQuestion,
                        'saved_action': SavedAction,
                        'metadata': MetadataList},
            list_properties={},
        )
        self.id = None
        self.name = None
        self.comment = None
        self.start_time = None
        self.expiration_time = None
        self.status = None
        self.skip_lock_flag = None
        self.expire_seconds = None
        self.distribute_seconds = None
        self.creation_time = None
        self.stopped_flag = None
        self.cache_row_id = None
        self.target_group = None
        self.action_group = None
        self.package_spec = None
        self.user = None
        self.approver = None
        self.history_saved_question = None
        self.saved_action = None
        self.metadata = None
from group import Group
from group import Group
from package_spec import PackageSpec
from user import User
from user import User
from saved_question import SavedQuestion
from saved_action import SavedAction
from metadata_list import MetadataList
| mit |
Ziyann/android_kernel_samsung_espresso | tools/perf/python/twatch.py | 3213 | 1338 | #! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <acme@redhat.com>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
    # Open one task/comm event per CPU for all threads and dump every sample.
    cpus = perf.cpu_map()
    threads = perf.thread_map()
    evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
                       wakeup_events = 1, sample_period = 1,
                       sample_id_all = 1,
                       sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU | perf.SAMPLE_TID)
    # NOTE(review): perf.SAMPLE_TID appears twice in sample_type above; the
    # second occurrence was possibly meant to be SAMPLE_TIME -- confirm.
    evsel.open(cpus = cpus, threads = threads);
    evlist = perf.evlist(cpus, threads)
    evlist.add(evsel)
    evlist.mmap()
    # Block until events arrive, then drain each CPU's ring buffer.
    while True:
        evlist.poll(timeout = -1)
        for cpu in cpus:
            event = evlist.read_on_cpu(cpu)
            if not event:
                continue
            print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
                                                    event.sample_pid,
                                                    event.sample_tid),
            print event
if __name__ == '__main__':
    main()
| gpl-2.0 |
dermoth/gramps | gramps/gen/lib/place.py | 6 | 20184 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2010 Michiel D. Nauta
# Copyright (C) 2011 Tim G L Lyons
# Copyright (C) 2013 Doug Blank <doug.blank@gmail.com>
# Copyright (C) 2017 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Place object for Gramps.
"""
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .primaryobj import PrimaryObject
from .placeref import PlaceRef
from .placename import PlaceName
from .placetype import PlaceType
from .citationbase import CitationBase
from .notebase import NoteBase
from .mediabase import MediaBase
from .urlbase import UrlBase
from .tagbase import TagBase
from .location import Location
from ..const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Place class
#
#-------------------------------------------------------------------------
class Place(CitationBase, NoteBase, MediaBase, UrlBase, PrimaryObject):
    """
    Contains information related to a place, including multiple address
    information (since place names can change with time), longitude, latitude,
    a collection of images and URLs, a note and a source.
    """
    def __init__(self, source=None):
        """
        Create a new Place object, copying from the source if present.
        :param source: A Place object used to initialize the new Place
        :type source: Place
        """
        PrimaryObject.__init__(self, source)
        CitationBase.__init__(self, source)
        NoteBase.__init__(self, source)
        MediaBase.__init__(self, source)
        UrlBase.__init__(self, source)
        if source:
            self.long = source.long
            self.lat = source.lat
            self.title = source.title
            self.name = source.name
            self.alt_names = source.alt_names
            self.placeref_list = list(map(PlaceRef, source.placeref_list))
            self.place_type = source.place_type
            self.code = source.code
            self.alt_loc = list(map(Location, source.alt_loc))
        else:
            self.long = ""
            self.lat = ""
            self.title = ""
            self.name = PlaceName()
            self.alt_names = []
            self.placeref_list = []
            self.place_type = PlaceType()
            self.code = ""
            self.alt_loc = []
    def serialize(self):
        """
        Convert the data held in the Place to a Python tuple that
        represents all the data elements.
        This method is used to convert the object into a form that can easily
        be saved to a database.
        These elements may be primitive Python types (string, integers),
        complex Python types (lists or tuples, or Python objects. If the
        target database cannot handle complex types (such as objects or
        lists), the database is responsible for converting the data into
        a form that it can use.
        :returns: Returns a python tuple containing the data that should
                  be considered persistent.
        :rtype: tuple
        """
        return (self.handle, self.gramps_id, self.title, self.long, self.lat,
                [pr.serialize() for pr in self.placeref_list],
                self.name.serialize(),
                [an.serialize() for an in self.alt_names],
                self.place_type.serialize(), self.code,
                [al.serialize() for al in self.alt_loc],
                UrlBase.serialize(self),
                MediaBase.serialize(self),
                CitationBase.serialize(self),
                NoteBase.serialize(self),
                self.change, TagBase.serialize(self), self.private)
    @classmethod
    def get_schema(cls):
        """
        Returns the JSON Schema for this class.
        :returns: Returns a dict containing the schema.
        :rtype: dict
        """
        from .url import Url
        from .mediaref import MediaRef
        return {
            "type": "object",
            "title": _("Place"),
            "properties": {
                "_class": {"enum": [cls.__name__]},
                "handle": {"type": "string",
                           "maxLength": 50,
                           "title": _("Handle")},
                "gramps_id": {"type": "string",
                              "title": _("Gramps ID")},
                "title": {"type": "string",
                          "title": _("Title")},
                "long": {"type": "string",
                         "title": _("Longitude")},
                "lat": {"type": "string",
                        "title": _("Latitude")},
                "placeref_list": {"type": "array",
                                  "items": PlaceRef.get_schema(),
                                  "title": _("Places")},
                "name": PlaceName.get_schema(),
                "alt_names": {"type": "array",
                              "items": PlaceName.get_schema(),
                              "title": _("Alternate Names")},
                "place_type": PlaceType.get_schema(),
                "code": {"type": "string",
                         "title": _("Code")},
                "alt_loc": {"type": "array",
                            "items": Location.get_schema(),
                            "title": _("Alternate Locations")},
                "urls": {"type": "array",
                         "items": Url.get_schema(),
                         "title": _("URLs")},
                "media_list": {"type": "array",
                               "items": MediaRef.get_schema(),
                               "title": _("Media")},
                "citation_list": {"type": "array",
                                  "items": {"type": "string",
                                            "maxLength": 50},
                                  "title": _("Citations")},
                "note_list": {"type": "array",
                              "items": {"type": "string",
                                        "maxLength": 50},
                              "title": _("Notes")},
                "change": {"type": "integer",
                           "title": _("Last changed")},
                "tag_list": {"type": "array",
                             "items": {"type": "string",
                                       "maxLength": 50},
                             "title": _("Tags")},
                "private": {"type": "boolean",
                            "title": _("Private")}
            }
        }
    def unserialize(self, data):
        """
        Convert the data held in a tuple created by the serialize method
        back into the data in a Place object.
        :param data: tuple containing the persistent data associated with the
                     Place object
        :type data: tuple
        """
        (self.handle, self.gramps_id, self.title, self.long, self.lat,
         placeref_list, name, alt_names, the_type, self.code,
         alt_loc, urls, media_list, citation_list, note_list,
         self.change, tag_list, self.private) = data
        self.place_type = PlaceType()
        self.place_type.unserialize(the_type)
        self.alt_loc = [Location().unserialize(al) for al in alt_loc]
        self.placeref_list = [PlaceRef().unserialize(pr) for pr in placeref_list]
        self.name = PlaceName().unserialize(name)
        self.alt_names = [PlaceName().unserialize(an) for an in alt_names]
        UrlBase.unserialize(self, urls)
        MediaBase.unserialize(self, media_list)
        CitationBase.unserialize(self, citation_list)
        NoteBase.unserialize(self, note_list)
        TagBase.unserialize(self, tag_list)
        return self
    def get_text_data_list(self):
        """
        Return the list of all textual attributes of the object.
        :returns: Returns the list of all textual attributes of the object.
        :rtype: list
        """
        return [self.long, self.lat, self.title, self.gramps_id]
    def get_text_data_child_list(self):
        """
        Return the list of child objects that may carry textual data.
        :returns: Returns the list of child objects that may carry textual data.
        :rtype: list
        """
        ret = (self.media_list + self.alt_loc + self.urls +
               self.name.get_text_data_child_list() + self.alt_names)
        return ret
    def get_citation_child_list(self):
        """
        Return the list of child secondary objects that may refer citations.
        :returns: List of child secondary child objects that may refer citations.
        :rtype: list
        """
        return self.media_list
    def get_note_child_list(self):
        """
        Return the list of child secondary objects that may refer notes.
        :returns: Returns the list of child secondary child objects that may
                  refer notes.
        :rtype: list
        """
        return self.media_list
    def get_handle_referents(self):
        """
        Return the list of child objects which may, directly or through
        their children, reference primary objects.
        :returns: Returns the list of objects referencing primary objects.
        :rtype: list
        """
        return self.get_citation_child_list() + self.placeref_list
    def get_referenced_handles(self):
        """
        Return the list of (classname, handle) tuples for all directly
        referenced primary objects.
        :returns: List of (classname, handle) tuples for referenced objects.
        :rtype: list
        """
        return (self.get_referenced_note_handles() +
                self.get_referenced_citation_handles() +
                self.get_referenced_tag_handles())
    def merge(self, acquisition):
        """ Merge the content of acquisition into this place.
        :param acquisition: The place to merge with the present place.
        :type acquisition: Place
        """
        self._merge_privacy(acquisition)
        self._merge_locations(acquisition)
        self._merge_alt_names(acquisition)
        self._merge_media_list(acquisition)
        self._merge_url_list(acquisition)
        self._merge_note_list(acquisition)
        self._merge_citation_list(acquisition)
        self._merge_tag_list(acquisition)
        self._merge_placeref_list(acquisition)
    def set_title(self, title):
        """
        Set the descriptive title of the Place object.
        :param title: descriptive title to assign to the Place
        :type title: str
        """
        self.title = title
    def get_title(self):
        """
        Return the descriptive title of the Place object.
        :returns: Returns the descriptive title of the Place
        :rtype: str
        """
        return self.title
    def set_name(self, name):
        """
        Set the name of the Place object.
        :param name: name to assign to the Place
        :type name: PlaceName
        """
        if not isinstance(name, PlaceName):
            raise ValueError("Place.set_name(name) requires a PlaceName()")
        self.name = name
    def get_name(self):
        """
        Return the name of the Place object.
        :returns: Returns the name of the Place
        :rtype: PlaceName
        """
        return self.name
    def get_all_names(self):
        """
        Return a list of all names of the Place object.
        :returns: Returns a list of all names of the Place
        :rtype: list of PlaceName
        """
        return [self.name] + self.alt_names
    def set_longitude(self, longitude):
        """
        Set the longitude of the Place object.
        :param longitude: longitude to assign to the Place
        :type longitude: str
        """
        self.long = longitude
    def get_longitude(self):
        """
        Return the longitude of the Place object.
        :returns: Returns the longitude of the Place
        :rtype: str
        """
        return self.long
    def set_latitude(self, latitude):
        """
        Set the latitude of the Place object.
        :param latitude: latitude to assign to the Place
        :type latitude: str
        """
        self.lat = latitude
    def get_latitude(self):
        """
        Return the latitude of the Place object.
        :returns: Returns the latitude of the Place
        :rtype: str
        """
        return self.lat
    def set_type(self, place_type):
        """
        Set the type of the Place object.
        :param place_type: type to assign to the Place
        :type place_type: PlaceType
        """
        self.place_type.set(place_type)
    def get_type(self):
        """
        Return the type of the Place object.
        :returns: Returns the type of the Place
        :rtype: PlaceType
        """
        return self.place_type
    def set_code(self, code):
        """
        Set the code of the Place object.
        :param code: code to assign to the Place
        :type code: str
        """
        self.code = code
    def get_code(self):
        """
        Return the code of the Place object.
        :returns: Returns the code of the Place
        :rtype: str
        """
        return self.code
    def add_placeref(self, placeref):
        """
        Add a place reference to the list of place references.
        :param placeref: place reference to append to the list
        :type placeref: PlaceRef
        """
        self.placeref_list.append(placeref)
    def get_placeref_list(self):
        """
        Return the place reference list for the Place object.
        :returns: Returns the place reference list for the Place
        :rtype: list
        """
        return self.placeref_list
    def set_placeref_list(self, placeref_list):
        """
        Set the place reference list for the Place object.
        :param placeref_list: place reference list to assign to the Place
        :type placeref_list: list
        """
        self.placeref_list = placeref_list
    def _merge_placeref_list(self, acquisition):
        """
        Add the place references of acquisition to this place's place
        reference list (skipping duplicates).
        :param acquisition: instance to merge
        :type acquisition: :class:`~.place.Place`
        """
        placeref_list = self.placeref_list[:]
        add_list = acquisition.placeref_list
        for addendum in add_list:
            for placeref in placeref_list:
                if placeref.is_equal(addendum):
                    break
            else:
                self.placeref_list.append(addendum)
    def _has_handle_reference(self, classname, handle):
        """
        Return True if the object has reference to a given handle of given
        primary object type.
        :param classname: The name of the primary object class.
        :type classname: str
        :param handle: The handle to be checked.
        :type handle: str
        :returns: Returns whether the object has reference to this handle of
                  this object type.
        :rtype: bool
        """
        if classname == 'Place':
            for placeref in self.placeref_list:
                if placeref.ref == handle:
                    return True
        return False
    def _replace_handle_reference(self, classname, old_handle, new_handle):
        """
        Replace all references to old handle with those to the new handle.
        :param classname: The name of the primary object class.
        :type classname: str
        :param old_handle: The handle to be replaced.
        :type old_handle: str
        :param new_handle: The handle to replace the old one with.
        :type new_handle: str
        """
        if classname == 'Place':
            for placeref in self.placeref_list:
                if placeref.ref == old_handle:
                    placeref.ref = new_handle
    def get_alternative_names(self):
        """
        Return a list of alternative names for the current Place.
        :returns: Returns the alternative names for the Place
        :rtype: list of PlaceName
        """
        return self.alt_names
    def set_alternative_names(self, name_list):
        """
        Replace the current alternative names list with the new one.
        :param name_list: The list of names to assign to the Place's internal
                          list.
        :type name_list: list of PlaceName
        """
        self.alt_names = name_list
    def add_alternative_name(self, name):
        """
        Add a name to the alternative names list.
        :param name: name to add
        :type name: PlaceName
        """
        if name not in self.alt_names:
            self.alt_names.append(name)
    def get_alternate_locations(self):
        """
        Return a list of alternate :class:`~.location.Location` objects the
        present alternate information about the current Place.
        A Place can have more than one :class:`~.location.Location`, since
        names and jurisdictions can change over time for the same place.
        :returns: Returns the alternate :class:`~.location.Location` objects
                  for the Place
        :rtype: list of :class:`~.location.Location` objects
        """
        return self.alt_loc
    def set_alternate_locations(self, location_list):
        """
        Replace the current alternate :class:`~.location.Location` object list
        with the new one.
        :param location_list: The list of :class:`~.location.Location` objects
                              to assign to the Place's internal list.
        :type location_list: list of :class:`~.location.Location` objects
        """
        self.alt_loc = location_list
    def add_alternate_locations(self, location):
        """
        Add a :class:`~.location.Location` object to the alternate location
        list.
        :param location: :class:`~.location.Location` instance to add
        :type location: :class:`~.location.Location`
        """
        if location not in self.alt_loc:
            self.alt_loc.append(location)
    def _merge_locations(self, acquisition):
        """
        Add the main and alternate locations of acquisition to the alternate
        location list.
        :param acquisition: instance to merge
        :type acquisition: :class:`~.place.Place`
        """
        altloc_list = self.alt_loc[:]
        add_list = acquisition.get_alternate_locations()
        for addendum in add_list:
            for altloc in altloc_list:
                if altloc.is_equal(addendum):
                    break
            else:
                self.alt_loc.append(addendum)
    def _merge_alt_names(self, acquisition):
        """
        Add the main and alternative names of acquisition to the alternative
        names list.
        :param acquisition: instance to merge
        :type acquisition: :class:`~.place.Place`
        """
        if acquisition.name.value:
            if acquisition.name != self.name:
                if acquisition.name not in self.alt_names:
                    self.alt_names.append(acquisition.name)
        for addendum in acquisition.alt_names:
            if addendum.value:
                if addendum != self.name:
                    if addendum not in self.alt_names:
                        self.alt_names.append(addendum)
| gpl-2.0 |
yfried/ansible | lib/ansible/modules/web_infrastructure/ansible_tower/tower_job_template.py | 10 | 10722 | #!/usr/bin/python
# coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <wayne@riotousliving.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_job_template
author: "Wayne Witzel III (@wwitzel3)"
version_added: "2.3"
short_description: create, update, or destroy Ansible Tower job template.
description:
- Create, update, or destroy Ansible Tower job templates. See
U(https://www.ansible.com/tower) for an overview.
options:
name:
description:
- Name to use for the job template.
required: True
description:
description:
- Description to use for the job template.
job_type:
description:
- The job type to use for the job template.
required: True
choices: ["run", "check", "scan"]
inventory:
description:
- Name of the inventory to use for the job template.
project:
description:
- Name of the project to use for the job template.
required: True
playbook:
description:
- Path to the playbook to use for the job template within the project provided.
required: True
credential:
description:
- Name of the credential to use for the job template.
version_added: 2.7
vault_credential:
description:
- Name of the vault credential to use for the job template.
version_added: 2.7
forks:
description:
- The number of parallel or simultaneous processes to use while executing the playbook.
limit:
description:
- A host pattern to further constrain the list of hosts managed or affected by the playbook
verbosity:
description:
- Control the output level Ansible produces as the playbook runs. 0 - Normal, 1 - Verbose, 2 - More Verbose, 3 - Debug, 4 - Connection Debug.
choices: [0, 1, 2, 3, 4]
default: 0
extra_vars_path:
description:
- Path to the C(extra_vars) YAML file.
job_tags:
description:
- Comma separated list of the tags to use for the job template.
force_handlers_enabled:
description:
- Enable forcing playbook handlers to run even if a task fails.
version_added: 2.7
type: bool
default: 'no'
skip_tags:
description:
- Comma separated list of the tags to skip for the job template.
start_at_task:
description:
- Start the playbook at the task matching this name.
version_added: 2.7
fact_caching_enabled:
description:
- Enable use of fact caching for the job template.
version_added: 2.7
type: bool
default: 'no'
host_config_key:
description:
- Allow provisioning callbacks using this host config key.
ask_diff_mode:
description:
- Prompt user to enable diff mode (show changes) to files when supported by modules.
version_added: 2.7
type: bool
default: 'no'
ask_extra_vars:
description:
- Prompt user for (extra_vars) on launch.
type: bool
default: 'no'
ask_limit:
description:
- Prompt user for a limit on launch.
version_added: 2.7
type: bool
default: 'no'
ask_tags:
description:
- Prompt user for job tags on launch.
type: bool
default: 'no'
ask_skip_tags:
description:
- Prompt user for job tags to skip on launch.
version_added: 2.7
type: bool
default: 'no'
ask_job_type:
description:
- Prompt user for job type on launch.
type: bool
default: 'no'
ask_verbosity:
description:
- Prompt user to choose a verbosity level on launch.
version_added: 2.7
type: bool
default: 'no'
ask_inventory:
description:
      - Prompt user for inventory on launch.
type: bool
default: 'no'
ask_credential:
description:
- Prompt user for credential on launch.
type: bool
default: 'no'
survey_enabled:
description:
- Enable a survey on the job template.
version_added: 2.7
type: bool
default: 'no'
survey_spec:
description:
- JSON/YAML dict formatted survey definition.
version_added: 2.8
type: dict
required: False
become_enabled:
description:
- Activate privilege escalation.
type: bool
default: 'no'
concurrent_jobs_enabled:
description:
- Allow simultaneous runs of the job template.
version_added: 2.7
type: bool
default: 'no'
state:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
extends_documentation_fragment: tower
notes:
- JSON for survey_spec can be found in Tower API Documentation. See
U(https://docs.ansible.com/ansible-tower/latest/html/towerapi/api_ref.html#/Job_Templates/Job_Templates_job_templates_survey_spec_create)
for POST operation payload example.
'''
EXAMPLES = '''
- name: Create tower Ping job template
tower_job_template:
name: "Ping"
job_type: "run"
inventory: "Local"
project: "Demo"
playbook: "ping.yml"
credential: "Local"
state: "present"
tower_config_file: "~/tower_cli.cfg"
survey_enabled: yes
survey_spec: "{{ lookup('file', 'my_survey.json') }}"
'''
from ansible.module_utils.ansible_tower import TowerModule, tower_auth_config, tower_check_mode
try:
import tower_cli
import tower_cli.utils.exceptions as exc
from tower_cli.conf import settings
except ImportError:
pass
def update_fields(p):
    """Translate Ansible module option names into the field names that
    tower-cli expects, so the modify/delete methods can be called directly.

    Returns a new dict; the input mapping is not modified.
    """
    # Ansible option name -> tower-cli field name.
    field_map = {
        'fact_caching_enabled': 'use_fact_cache',
        'ask_diff_mode': 'ask_diff_mode_on_launch',
        'ask_extra_vars': 'ask_variables_on_launch',
        'ask_limit': 'ask_limit_on_launch',
        'ask_tags': 'ask_tags_on_launch',
        'ask_skip_tags': 'ask_skip_tags_on_launch',
        'ask_verbosity': 'ask_verbosity_on_launch',
        'ask_inventory': 'ask_inventory_on_launch',
        'ask_credential': 'ask_credential_on_launch',
        'ask_job_type': 'ask_job_type_on_launch',
        'diff_mode_enabled': 'diff_mode',
        'concurrent_jobs_enabled': 'allow_simultaneous',
        'force_handlers_enabled': 'force_handlers',
    }
    params = p.copy()
    # Move every mapped option over to its tower-cli name.
    renamed = {tower_name: params.pop(ansible_name)
               for ansible_name, tower_name in field_map.items()}
    path = params.get('extra_vars_path')
    if path is not None:
        # tower-cli reads extra vars from files given as '@<path>' entries.
        renamed['extra_vars'] = ['@' + path]
    params.update(renamed)
    return params
def update_resources(module, p):
    """Resolve resource names in the module parameters to Tower object ids.

    project/inventory/credential/vault_credential are given by name on the
    module, but tower-cli wants numeric ids, so each one is looked up and
    replaced in a copy of the parameters.  Empty values are removed rather
    than resolved.  Fails the module when a named resource does not exist.
    """
    params = p.copy()
    # module option -> field used to look the resource up in Tower
    identity_map = {
        'project': 'name',
        'inventory': 'name',
        'credential': 'name',
        'vault_credential': 'name',
    }
    for k, v in identity_map.items():
        try:
            if params[k]:
                # Both credential options are served by the 'credential' resource.
                key = 'credential' if '_credential' in k else k
                result = tower_cli.get_resource(key).get(**{v: params[k]})
                params[k] = result['id']
            elif k in params:
                # unset empty parameters to avoid ValueError: invalid literal for int() with base 10: ''
                del(params[k])
        except (exc.NotFound) as excinfo:
            module.fail_json(msg='Failed to update job template: {0}'.format(excinfo), changed=False)
    return params
def main():
    """Ansible entry point: create/update or delete a Tower job template."""
    argument_spec = dict(
        name=dict(required=True),
        description=dict(default=''),
        job_type=dict(choices=['run', 'check', 'scan'], required=True),
        inventory=dict(default=''),
        project=dict(required=True),
        playbook=dict(required=True),
        credential=dict(default=''),
        vault_credential=dict(default=''),
        forks=dict(type='int'),
        limit=dict(default=''),
        verbosity=dict(type='int', choices=[0, 1, 2, 3, 4], default=0),
        extra_vars_path=dict(type='path', required=False),
        job_tags=dict(default=''),
        force_handlers_enabled=dict(type='bool', default=False),
        skip_tags=dict(default=''),
        start_at_task=dict(default=''),
        timeout=dict(type='int', default=0),
        fact_caching_enabled=dict(type='bool', default=False),
        host_config_key=dict(default=''),
        ask_diff_mode=dict(type='bool', default=False),
        ask_extra_vars=dict(type='bool', default=False),
        ask_limit=dict(type='bool', default=False),
        ask_tags=dict(type='bool', default=False),
        ask_skip_tags=dict(type='bool', default=False),
        ask_job_type=dict(type='bool', default=False),
        ask_verbosity=dict(type='bool', default=False),
        ask_inventory=dict(type='bool', default=False),
        ask_credential=dict(type='bool', default=False),
        survey_enabled=dict(type='bool', default=False),
        survey_spec=dict(type='dict', required=False),
        become_enabled=dict(type='bool', default=False),
        diff_mode_enabled=dict(type='bool', default=False),
        concurrent_jobs_enabled=dict(type='bool', default=False),
        state=dict(choices=['present', 'absent'], default='present'),
    )
    module = TowerModule(argument_spec=argument_spec, supports_check_mode=True)
    name = module.params.get('name')
    # Popped so 'state' is not forwarded to tower-cli as a field.
    state = module.params.pop('state')
    json_output = {'job_template': name, 'state': state}
    tower_auth = tower_auth_config(module)
    with settings.runtime_values(**tower_auth):
        tower_check_mode(module)
        jt = tower_cli.get_resource('job_template')
        params = update_resources(module, module.params)
        params = update_fields(params)
        # modify() with create_on_missing=True acts as an upsert.
        params['create_on_missing'] = True
        try:
            if state == 'present':
                result = jt.modify(**params)
                json_output['id'] = result['id']
            elif state == 'absent':
                result = jt.delete(**params)
        except (exc.ConnectionError, exc.BadRequest, exc.NotFound) as excinfo:
            module.fail_json(msg='Failed to update job template: {0}'.format(excinfo), changed=False)
        json_output['changed'] = result['changed']
        module.exit_json(**json_output)


# Run the module when executed directly (Ansible imports and calls main()).
if __name__ == '__main__':
    main()
| gpl-3.0 |
ppanczyk/ansible | lib/ansible/modules/cloud/misc/virt_pool.py | 29 | 21829 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Maciej Delmanowski <drybjed@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: virt_pool
author: "Maciej Delmanowski (@drybjed)"
version_added: "2.0"
short_description: Manage libvirt storage pools
description:
- Manage I(libvirt) storage pools.
options:
name:
required: false
aliases: [ "pool" ]
description:
- name of the storage pool being managed. Note that pool must be previously
defined with xml.
state:
required: false
choices: [ "active", "inactive", "present", "absent", "undefined", "deleted" ]
description:
- specify which state you want a storage pool to be in.
If 'active', pool will be started.
If 'present', ensure that pool is present but do not change its
state; if it's missing, you need to specify xml argument.
If 'inactive', pool will be stopped.
If 'undefined' or 'absent', pool will be removed from I(libvirt) configuration.
If 'deleted', pool contents will be deleted and then pool undefined.
command:
required: false
choices: [ "define", "build", "create", "start", "stop", "destroy",
"delete", "undefine", "get_xml", "list_pools", "facts",
"info", "status" ]
description:
- in addition to state management, various non-idempotent commands are available.
See examples.
autostart:
required: false
choices: ["yes", "no"]
description:
- Specify if a given storage pool should be started automatically on system boot.
uri:
required: false
default: "qemu:///system"
description:
- I(libvirt) connection uri.
xml:
required: false
description:
- XML document used with the define command.
mode:
required: false
choices: [ 'new', 'repair', 'resize', 'no_overwrite', 'overwrite', 'normal', 'zeroed' ]
description:
- Pass additional parameters to 'build' or 'delete' commands.
requirements:
- "python >= 2.6"
- "python-libvirt"
- "python-lxml"
'''
EXAMPLES = '''
# Define a new storage pool
- virt_pool:
command: define
name: vms
xml: '{{ lookup("template", "pool/dir.xml.j2") }}'
# Build a storage pool if it does not exist
- virt_pool:
command: build
name: vms
# Start a storage pool
- virt_pool:
command: create
name: vms
# List available pools
- virt_pool:
command: list_pools
# Get XML data of a specified pool
- virt_pool:
command: get_xml
name: vms
# Stop a storage pool
- virt_pool:
command: destroy
name: vms
# Delete a storage pool (destroys contents)
- virt_pool:
command: delete
name: vms
# Undefine a storage pool
- virt_pool:
command: undefine
name: vms
# Gather facts about storage pools
# Facts will be available as 'ansible_libvirt_pools'
- virt_pool:
command: facts
# Gather information about pools managed by 'libvirt' remotely using uri
- virt_pool:
command: info
uri: '{{ item }}'
with_items: '{{ libvirt_uris }}'
register: storage_pools
# Ensure that a pool is active (needs to be defined and built first)
- virt_pool:
state: active
name: vms
# Ensure that a pool is inactive
- virt_pool:
state: inactive
name: vms
# Ensure that a given pool will be started at boot
- virt_pool:
autostart: yes
name: vms
# Disable autostart for a given pool
- virt_pool:
autostart: no
name: vms
'''
try:
import libvirt
except ImportError:
HAS_VIRT = False
else:
HAS_VIRT = True
try:
from lxml import etree
except ImportError:
HAS_XML = False
else:
HAS_XML = True
from ansible.module_utils.basic import AnsibleModule
# Module return codes.
VIRT_FAILED = 1
VIRT_SUCCESS = 0
VIRT_UNAVAILABLE=2
# ENTRY_COMMANDS operate on a single pool (require 'name'); HOST_COMMANDS
# operate on the whole hypervisor connection.
ALL_COMMANDS = []
ENTRY_COMMANDS = ['create', 'status', 'start', 'stop', 'build', 'delete',
                  'undefine', 'destroy', 'get_xml', 'define', 'refresh']
HOST_COMMANDS = [ 'list_pools', 'facts', 'info' ]
ALL_COMMANDS.extend(ENTRY_COMMANDS)
ALL_COMMANDS.extend(HOST_COMMANDS)
# libvirt numeric pool states mapped to the strings reported to the user.
ENTRY_STATE_ACTIVE_MAP = {
    0 : "inactive",
    1 : "active"
}
ENTRY_STATE_AUTOSTART_MAP = {
    0 : "no",
    1 : "yes"
}
ENTRY_STATE_PERSISTENT_MAP = {
    0 : "no",
    1 : "yes"
}
# virStoragePoolInfo state codes (see libvirt virStoragePoolState).
ENTRY_STATE_INFO_MAP = {
    0 : "inactive",
    1 : "building",
    2 : "running",
    3 : "degraded",
    4 : "inaccessible"
}
# Symbolic 'mode' option values mapped to libvirt build/delete flag bits.
ENTRY_BUILD_FLAGS_MAP = {
    "new" : 0,
    "repair" : 1,
    "resize" : 2,
    "no_overwrite" : 4,
    "overwrite" : 8
}
ENTRY_DELETE_FLAGS_MAP = {
    "normal" : 0,
    "zeroed" : 1
}
# Every accepted value for the 'mode' option.
ALL_MODES = []
ALL_MODES.extend(ENTRY_BUILD_FLAGS_MAP.keys())
ALL_MODES.extend(ENTRY_DELETE_FLAGS_MAP.keys())
class EntryNotFound(Exception):
    """Raised when a storage pool with the requested name does not exist."""
    pass
class LibvirtConnection(object):
    """Wrapper around a libvirt connection exposing storage-pool operations
    keyed by pool name, with support for Ansible check mode.

    In check mode the mutating helpers never touch libvirt; instead they
    exit the module early with changed=True whenever the real call would
    have made a change.
    """

    def __init__(self, uri, module):
        self.module = module
        conn = libvirt.open(uri)
        if not conn:
            raise Exception("hypervisor connection failure")
        self.conn = conn

    def find_entry(self, entryid):
        """Return the pool named `entryid`, or all pools when entryid == -1.

        Raises EntryNotFound when no pool matches.
        """
        results = []
        # Active pools first...
        for name in self.conn.listStoragePools():
            results.append(self.conn.storagePoolLookupByName(name))
        # ...then pools that are defined but not running.
        for name in self.conn.listDefinedStoragePools():
            results.append(self.conn.storagePoolLookupByName(name))
        if entryid == -1:
            return results
        for entry in results:
            if entry.name() == entryid:
                return entry
        raise EntryNotFound("storage pool %s not found" % entryid)

    def create(self, entryid):
        """Start the pool; in check mode report changed when not active."""
        if not self.module.check_mode:
            return self.find_entry(entryid).create()
        else:
            try:
                state = self.find_entry(entryid).isActive()
            except Exception:
                # Pool missing or unreadable: starting it would be a change.
                return self.module.exit_json(changed=True)
            if not state:
                return self.module.exit_json(changed=True)

    def destroy(self, entryid):
        """Stop the pool."""
        if not self.module.check_mode:
            return self.find_entry(entryid).destroy()
        else:
            if self.find_entry(entryid).isActive():
                return self.module.exit_json(changed=True)

    def undefine(self, entryid):
        """Remove the pool definition from libvirt."""
        if not self.module.check_mode:
            return self.find_entry(entryid).undefine()
        else:
            if not self.find_entry(entryid):
                return self.module.exit_json(changed=True)

    def get_status2(self, entry):
        """Status ('active'/'inactive') for an already-resolved pool object."""
        state = entry.isActive()
        return ENTRY_STATE_ACTIVE_MAP.get(state, "unknown")

    def get_status(self, entryid):
        """Status ('active'/'inactive') for the pool named `entryid`."""
        if not self.module.check_mode:
            state = self.find_entry(entryid).isActive()
            return ENTRY_STATE_ACTIVE_MAP.get(state, "unknown")
        else:
            try:
                state = self.find_entry(entryid).isActive()
                return ENTRY_STATE_ACTIVE_MAP.get(state, "unknown")
            except Exception:
                # In check mode a missing pool is reported as inactive.
                return ENTRY_STATE_ACTIVE_MAP.get("inactive", "unknown")

    def get_uuid(self, entryid):
        return self.find_entry(entryid).UUIDString()

    def get_xml(self, entryid):
        return self.find_entry(entryid).XMLDesc(0)

    def get_info(self, entryid):
        return self.find_entry(entryid).info()

    def get_volume_count(self, entryid):
        return self.find_entry(entryid).numOfVolumes()

    def get_volume_names(self, entryid):
        return self.find_entry(entryid).listVolumes()

    def get_devices(self, entryid):
        """Return the pool's source device paths.

        Raises ValueError when the pool defines no source devices.
        BUGFIX: the original fell through and returned None in that case;
        the 'No devices specified' error was unreachable dead code.
        """
        xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
        devices = xml.xpath('/pool/source/device')
        if not devices:
            raise ValueError('No devices specified')
        return [device.get('path') for device in devices]

    def get_format(self, entryid):
        """Return the pool's source format type, or raise ValueError."""
        xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
        try:
            result = xml.xpath('/pool/source/format')[0].get('type')
        except IndexError:
            raise ValueError('Format not specified')
        return result

    def get_host(self, entryid):
        """Return the pool's source host name, or raise ValueError."""
        xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
        try:
            result = xml.xpath('/pool/source/host')[0].get('name')
        except IndexError:
            raise ValueError('Host not specified')
        return result

    def get_source_path(self, entryid):
        """Return the pool's source directory path, or raise ValueError."""
        xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
        try:
            result = xml.xpath('/pool/source/dir')[0].get('path')
        except IndexError:
            raise ValueError('Source path not specified')
        return result

    def get_path(self, entryid):
        xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
        return xml.xpath('/pool/target/path')[0].text

    def get_type(self, entryid):
        xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
        return xml.get('type')

    def build(self, entryid, flags):
        """Build the pool (e.g. create the backing directory/filesystem)."""
        if not self.module.check_mode:
            return self.find_entry(entryid).build(flags)
        else:
            try:
                state = self.find_entry(entryid)
            except Exception:
                return self.module.exit_json(changed=True)
            if not state:
                return self.module.exit_json(changed=True)

    def delete(self, entryid, flags):
        """Delete the pool contents."""
        if not self.module.check_mode:
            return self.find_entry(entryid).delete(flags)
        else:
            try:
                state = self.find_entry(entryid)
            except Exception:
                return self.module.exit_json(changed=True)
            if state:
                return self.module.exit_json(changed=True)

    def get_autostart(self, entryid):
        """Autostart flag mapped to 'yes'/'no'."""
        state = self.find_entry(entryid).autostart()
        return ENTRY_STATE_AUTOSTART_MAP.get(state, "unknown")

    def get_autostart2(self, entryid):
        """Raw autostart flag (int), check-mode aware."""
        if not self.module.check_mode:
            return self.find_entry(entryid).autostart()
        else:
            try:
                return self.find_entry(entryid).autostart()
            except Exception:
                return self.module.exit_json(changed=True)

    def set_autostart(self, entryid, val):
        """Set the autostart flag; in check mode report a pending change."""
        if not self.module.check_mode:
            return self.find_entry(entryid).setAutostart(val)
        else:
            try:
                state = self.find_entry(entryid).autostart()
            except Exception:
                return self.module.exit_json(changed=True)
            if bool(state) != val:
                return self.module.exit_json(changed=True)

    def refresh(self, entryid):
        return self.find_entry(entryid).refresh()

    def get_persistent(self, entryid):
        """Persistence flag mapped to 'yes'/'no'."""
        state = self.find_entry(entryid).isPersistent()
        return ENTRY_STATE_PERSISTENT_MAP.get(state, "unknown")

    def define_from_xml(self, entryid, xml):
        """Define a pool from an XML document."""
        if not self.module.check_mode:
            return self.conn.storagePoolDefineXML(xml)
        else:
            try:
                self.find_entry(entryid)
            except Exception:
                return self.module.exit_json(changed=True)
class VirtStoragePool(object):
    """High-level storage-pool operations used by core(); mostly thin
    wrappers around LibvirtConnection plus the facts/info collectors."""

    def __init__(self, uri, module):
        self.module = module
        self.uri = uri
        self.conn = LibvirtConnection(self.uri, self.module)

    def get_pool(self, entryid):
        # Raises EntryNotFound if the pool does not exist.
        return self.conn.find_entry(entryid)

    def list_pools(self, state=None):
        # Optionally filter the listing by 'active'/'inactive' state.
        results = []
        for entry in self.conn.find_entry(-1):
            if state:
                if state == self.conn.get_status2(entry):
                    results.append(entry.name())
            else:
                results.append(entry.name())
        return results

    def state(self):
        # "name status" strings for every pool.
        results = []
        for entry in self.list_pools():
            state_blurb = self.conn.get_status(entry)
            results.append("%s %s" % (entry,state_blurb))
        return results

    def autostart(self, entryid):
        return self.conn.set_autostart(entryid, True)

    def get_autostart(self, entryid):
        return self.conn.get_autostart2(entryid)

    def set_autostart(self, entryid, state):
        return self.conn.set_autostart(entryid, state)

    def create(self, entryid):
        return self.conn.create(entryid)

    def start(self, entryid):
        # Alias for create: libvirt "creates" (starts) a defined pool.
        return self.conn.create(entryid)

    def stop(self, entryid):
        return self.conn.destroy(entryid)

    def destroy(self, entryid):
        return self.conn.destroy(entryid)

    def undefine(self, entryid):
        return self.conn.undefine(entryid)

    def status(self, entryid):
        return self.conn.get_status(entryid)

    def get_xml(self, entryid):
        return self.conn.get_xml(entryid)

    def define(self, entryid, xml):
        return self.conn.define_from_xml(entryid, xml)

    def build(self, entryid, flags):
        # Translate the symbolic mode into libvirt build flags (default 0).
        return self.conn.build(entryid, ENTRY_BUILD_FLAGS_MAP.get(flags,0))

    def delete(self, entryid, flags):
        # Translate the symbolic mode into libvirt delete flags (default 0).
        return self.conn.delete(entryid, ENTRY_DELETE_FLAGS_MAP.get(flags,0))

    def refresh(self, entryid):
        return self.conn.refresh(entryid)

    def info(self):
        return self.facts(facts_mode='info')

    def facts(self, facts_mode='facts'):
        # Collect per-pool details; 'facts' wraps the result for set_fact,
        # 'info' returns it as a plain 'pools' mapping.
        results = dict()
        for entry in self.list_pools():
            results[entry] = dict()
            if self.conn.find_entry(entry):
                data = self.conn.get_info(entry)
                # libvirt returns maxMem, memory, and cpuTime as long()'s, which
                # xmlrpclib tries to convert to regular int's during serialization.
                # This throws exceptions, so convert them to strings here and
                # assume the other end of the xmlrpc connection can figure things
                # out or doesn't care.
                results[entry] = {
                    "status" : ENTRY_STATE_INFO_MAP.get(data[0],"unknown"),
                    "size_total" : str(data[1]),
                    "size_used" : str(data[2]),
                    "size_available" : str(data[3]),
                }
                results[entry]["autostart"] = self.conn.get_autostart(entry)
                results[entry]["persistent"] = self.conn.get_persistent(entry)
                results[entry]["state"] = self.conn.get_status(entry)
                results[entry]["path"] = self.conn.get_path(entry)
                results[entry]["type"] = self.conn.get_type(entry)
                results[entry]["uuid"] = self.conn.get_uuid(entry)
                if self.conn.find_entry(entry).isActive():
                    results[entry]["volume_count"] = self.conn.get_volume_count(entry)
                    results[entry]["volumes"] = list()
                    for volume in self.conn.get_volume_names(entry):
                        results[entry]["volumes"].append(volume)
                else:
                    # Volumes cannot be listed on an inactive pool.
                    results[entry]["volume_count"] = -1
                # The following attributes are optional per pool type; the
                # getters raise ValueError when the XML lacks the element.
                try:
                    results[entry]["host"] = self.conn.get_host(entry)
                except ValueError:
                    pass
                try:
                    results[entry]["source_path"] = self.conn.get_source_path(entry)
                except ValueError:
                    pass
                try:
                    results[entry]["format"] = self.conn.get_format(entry)
                except ValueError:
                    pass
                try:
                    devices = self.conn.get_devices(entry)
                    results[entry]["devices"] = devices
                except ValueError:
                    pass
            else:
                results[entry]["state"] = self.conn.get_status(entry)
        facts = dict()
        if facts_mode == 'facts':
            facts["ansible_facts"] = dict()
            facts["ansible_facts"]["ansible_libvirt_pools"] = results
        elif facts_mode == 'info':
            facts['pools'] = results
        return facts
def core(module):
    """Dispatch the requested state change, command, or autostart update.

    Returns (rc, result-dict) for main() to emit; user errors terminate
    via module.fail_json.
    """
    state = module.params.get('state', None)
    name = module.params.get('name', None)
    command = module.params.get('command', None)
    uri = module.params.get('uri', None)
    xml = module.params.get('xml', None)
    autostart = module.params.get('autostart', None)
    mode = module.params.get('mode', None)

    v = VirtStoragePool(uri, module)
    res = {}

    # 'list_pools' combined with a state filters the listing by that state.
    if state and command == 'list_pools':
        res = v.list_pools(state=state)
        if not isinstance(res, dict):
            res = {command: res}
        return VIRT_SUCCESS, res

    if state:
        if not name:
            module.fail_json(msg="state change requires a specified name")
        res['changed'] = False
        # BUGFIX: the status comparisons below used `is not 'active'` /
        # `is not 'inactive'` -- identity comparison against string
        # literals, which is a SyntaxWarning on Python >= 3.8 and only
        # worked by virtue of CPython string interning.  Use `!=`.
        if state == 'active':
            if v.status(name) != 'active':
                res['changed'] = True
                res['msg'] = v.start(name)
        elif state == 'present':
            try:
                v.get_pool(name)
            except EntryNotFound:
                if not xml:
                    module.fail_json(msg="storage pool '" + name + "' not present, but xml not specified")
                v.define(name, xml)
                res = {'changed': True, 'created': name}
        elif state == 'inactive':
            entries = v.list_pools()
            if name in entries:
                if v.status(name) != 'inactive':
                    res['changed'] = True
                    res['msg'] = v.destroy(name)
        elif state in ['undefined', 'absent']:
            entries = v.list_pools()
            if name in entries:
                # An active pool must be stopped before it can be undefined.
                if v.status(name) != 'inactive':
                    v.destroy(name)
                res['changed'] = True
                res['msg'] = v.undefine(name)
        elif state == 'deleted':
            entries = v.list_pools()
            if name in entries:
                if v.status(name) != 'inactive':
                    v.destroy(name)
                # Delete the pool contents before undefining the pool.
                v.delete(name, mode)
                res['changed'] = True
                res['msg'] = v.undefine(name)
        else:
            module.fail_json(msg="unexpected state")
        return VIRT_SUCCESS, res

    if command:
        if command in ENTRY_COMMANDS:
            if not name:
                module.fail_json(msg="%s requires 1 argument: name" % command)
            if command == 'define':
                if not xml:
                    module.fail_json(msg="define requires xml argument")
                try:
                    v.get_pool(name)
                except EntryNotFound:
                    v.define(name, xml)
                    res = {'changed': True, 'created': name}
                return VIRT_SUCCESS, res
            elif command == 'build':
                res = v.build(name, mode)
                if not isinstance(res, dict):
                    res = {'changed': True, command: res}
                return VIRT_SUCCESS, res
            elif command == 'delete':
                res = v.delete(name, mode)
                if not isinstance(res, dict):
                    res = {'changed': True, command: res}
                return VIRT_SUCCESS, res
            # All other per-pool commands map directly to a method name.
            res = getattr(v, command)(name)
            if not isinstance(res, dict):
                res = {command: res}
            return VIRT_SUCCESS, res
        elif hasattr(v, command):
            # Host-level commands take no pool name.
            res = getattr(v, command)()
            if not isinstance(res, dict):
                res = {command: res}
            return VIRT_SUCCESS, res
        else:
            module.fail_json(msg="Command %s not recognized" % command)

    if autostart is not None:
        if not name:
            module.fail_json(msg="state change requires a specified name")
        res['changed'] = False
        if autostart:
            if not v.get_autostart(name):
                res['changed'] = True
                res['msg'] = v.set_autostart(name, True)
        else:
            if v.get_autostart(name):
                res['changed'] = True
                res['msg'] = v.set_autostart(name, False)
        return VIRT_SUCCESS, res

    module.fail_json(msg="expected state or command parameter to be specified")
def main():
    """Ansible entry point: validate requirements and dispatch to core()."""
    module = AnsibleModule (
        argument_spec = dict(
            name = dict(aliases=['pool']),
            state = dict(choices=['active', 'inactive', 'present', 'absent', 'undefined', 'deleted']),
            command = dict(choices=ALL_COMMANDS),
            uri = dict(default='qemu:///system'),
            xml = dict(),
            autostart = dict(type='bool'),
            mode = dict(choices=ALL_MODES),
        ),
        supports_check_mode = True
    )
    # Both libvirt and lxml are hard requirements at runtime.
    if not HAS_VIRT:
        module.fail_json(
            msg='The `libvirt` module is not importable. Check the requirements.'
        )
    if not HAS_XML:
        module.fail_json(
            msg='The `lxml` module is not importable. Check the requirements.'
        )
    rc = VIRT_SUCCESS
    try:
        rc, result = core(module)
    except Exception as e:
        module.fail_json(msg=str(e))
    if rc != 0: # something went wrong emit the msg
        module.fail_json(rc=rc, msg=result)
    else:
        module.exit_json(**result)


# Run the module when executed directly (Ansible imports and calls main()).
if __name__ == '__main__':
    main()
| gpl-3.0 |
Azure/azure-sdk-for-python | sdk/storage/azure-storage-file-datalake/tests/test_datalake_service_client.py | 1 | 14323 | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import unittest
from azure.core.exceptions import HttpResponseError
from azure.storage.filedatalake import DataLakeServiceClient
from testcase import (
StorageTestCase,
DataLakePreparer)
# ------------------------------------------------------------------------------
from azure.storage.filedatalake._models import AnalyticsLogging, Metrics, RetentionPolicy, \
StaticWebsite, CorsRule
# ------------------------------------------------------------------------------
class DatalakeServiceTest(StorageTestCase):
    """Service-level tests for DataLakeServiceClient: getting/setting service
    properties (logging, metrics, CORS, static website, retention policy)."""

    def _setUp(self, account_name, account_key):
        # Build a service client for the given storage account; tests call
        # this explicitly because the account comes from the preparer.
        url = self._get_account_url(account_name)
        self.dsc = DataLakeServiceClient(url, account_key)
        self.config = self.dsc._config
    # --Helpers-----------------------------------------------------------------
    def _assert_properties_default(self, prop):
        # Check that service properties have been reset to their defaults.
        self.assertIsNotNone(prop)
        self._assert_logging_equal(prop['analytics_logging'], AnalyticsLogging())
        self._assert_metrics_equal(prop['hour_metrics'], Metrics())
        self._assert_metrics_equal(prop['minute_metrics'], Metrics())
        self._assert_cors_equal(prop['cors'], list())
    def _assert_logging_equal(self, log1, log2):
        # Field-by-field comparison of two AnalyticsLogging objects.
        if log1 is None or log2 is None:
            self.assertEqual(log1, log2)
            return
        self.assertEqual(log1.version, log2.version)
        self.assertEqual(log1.read, log2.read)
        self.assertEqual(log1.write, log2.write)
        self.assertEqual(log1.delete, log2.delete)
        self._assert_retention_equal(log1.retention_policy, log2.retention_policy)
    def _assert_delete_retention_policy_equal(self, policy1, policy2):
        if policy1 is None or policy2 is None:
            self.assertEqual(policy1, policy2)
            return
        self.assertEqual(policy1.enabled, policy2.enabled)
        self.assertEqual(policy1.days, policy2.days)
    def _assert_static_website_equal(self, prop1, prop2):
        if prop1 is None or prop2 is None:
            self.assertEqual(prop1, prop2)
            return
        self.assertEqual(prop1.enabled, prop2.enabled)
        self.assertEqual(prop1.index_document, prop2.index_document)
        self.assertEqual(prop1.error_document404_path, prop2.error_document404_path)
        self.assertEqual(prop1.default_index_document_path, prop2.default_index_document_path)
    def _assert_delete_retention_policy_not_equal(self, policy1, policy2):
        # Asserts the two policies differ in at least one field.
        if policy1 is None or policy2 is None:
            self.assertNotEqual(policy1, policy2)
            return
        self.assertFalse(policy1.enabled == policy2.enabled
                         and policy1.days == policy2.days)
    def _assert_metrics_equal(self, metrics1, metrics2):
        if metrics1 is None or metrics2 is None:
            self.assertEqual(metrics1, metrics2)
            return
        self.assertEqual(metrics1.version, metrics2.version)
        self.assertEqual(metrics1.enabled, metrics2.enabled)
        self.assertEqual(metrics1.include_apis, metrics2.include_apis)
        self._assert_retention_equal(metrics1.retention_policy, metrics2.retention_policy)
    def _assert_cors_equal(self, cors1, cors2):
        # Compare two CORS rule lists pairwise by their collection sizes.
        if cors1 is None or cors2 is None:
            self.assertEqual(cors1, cors2)
            return
        self.assertEqual(len(cors1), len(cors2))
        for i in range(0, len(cors1)):
            rule1 = cors1[i]
            rule2 = cors2[i]
            self.assertEqual(len(rule1.allowed_origins), len(rule2.allowed_origins))
            self.assertEqual(len(rule1.allowed_methods), len(rule2.allowed_methods))
            self.assertEqual(rule1.max_age_in_seconds, rule2.max_age_in_seconds)
            self.assertEqual(len(rule1.exposed_headers), len(rule2.exposed_headers))
            self.assertEqual(len(rule1.allowed_headers), len(rule2.allowed_headers))
    def _assert_retention_equal(self, ret1, ret2):
        self.assertEqual(ret1.enabled, ret2.enabled)
        self.assertEqual(ret1.days, ret2.days)
    # --Test cases per service ---------------------------------------
    @DataLakePreparer()
    def test_datalake_service_properties(self, datalake_storage_account_name, datalake_storage_account_key):
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Act
        resp = self.dsc.set_service_properties(
            analytics_logging=AnalyticsLogging(),
            hour_metrics=Metrics(),
            minute_metrics=Metrics(),
            cors=list(),
            target_version='2014-02-14'
        )
        # Assert
        self.assertIsNone(resp)
        props = self.dsc.get_service_properties()
        self._assert_properties_default(props)
        self.assertEqual('2014-02-14', props['target_version'])
    @DataLakePreparer()
    def test_empty_set_service_properties_exception(self, datalake_storage_account_name, datalake_storage_account_key):
        # Calling set_service_properties with no arguments must raise.
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        with self.assertRaises(ValueError):
            self.dsc.set_service_properties()
    @DataLakePreparer()
    def test_set_default_service_version(self, datalake_storage_account_name, datalake_storage_account_key):
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Act
        self.dsc.set_service_properties(target_version='2014-02-14')
        # Assert
        received_props = self.dsc.get_service_properties()
        self.assertEqual(received_props['target_version'], '2014-02-14')
    @DataLakePreparer()
    def test_set_delete_retention_policy(self, datalake_storage_account_name, datalake_storage_account_key):
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        delete_retention_policy = RetentionPolicy(enabled=True, days=2)
        # Act
        self.dsc.set_service_properties(delete_retention_policy=delete_retention_policy)
        # Assert
        received_props = self.dsc.get_service_properties()
        self._assert_delete_retention_policy_equal(received_props['delete_retention_policy'], delete_retention_policy)
    @DataLakePreparer()
    def test_set_delete_retention_policy_edge_cases(self, datalake_storage_account_name, datalake_storage_account_key):
        # Should work with minimum settings (1 day)
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        delete_retention_policy = RetentionPolicy(enabled=True, days=1)
        self.dsc.set_service_properties(delete_retention_policy=delete_retention_policy)
        # Assert
        received_props = self.dsc.get_service_properties()
        self._assert_delete_retention_policy_equal(received_props['delete_retention_policy'], delete_retention_policy)
        # Should work with maximum settings
        delete_retention_policy = RetentionPolicy(enabled=True, days=365)
        self.dsc.set_service_properties(delete_retention_policy=delete_retention_policy)
        # Assert
        received_props = self.dsc.get_service_properties()
        self._assert_delete_retention_policy_equal(received_props['delete_retention_policy'], delete_retention_policy)
        # Should not work with 0 days
        delete_retention_policy = RetentionPolicy(enabled=True, days=0)
        with self.assertRaises(HttpResponseError):
            self.dsc.set_service_properties(delete_retention_policy=delete_retention_policy)
        # Assert
        received_props = self.dsc.get_service_properties()
        self._assert_delete_retention_policy_not_equal(
            received_props['delete_retention_policy'], delete_retention_policy)
        # Should not work with 366 days
        delete_retention_policy = RetentionPolicy(enabled=True, days=366)
        with self.assertRaises(HttpResponseError):
            self.dsc.set_service_properties(delete_retention_policy=delete_retention_policy)
        # Assert
        received_props = self.dsc.get_service_properties()
        self._assert_delete_retention_policy_not_equal(
            received_props['delete_retention_policy'], delete_retention_policy)
    @DataLakePreparer()
    def test_set_static_website_properties(self, datalake_storage_account_name, datalake_storage_account_key):
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        static_website = StaticWebsite(
            enabled=True,
            index_document="index.html",
            error_document404_path="errors/error/404error.html")
        # Act
        self.dsc.set_service_properties(static_website=static_website)
        # Assert
        received_props = self.dsc.get_service_properties()
        self._assert_static_website_equal(received_props['static_website'], static_website)
    @DataLakePreparer()
    def test_disabled_static_website_properties(self, datalake_storage_account_name, datalake_storage_account_key):
        # When disabled, the service drops the document paths entirely.
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        static_website = StaticWebsite(enabled=False, index_document="index.html",
                                       error_document404_path="errors/error/404error.html")
        # Act
        self.dsc.set_service_properties(static_website=static_website)
        # Assert
        received_props = self.dsc.get_service_properties()
        self._assert_static_website_equal(received_props['static_website'], StaticWebsite(enabled=False))
    @DataLakePreparer()
    def test_set_static_website_props_dont_impact_other_props(
            self, datalake_storage_account_name, datalake_storage_account_key):
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        cors_rule1 = CorsRule(['www.xyz.com'], ['GET'])
        allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"]
        allowed_methods = ['GET', 'PUT']
        max_age_in_seconds = 500
        exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"]
        allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"]
        cors_rule2 = CorsRule(
            allowed_origins,
            allowed_methods,
            max_age_in_seconds=max_age_in_seconds,
            exposed_headers=exposed_headers,
            allowed_headers=allowed_headers)
        cors = [cors_rule1, cors_rule2]
        # Act to set cors
        self.dsc.set_service_properties(cors=cors)
        # Assert cors is updated
        received_props = self.dsc.get_service_properties()
        self._assert_cors_equal(received_props['cors'], cors)
        # Arrange to set static website properties
        static_website = StaticWebsite(enabled=True, index_document="index.html",
                                       error_document404_path="errors/error/404error.html")
        # Act to set static website
        self.dsc.set_service_properties(static_website=static_website)
        # Assert static website was updated was cors was unchanged
        received_props = self.dsc.get_service_properties()
        self._assert_static_website_equal(received_props['static_website'], static_website)
        self._assert_cors_equal(received_props['cors'], cors)
    @DataLakePreparer()
    def test_set_logging(self, datalake_storage_account_name, datalake_storage_account_key):
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        logging = AnalyticsLogging(read=True, write=True, delete=True,
                                   retention_policy=RetentionPolicy(enabled=True, days=5))
        # Act
        self.dsc.set_service_properties(analytics_logging=logging)
        # Assert
        received_props = self.dsc.get_service_properties()
        self._assert_logging_equal(received_props['analytics_logging'], logging)
    @DataLakePreparer()
    def test_set_hour_metrics(self, datalake_storage_account_name, datalake_storage_account_key):
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        hour_metrics = Metrics(
            include_apis=False, enabled=True, retention_policy=RetentionPolicy(enabled=True, days=5))
        # Act
        self.dsc.set_service_properties(hour_metrics=hour_metrics)
        # Assert
        received_props = self.dsc.get_service_properties()
        self._assert_metrics_equal(received_props['hour_metrics'], hour_metrics)
    @DataLakePreparer()
    def test_set_minute_metrics(self, datalake_storage_account_name, datalake_storage_account_key):
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        minute_metrics = Metrics(enabled=True, include_apis=True,
                                 retention_policy=RetentionPolicy(enabled=True, days=5))
        # Act
        self.dsc.set_service_properties(minute_metrics=minute_metrics)
        # Assert
        received_props = self.dsc.get_service_properties()
        self._assert_metrics_equal(received_props['minute_metrics'], minute_metrics)
    @DataLakePreparer()
    def test_set_cors(self, datalake_storage_account_name, datalake_storage_account_key):
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        cors_rule1 = CorsRule(['www.xyz.com'], ['GET'])
        allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"]
        allowed_methods = ['GET', 'PUT']
        max_age_in_seconds = 500
        exposed_headers = ["x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc", "x-ms-meta-bcd"]
        allowed_headers = ["x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz", "x-ms-meta-foo"]
        cors_rule2 = CorsRule(
            allowed_origins,
            allowed_methods,
            max_age_in_seconds=max_age_in_seconds,
            exposed_headers=exposed_headers,
            allowed_headers=allowed_headers)
        cors = [cors_rule1, cors_rule2]
        # Act
        self.dsc.set_service_properties(cors=cors)
        # Assert
        received_props = self.dsc.get_service_properties()
        self._assert_cors_equal(received_props['cors'], cors)
| mit |
fursund/EmguCV-Unity | opencv/samples/swig_python/fitellipse.py | 6 | 3716 | #!/usr/bin/python
"""
This program is demonstration for ellipse fitting. Program finds
contours and approximate it by ellipses.
Trackbar specify threshold parametr.
White lines is contours. Red lines is fitting ellipses.
Original C implementation by: Denis Burenkov.
Python implementation by: Roman Stanchak
"""
import sys
from opencv import cv
from opencv import highgui
image02 = None
image03 = None
image04 = None
def process_image( slider_pos ):
    """
    Trackbar callback: threshold the source image at slider_pos, find all
    contours, draw them and the ellipse fitted to each, and show the result.

    Reads the module globals image02 (scratch), image03 (grayscale source)
    and image04 (output) that are created in the __main__ block below.
    """
    stor = cv.cvCreateMemStorage(0);
    # Threshold the source image; cv.cvFindContours() requires a binary input.
    cv.cvThreshold( image03, image02, slider_pos, 255, cv.CV_THRESH_BINARY );
    # Find all contours (flat list, every point kept).
    nb_contours, cont = cv.cvFindContours (image02,
            stor,
            cv.sizeof_CvContour,
            cv.CV_RETR_LIST,
            cv.CV_CHAIN_APPROX_NONE,
            cv.cvPoint (0,0))
    # Clear the scratch and output images (IPL images are mutated in place).
    cv.cvZero(image02);
    cv.cvZero(image04);
    # Draw each contour and fit an ellipse to it.
    for c in cont.hrange():
        count = c.total; # number of points in this contour
        # cv.cvFitEllipse2 needs at least 6 points; skip smaller contours.
        if( count < 6 ):
            continue;
        # Allocate matrices for the contour point set.
        PointArray = cv.cvCreateMat(1, count, cv.CV_32SC2)
        PointArray2D32f= cv.cvCreateMat( 1, count, cv.CV_32FC2)
        # Copy the contour sequence into the integer point matrix.
        cv.cvCvtSeqToArray(c, PointArray, cv.cvSlice(0, cv.CV_WHOLE_SEQ_END_INDEX));
        # Convert the integer points to 32-bit floats for the fitter.
        cv.cvConvert( PointArray, PointArray2D32f )
        box = cv.CvBox2D()
        # Fit an ellipse (rotated box) to the current contour.
        box = cv.cvFitEllipse2(PointArray2D32f);
        # Draw the contour itself in white.
        cv.cvDrawContours(image04, c, cv.CV_RGB(255,255,255), cv.CV_RGB(255,255,255),0,1,8,cv.cvPoint(0,0));
        # Convert ellipse data from float to integer representation.
        center = cv.CvPoint()
        size = cv.CvSize()
        center.x = cv.cvRound(box.center.x);
        center.y = cv.cvRound(box.center.y);
        # cvEllipse takes half-axes, hence the * 0.5.
        size.width = cv.cvRound(box.size.width*0.5);
        size.height = cv.cvRound(box.size.height*0.5);
        # Flip the angle sign to match cvEllipse's angle convention.
        box.angle = -box.angle;
        # Draw the fitted ellipse, anti-aliased.
        cv.cvEllipse(image04, center, size,
                     box.angle, 0, 360,
                     cv.CV_RGB(0,0,255), 1, cv.CV_AA, 0);
    # Display the result through HighGUI.
    highgui.cvShowImage( "Result", image04 );
if __name__ == '__main__':
    # Optional single CLI argument overrides the default sample image path.
    argc = len(sys.argv)
    filename = "../c/stuff.jpg"
    if(argc == 2):
        filename = sys.argv[1]
    # Initial trackbar position (threshold value).
    slider_pos = 70
    # Load the image, forcing single-channel grayscale (flag 0).
    image03 = highgui.cvLoadImage(filename, 0)
    if not image03:
        print "Could not load image " + filename
        sys.exit(-1)
    # Create the scratch and output images with the same size/depth as the source.
    image02 = cv.cvCloneImage( image03 );
    image04 = cv.cvCloneImage( image03 );
    # Create windows.
    highgui.cvNamedWindow("Source", 1);
    highgui.cvNamedWindow("Result", 1);
    # Show the original image.
    highgui.cvShowImage("Source", image03);
    # Attach the threshold trackbar; moving it re-runs process_image.
    highgui.cvCreateTrackbar( "Threshold", "Result", slider_pos, 255, process_image );
    # Render once so the Result window is populated before any slider movement.
    process_image( 1 );
    # Wait for a key stroke; the same function arranges events processing.
    print "Press any key to exit"
    highgui.cvWaitKey(0);
    highgui.cvDestroyWindow("Source");
    highgui.cvDestroyWindow("Result");
| gpl-3.0 |
morphis/home-assistant | homeassistant/components/automation/sun.py | 6 | 1456 | """
Offer sun based automation rules.
For more details about this automation rule, please refer to the documentation
at https://home-assistant.io/components/automation/#sun-trigger
"""
import asyncio
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.const import (
CONF_EVENT, CONF_OFFSET, CONF_PLATFORM, SUN_EVENT_SUNRISE)
from homeassistant.helpers.event import async_track_sunrise, async_track_sunset
import homeassistant.helpers.config_validation as cv
DEPENDENCIES = ['sun']
_LOGGER = logging.getLogger(__name__)
TRIGGER_SCHEMA = vol.Schema({
vol.Required(CONF_PLATFORM): 'sun',
vol.Required(CONF_EVENT): cv.sun_event,
vol.Required(CONF_OFFSET, default=timedelta(0)): cv.time_period,
})
@asyncio.coroutine
def async_trigger(hass, config, action):
    """Listen for sunrise/sunset events as configured and fire the action."""
    event = config.get(CONF_EVENT)
    offset = config.get(CONF_OFFSET)

    @callback
    def handle_sun_event():
        """Run the automation action with the sun-trigger context attached."""
        trigger_info = {
            'trigger': {
                'platform': 'sun',
                'event': event,
                'offset': offset,
            },
        }
        hass.async_run_job(action, trigger_info)

    # Sunrise and sunset use different helper listeners; pick the right one
    # and return its unsubscribe callable.
    track = (async_track_sunrise if event == SUN_EVENT_SUNRISE
             else async_track_sunset)
    return track(hass, handle_sun_event, offset)
| apache-2.0 |
gautamMalu/rootfs_xen_arndale | usr/lib/python3.4/idlelib/IdleHistory.py | 122 | 4052 | "Implement Idle Shell history mechanism with History class"
from idlelib.configHandler import idleConf
class History:
    ''' Implement Idle Shell history mechanism.
    store - Store source statement (called from PyShell.resetoutput).
    fetch - Fetch stored statement matching prefix already entered.
    history_next - Bound to <<history-next>> event (default Alt-N).
    history_prev - Bound to <<history-prev>> event (default Alt-P).
    '''
    def __init__(self, text):
        '''Initialize data attributes and bind event methods.
        .text - Idle wrapper of tk Text widget, with .bell().
        .history - source statements, possibly with multiple lines.
        .prefix - source already entered at prompt; filters history list.
        .pointer - index into history.
        .cyclic - wrap around history list (or not).
        '''
        self.text = text
        self.history = []
        self.prefix = None
        self.pointer = None
        # User-configurable: whether stepping past either end wraps around.
        self.cyclic = idleConf.GetOption("main", "History", "cyclic", 1, "bool")
        text.bind("<<history-previous>>", self.history_prev)
        text.bind("<<history-next>>", self.history_next)
    def history_next(self, event):
        "Fetch later statement; start with earliest if cyclic."
        self.fetch(reverse=False)
        return "break"  # stop tk from further processing the event
    def history_prev(self, event):
        "Fetch earlier statement; start with most recent."
        self.fetch(reverse=True)
        return "break"  # stop tk from further processing the event
    def fetch(self, reverse):
        '''Fetch statement and replace current line in text widget.
        Set prefix and pointer as needed for successive fetches.
        Reset them to None, None when returning to the start line.
        Sound bell when return to start line or cannot leave a line
        because cyclic is False.
        '''
        nhist = len(self.history)
        pointer = self.pointer
        prefix = self.prefix
        if pointer is not None and prefix is not None:
            # If the user moved the cursor or edited the line since the last
            # fetch, abandon the in-progress history walk and start over.
            if self.text.compare("insert", "!=", "end-1c") or \
                    self.text.get("iomark", "end-1c") != self.history[pointer]:
                pointer = prefix = None
                self.text.mark_set("insert", "end-1c")  # != after cursor move
        if pointer is None or prefix is None:
            # Start a new walk: everything typed after the prompt ("iomark")
            # becomes the filter prefix.
            prefix = self.text.get("iomark", "end-1c")
            if reverse:
                pointer = nhist  # will be decremented
            else:
                if self.cyclic:
                    pointer = -1  # will be incremented
                else:  # abort history_next
                    self.text.bell()
                    return
        nprefix = len(prefix)
        while 1:
            pointer += -1 if reverse else 1
            if pointer < 0 or pointer >= nhist:
                # Ran off either end of the history list.
                self.text.bell()
                if not self.cyclic and pointer < 0:  # abort history_prev
                    return
                else:
                    # Wrap back to the start line: restore the prefix text
                    # and reset the walk state.
                    if self.text.get("iomark", "end-1c") != prefix:
                        self.text.delete("iomark", "end-1c")
                        self.text.insert("iomark", prefix)
                    pointer = prefix = None
                    break
            item = self.history[pointer]
            # Accept only statements that extend the typed prefix.
            if item[:nprefix] == prefix and len(item) > nprefix:
                self.text.delete("iomark", "end-1c")
                self.text.insert("iomark", item)
                break
        self.text.see("insert")
        self.text.tag_remove("sel", "1.0", "end")
        self.pointer = pointer
        self.prefix = prefix
    def store(self, source):
        "Store Shell input statement into history list."
        source = source.strip()
        # Ignore trivial statements (2 characters or fewer).
        if len(source) > 2:
            # avoid duplicates
            try:
                self.history.remove(source)
            except ValueError:
                pass
            self.history.append(source)
        # Any new statement resets the in-progress history walk.
        self.pointer = None
        self.prefix = None
if __name__ == "__main__":
    # Run this module's unit tests when executed directly.
    from unittest import main
    main('idlelib.idle_test.test_idlehistory', verbosity=2, exit=False)
| gpl-2.0 |
arbenson/mrtsqr | dumbo/hyy-python-hadoop/build/lib/hadoop/typedbytes2/typedbytes2.py | 4 | 10138 | #!/usr/bin/env python
# By Yangyang Hou, based on TypedBytes for python by Klaas Bosteels
# Typed bytes types:
BYTES = 0
BYTE = 1
BOOL = 2
INT = 3
LONG = 4
FLOAT = 5
DOUBLE = 6
STRING = 7
VECTOR = 8
LIST = 9
MAP = 10
# Application-specific types:
PICKLE = 100
BYTESTRING = 101
# Low-level types:
MARKER = 255
def classes():
from cPickle import dumps, loads, UnpicklingError, HIGHEST_PROTOCOL
from struct import pack, unpack, error as StructError
from array import array
from types import BooleanType, IntType, LongType, FloatType
from types import UnicodeType, StringType, TupleType, ListType, DictType
from datetime import datetime, date
from decimal import Decimal
UNICODE_ENCODING = 'utf8'
_len = len
class Bytes(str):
def __repr__(self):
return "Bytes(" + str.__repr__(self) + ")"
class Input(object):
def __init__(self, file, unicode_errors='strict'):
self.file = file
self.unicode_errors = unicode_errors
self.eof = False
self.handler_table = self._make_handler_table()
def _read(self):
try:
t = self.file.readUByte()
self.t = t
#print 'I am a type code ',t
except StructError:
self.eof = True
raise StopIteration
return self.handler_table[t](self)
def read(self):
try:
return self._read()
except StopIteration:
return None
def _reads(self):
r = self._read
while 1:
yield r()
__iter__ = reads = _reads
def close(self):
self.file.close()
def read_bytes(self):
count = self.file.readInt()
value = self.file.read(count)
#print 'in read_bytes, len is ', len(value),'value is', value
if _len(value) != count:
raise StructError("EOF before reading all of bytes type")
return Bytes(value)
def read_byte(self):
return self.file.readByte()
def read_bool(self):
return bool(self.file.readBoolean())
def read_int(self):
return self.file.readInt()
def read_long(self):
return self.file.readLong()
def read_float(self):
return self.file.readFloat()
def read_double(self):
return self.file.readDouble()
def read_string(self):
count = self.file.readInt()
value = self.file.read(count)
if _len(value) != count:
raise StructError("EOF before reading all of string")
return value
read_bytestring = read_string
def read_unicode(self):
count = self.file.readInt()
value = self.file.read(count)
if _len(value) != count:
raise StructError("EOF before reading all of string")
return value.decode(UNICODE_ENCODING, self.unicode_errors)
def read_vector(self):
r = self._read
count = self.file.readInt()
try:
return tuple(r() for i in xrange(count))
except StopIteration:
raise StructError("EOF before all vector elements read")
def read_list(self):
value = list(self._reads())
if self.eof:
raise StructError("EOF before end-of-list marker")
return value
def read_map(self):
r = self._read
count = self.file.readInt()
return dict((r(), r()) for i in xrange(count))
def read_pickle(self):
count = self.file.readInt()
bytes = self.file.read(count)
if _len(bytes) != count:
raise StructError("EOF before reading all of bytes type")
return loads(bytes)
def read_marker(self):
raise StopIteration
def invalid_typecode(self):
raise StructError("Invalid type byte: " + str(self.t))
TYPECODE_HANDLER_MAP = {
BYTES: read_bytes,
BYTE: read_byte,
BOOL: read_bool,
INT: read_int,
LONG: read_long,
FLOAT: read_float,
DOUBLE: read_double,
STRING: read_string,
VECTOR: read_vector,
LIST: read_list,
MAP: read_map,
PICKLE: read_pickle,
BYTESTRING: read_bytestring,
MARKER: read_marker
}
def _make_handler_table(self):
return list(Input.TYPECODE_HANDLER_MAP.get(i,
Input.invalid_typecode) for i in xrange(256))
def register(self, typecode, handler):
self.handler_table[typecode] = handler
def lookup(self, typecode):
return lambda: self.handler_table[typecode](self)
_BYTES, _BYTE, _BOOL = BYTES, BYTE, BOOL
_INT, _LONG, _FLOAT, _DOUBLE = INT, LONG, FLOAT, DOUBLE
_STRING, _VECTOR, _LIST, _MAP = STRING, VECTOR, LIST, MAP
_PICKLE, _BYTESTRING, _MARKER = PICKLE, BYTESTRING, MARKER
_int, _type = int, type
def flatten(iterable):
for i in iterable:
for j in i:
yield j
class Output(object):
def __init__(self, file, unicode_errors='strict'):
self.file = file
self.unicode_errors = unicode_errors
self.handler_map = self._make_handler_map()
def __del__(self):
if not file.closed:
self.file.flush()
def _write(self, obj):
try:
writefunc = self.handler_map[_type(obj)]
except KeyError:
writefunc = Output.write_pickle
#print 'I am object ',obj, 'Type is ',_type(obj)
writefunc(self, obj)
write = _write
def _writes(self, iterable):
#print 'in _writes', iterable
w = self._write
for obj in iterable:
w(obj)
writes = _writes
def flush(self):
self.file.flush()
def close(self):
self.file.close()
def write_bytes(self, bytes):
self.file.writeUByte(_BYTES)
self.file.writeInt(_len(bytes))
self.file.write(bytes)
def write_byte(self, byte):
self.file.writeUByte(_BYTE)
self.file.writeByte(byte)
def write_bool(self, bool_):
self.file.writeUByte(_BOOL)
self.file.writeByte(_int(bool_))
def write_int(self, int_):
# Python ints are 64-bit
if -2147483648 <= int_ <= 2147483647:
self.file.writeUByte(_INT)
self.file.writeInt(int_)
else:
self.file.writeUByte(_LONG)
self.file.writeLong(int_)
def write_long(self, long_):
# Python longs are infinite precision
if -9223372036854775808L <= long_ <= 9223372036854775807L:
self.file.writeUByte(_LONG)
self.file.writeLong(long_)
else:
self.write_pickle(long_)
def write_float(self, float_):
self.file.writeUByte(_FLOAT)
self.file.writeFloat(float_)
def write_double(self, double):
self.file.writeUByte(_DOUBLE)
self.file.writeDouble(double)
def write_string(self, string):
self.file.writeUByte(_STRING)
self.file.writeInt(_len(string))
self.file.write(string)
def write_bytestring(self, string):
self.file.writeUByte(_BYTESTRING)
self.file.writeInt(_len(string))
self.file.write(string)
def write_unicode(self, string):
string = string.encode(UNICODE_ENCODING, self.unicode_errors)
self.file.writeUByte(_STRING)
self.file.writeInt(_len(string))
self.file.write(string)
def write_vector(self, vector):
self.file.writeUByte(_VECTOR)
self.file.writeInt(_len(vector))
#print 'vector length ',len(vector)
self._writes(vector)
def write_list(self, list_):
self.file.writeUByte(LIST)
self._writes(list_)
self.file.writeUByte(MARKER)
def write_map(self, map):
self.file.writeUByte(_MAP)
self.file.writeInt(_len(map))
self._writes(flatten(map.iteritems()))
def write_pickle(self, obj):
bytes = dumps(obj, HIGHEST_PROTOCOL)
self.file.writeUByte(_PICKLE)
self.file.writeInt(_len(bytes))
self.file.write(bytes)
def write_array(self, arr):
bytes = arr.tostring()
self.file.writeUByte(_BYTES)
self.file.writeInt(_len(bytes))
self.file.write(bytes)
TYPE_HANDLER_MAP = {
BooleanType: write_bool,
IntType: write_int,
LongType: write_long,
FloatType: write_double,
StringType: write_string,
TupleType: write_vector,
ListType: write_list,
DictType: write_map,
UnicodeType: write_unicode,
Bytes: write_bytes,
datetime: write_pickle,
date: write_pickle,
Decimal: write_pickle,
array: write_array
}
def _make_handler_map(self):
return dict(Output.TYPE_HANDLER_MAP)
def register(self, python_type, handler):
self.handler_map[python_type] = handler
def lookup(self, python_type):
handler_map = self.handler_map
if python_type in handler_map:
return lambda obj: handler_map[python_type](self, obj)
else:
return lambda obj: Output.write_pickle(self, obj)
return Input, Output, Bytes
Input, Output, Bytes = classes() | bsd-2-clause |
peap/django-debug-toolbar | tests/settings.py | 2 | 1809 | """Django settings for tests."""
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
SECRET_KEY = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890'
INTERNAL_IPS = ['127.0.0.1']
LOGGING_CONFIG = None # avoids spurious output in tests
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'debug_toolbar',
'tests',
]
MEDIA_URL = '/media/' # Avoids https://code.djangoproject.com/ticket/21451
MIDDLEWARE_CLASSES = [
'debug_toolbar.middleware.DebugToolbarMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'tests.urls'
STATIC_ROOT = os.path.join(BASE_DIR, 'tests', 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'tests', 'additional_static'),
("prefix", os.path.join(BASE_DIR, 'tests', 'additional_static')),
]
# Cache and database
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
'second': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
# Debug Toolbar configuration
DEBUG_TOOLBAR_CONFIG = {
# Django's test client sets wsgi.multiprocess to True inappropriately
'RENDER_PANELS': False,
}
| bsd-3-clause |
Weihonghao/ECM | Vpy34/lib/python3.5/site-packages/scipy/optimize/tests/test_tnc.py | 110 | 11264 | """
Unit tests for TNC optimization routine from tnc.py
"""
from numpy.testing import (assert_allclose, assert_equal, TestCase,
run_module_suite)
from scipy import optimize
import numpy as np
from math import pow
class TestTnc(TestCase):
"""TNC non-linear optimization.
These tests are taken from Prof. K. Schittkowski's test examples
for constrained non-linear programming.
http://www.uni-bayreuth.de/departments/math/~kschittkowski/home.htm
"""
def setUp(self):
# options for minimize
self.opts = {'disp': False, 'maxiter': 200}
# objective functions and jacobian for each test
def f1(self, x, a=100.0):
return a * pow((x[1] - pow(x[0], 2)), 2) + pow(1.0 - x[0], 2)
def g1(self, x, a=100.0):
dif = [0, 0]
dif[1] = 2 * a * (x[1] - pow(x[0], 2))
dif[0] = -2.0 * (x[0] * (dif[1] - 1.0) + 1.0)
return dif
def fg1(self, x, a=100.0):
return self.f1(x, a), self.g1(x, a)
def f3(self, x):
return x[1] + pow(x[1] - x[0], 2) * 1.0e-5
def g3(self, x):
dif = [0, 0]
dif[0] = -2.0 * (x[1] - x[0]) * 1.0e-5
dif[1] = 1.0 - dif[0]
return dif
def fg3(self, x):
return self.f3(x), self.g3(x)
def f4(self, x):
return pow(x[0] + 1.0, 3) / 3.0 + x[1]
def g4(self, x):
dif = [0, 0]
dif[0] = pow(x[0] + 1.0, 2)
dif[1] = 1.0
return dif
def fg4(self, x):
return self.f4(x), self.g4(x)
def f5(self, x):
return np.sin(x[0] + x[1]) + pow(x[0] - x[1], 2) - \
1.5 * x[0] + 2.5 * x[1] + 1.0
def g5(self, x):
dif = [0, 0]
v1 = np.cos(x[0] + x[1])
v2 = 2.0*(x[0] - x[1])
dif[0] = v1 + v2 - 1.5
dif[1] = v1 - v2 + 2.5
return dif
def fg5(self, x):
return self.f5(x), self.g5(x)
def f38(self, x):
return (100.0 * pow(x[1] - pow(x[0], 2), 2) +
pow(1.0 - x[0], 2) + 90.0 * pow(x[3] - pow(x[2], 2), 2) +
pow(1.0 - x[2], 2) + 10.1 * (pow(x[1] - 1.0, 2) +
pow(x[3] - 1.0, 2)) +
19.8 * (x[1] - 1.0) * (x[3] - 1.0)) * 1.0e-5
def g38(self, x):
dif = [0, 0, 0, 0]
dif[0] = (-400.0 * x[0] * (x[1] - pow(x[0], 2)) -
2.0 * (1.0 - x[0])) * 1.0e-5
dif[1] = (200.0 * (x[1] - pow(x[0], 2)) + 20.2 * (x[1] - 1.0) +
19.8 * (x[3] - 1.0)) * 1.0e-5
dif[2] = (- 360.0 * x[2] * (x[3] - pow(x[2], 2)) -
2.0 * (1.0 - x[2])) * 1.0e-5
dif[3] = (180.0 * (x[3] - pow(x[2], 2)) + 20.2 * (x[3] - 1.0) +
19.8 * (x[1] - 1.0)) * 1.0e-5
return dif
def fg38(self, x):
return self.f38(x), self.g38(x)
def f45(self, x):
return 2.0 - x[0] * x[1] * x[2] * x[3] * x[4] / 120.0
def g45(self, x):
dif = [0] * 5
dif[0] = - x[1] * x[2] * x[3] * x[4] / 120.0
dif[1] = - x[0] * x[2] * x[3] * x[4] / 120.0
dif[2] = - x[0] * x[1] * x[3] * x[4] / 120.0
dif[3] = - x[0] * x[1] * x[2] * x[4] / 120.0
dif[4] = - x[0] * x[1] * x[2] * x[3] / 120.0
return dif
def fg45(self, x):
return self.f45(x), self.g45(x)
# tests
# minimize with method=TNC
def test_minimize_tnc1(self):
x0, bnds = [-2, 1], ([-np.inf, None], [-1.5, None])
xopt = [1, 1]
iterx = [] # to test callback
res = optimize.minimize(self.f1, x0, method='TNC', jac=self.g1,
bounds=bnds, options=self.opts,
callback=iterx.append)
assert_allclose(res.fun, self.f1(xopt), atol=1e-8)
assert_equal(len(iterx), res.nit)
def test_minimize_tnc1b(self):
x0, bnds = np.matrix([-2, 1]), ([-np.inf, None],[-1.5, None])
xopt = [1, 1]
x = optimize.minimize(self.f1, x0, method='TNC',
bounds=bnds, options=self.opts).x
assert_allclose(self.f1(x), self.f1(xopt), atol=1e-4)
def test_minimize_tnc1c(self):
x0, bnds = [-2, 1], ([-np.inf, None],[-1.5, None])
xopt = [1, 1]
x = optimize.minimize(self.fg1, x0, method='TNC',
jac=True, bounds=bnds,
options=self.opts).x
assert_allclose(self.f1(x), self.f1(xopt), atol=1e-8)
def test_minimize_tnc2(self):
x0, bnds = [-2, 1], ([-np.inf, None], [1.5, None])
xopt = [-1.2210262419616387, 1.5]
x = optimize.minimize(self.f1, x0, method='TNC',
jac=self.g1, bounds=bnds,
options=self.opts).x
assert_allclose(self.f1(x), self.f1(xopt), atol=1e-8)
def test_minimize_tnc3(self):
x0, bnds = [10, 1], ([-np.inf, None], [0.0, None])
xopt = [0, 0]
x = optimize.minimize(self.f3, x0, method='TNC',
jac=self.g3, bounds=bnds,
options=self.opts).x
assert_allclose(self.f3(x), self.f3(xopt), atol=1e-8)
def test_minimize_tnc4(self):
x0,bnds = [1.125, 0.125], [(1, None), (0, None)]
xopt = [1, 0]
x = optimize.minimize(self.f4, x0, method='TNC',
jac=self.g4, bounds=bnds,
options=self.opts).x
assert_allclose(self.f4(x), self.f4(xopt), atol=1e-8)
def test_minimize_tnc5(self):
x0, bnds = [0, 0], [(-1.5, 4),(-3, 3)]
xopt = [-0.54719755119659763, -1.5471975511965976]
x = optimize.minimize(self.f5, x0, method='TNC',
jac=self.g5, bounds=bnds,
options=self.opts).x
assert_allclose(self.f5(x), self.f5(xopt), atol=1e-8)
def test_minimize_tnc38(self):
x0, bnds = np.array([-3, -1, -3, -1]), [(-10, 10)]*4
xopt = [1]*4
x = optimize.minimize(self.f38, x0, method='TNC',
jac=self.g38, bounds=bnds,
options=self.opts).x
assert_allclose(self.f38(x), self.f38(xopt), atol=1e-8)
def test_minimize_tnc45(self):
x0, bnds = [2] * 5, [(0, 1), (0, 2), (0, 3), (0, 4), (0, 5)]
xopt = [1, 2, 3, 4, 5]
x = optimize.minimize(self.f45, x0, method='TNC',
jac=self.g45, bounds=bnds,
options=self.opts).x
assert_allclose(self.f45(x), self.f45(xopt), atol=1e-8)
# fmin_tnc
def test_tnc1(self):
fg, x, bounds = self.fg1, [-2, 1], ([-np.inf, None], [-1.5, None])
xopt = [1, 1]
x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds, args=(100.0, ),
messages=optimize.tnc.MSG_NONE,
maxfun=200)
assert_allclose(self.f1(x), self.f1(xopt), atol=1e-8,
err_msg="TNC failed with status: " +
optimize.tnc.RCSTRINGS[rc])
def test_tnc1b(self):
x, bounds = [-2, 1], ([-np.inf, None], [-1.5, None])
xopt = [1, 1]
x, nf, rc = optimize.fmin_tnc(self.f1, x, approx_grad=True,
bounds=bounds,
messages=optimize.tnc.MSG_NONE,
maxfun=200)
assert_allclose(self.f1(x), self.f1(xopt), atol=1e-4,
err_msg="TNC failed with status: " +
optimize.tnc.RCSTRINGS[rc])
def test_tnc1c(self):
x, bounds = [-2, 1], ([-np.inf, None], [-1.5, None])
xopt = [1, 1]
x, nf, rc = optimize.fmin_tnc(self.f1, x, fprime=self.g1,
bounds=bounds,
messages=optimize.tnc.MSG_NONE,
maxfun=200)
assert_allclose(self.f1(x), self.f1(xopt), atol=1e-8,
err_msg="TNC failed with status: " +
optimize.tnc.RCSTRINGS[rc])
def test_tnc2(self):
fg, x, bounds = self.fg1, [-2, 1], ([-np.inf, None], [1.5, None])
xopt = [-1.2210262419616387, 1.5]
x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
messages=optimize.tnc.MSG_NONE,
maxfun=200)
assert_allclose(self.f1(x), self.f1(xopt), atol=1e-8,
err_msg="TNC failed with status: " +
optimize.tnc.RCSTRINGS[rc])
def test_tnc3(self):
fg, x, bounds = self.fg3, [10, 1], ([-np.inf, None], [0.0, None])
xopt = [0, 0]
x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
messages=optimize.tnc.MSG_NONE,
maxfun=200)
assert_allclose(self.f3(x), self.f3(xopt), atol=1e-8,
err_msg="TNC failed with status: " +
optimize.tnc.RCSTRINGS[rc])
def test_tnc4(self):
fg, x, bounds = self.fg4, [1.125, 0.125], [(1, None), (0, None)]
xopt = [1, 0]
x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
messages=optimize.tnc.MSG_NONE,
maxfun=200)
assert_allclose(self.f4(x), self.f4(xopt), atol=1e-8,
err_msg="TNC failed with status: " +
optimize.tnc.RCSTRINGS[rc])
def test_tnc5(self):
fg, x, bounds = self.fg5, [0, 0], [(-1.5, 4),(-3, 3)]
xopt = [-0.54719755119659763, -1.5471975511965976]
x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
messages=optimize.tnc.MSG_NONE,
maxfun=200)
assert_allclose(self.f5(x), self.f5(xopt), atol=1e-8,
err_msg="TNC failed with status: " +
optimize.tnc.RCSTRINGS[rc])
def test_tnc38(self):
fg, x, bounds = self.fg38, np.array([-3, -1, -3, -1]), [(-10, 10)]*4
xopt = [1]*4
x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
messages=optimize.tnc.MSG_NONE,
maxfun=200)
assert_allclose(self.f38(x), self.f38(xopt), atol=1e-8,
err_msg="TNC failed with status: " +
optimize.tnc.RCSTRINGS[rc])
def test_tnc45(self):
fg, x, bounds = self.fg45, [2] * 5, [(0, 1), (0, 2), (0, 3),
(0, 4), (0, 5)]
xopt = [1, 2, 3, 4, 5]
x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds,
messages=optimize.tnc.MSG_NONE,
maxfun=200)
assert_allclose(self.f45(x), self.f45(xopt), atol=1e-8,
err_msg="TNC failed with status: " +
optimize.tnc.RCSTRINGS[rc])
if __name__ == "__main__":
run_module_suite()
| agpl-3.0 |
guewen/OpenUpgrade | openerp/addons/base/tests/test_res_lang.py | 384 | 2104 | import unittest2
import openerp.tests.common as common
class test_res_lang(common.TransactionCase):
def test_00_intersperse(self):
from openerp.addons.base.res.res_lang import intersperse
assert intersperse("", []) == ("", 0)
assert intersperse("0", []) == ("0", 0)
assert intersperse("012", []) == ("012", 0)
assert intersperse("1", []) == ("1", 0)
assert intersperse("12", []) == ("12", 0)
assert intersperse("123", []) == ("123", 0)
assert intersperse("1234", []) == ("1234", 0)
assert intersperse("123456789", []) == ("123456789", 0)
assert intersperse("&ab%#@1", []) == ("&ab%#@1", 0)
assert intersperse("0", []) == ("0", 0)
assert intersperse("0", [1]) == ("0", 0)
assert intersperse("0", [2]) == ("0", 0)
assert intersperse("0", [200]) == ("0", 0)
assert intersperse("12345678", [1], '.') == ('1234567.8', 1)
assert intersperse("12345678", [1], '.') == ('1234567.8', 1)
assert intersperse("12345678", [2], '.') == ('123456.78', 1)
assert intersperse("12345678", [2,1], '.') == ('12345.6.78', 2)
assert intersperse("12345678", [2,0], '.') == ('12.34.56.78', 3)
assert intersperse("12345678", [-1,2], '.') == ('12345678', 0)
assert intersperse("12345678", [2,-1], '.') == ('123456.78', 1)
assert intersperse("12345678", [2,0,1], '.') == ('12.34.56.78', 3)
assert intersperse("12345678", [2,0,0], '.') == ('12.34.56.78', 3)
assert intersperse("12345678", [2,0,-1], '.') == ('12.34.56.78', 3)
assert intersperse("12345678", [3,3,3,3], '.') == ('12.345.678', 2)
assert intersperse("abc1234567xy", [2], '.') == ('abc1234567.xy', 1)
assert intersperse("abc1234567xy8", [2], '.') == ('abc1234567x.y8', 1) # ... w.r.t. here.
assert intersperse("abc12", [3], '.') == ('abc12', 0)
assert intersperse("abc12", [2], '.') == ('abc12', 0)
assert intersperse("abc12", [1], '.') == ('abc1.2', 1)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ilexius/odoo | addons/procurement_jit/__openerp__.py | 24 | 1097 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Just In Time Scheduling',
'version': '1.0',
'category': 'Base',
'description': """
This module will automatically reserve the picking from stock when a sale order is confirmed
============================================================================================
Upon confirmation of a sale order or when quantities are added,
the picking that reserves from stock will be reserved if the
necessary quantities are available.
In the simplest configurations, this is an easy way of working:
first come, first served. However, when not installed, you can
use manual reservation or run the schedulers where the system
will take into account the expected date and the priority.
If this automatic reservation would reserve too much, you can
still unreserve a picking.
""",
'website': 'https://www.odoo.com/page/manufacturing',
'depends': ['sale_stock'],
'data': [],
'demo': [],
'test': [],
'installable': True,
'auto_install': True,
}
| gpl-3.0 |
moyogo/tachyfont | run_time/src/gae_server/third_party/closure-library/closure/bin/build/treescan.py | 582 | 2203 | #!/usr/bin/env python
#
# Copyright 2010 The Closure Library Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared utility functions for scanning directory trees."""
import os
import re
__author__ = 'nnaze@google.com (Nathan Naze)'
# Matches a .js file path.
_JS_FILE_REGEX = re.compile(r'^.+\.js$')
def ScanTreeForJsFiles(root):
  """Yields the paths of all JavaScript files found under *root*.

  Args:
    root: str, Path to the root directory to scan.

  Returns:
    An iterable of paths to JS files, relative to cwd.
  """
  # Delegate to the generic tree scanner with a .js-only path filter.
  return ScanTree(root, path_filter=_JS_FILE_REGEX)
def ScanTree(root, path_filter=None, ignore_hidden=True):
  """Scans a directory tree for files.

  Args:
    root: str, Path to a root directory.
    path_filter: A regular expression filter. If set, only paths matching
      the path_filter are returned.
    ignore_hidden: If True, do not follow or return hidden directories or files
      (those starting with a '.' character).

  Yields:
    A string path to files, relative to cwd.
  """

  def OnError(os_error):
    # Propagate walk errors instead of os.walk's default of ignoring them.
    raise os_error

  for dirpath, dirnames, filenames in os.walk(root, onerror=OnError):
    # os.walk allows us to modify dirnames to prevent descent into particular
    # directories. Prune hidden directories in place.
    # BUGFIX: the previous code removed entries while iterating the same
    # list, which skips the element following each removal and could leave
    # adjacent hidden directories unpruned.
    if ignore_hidden:
      dirnames[:] = [d for d in dirnames if not d.startswith('.')]

    for filename in filenames:
      # nothing that starts with '.'
      if ignore_hidden and filename.startswith('.'):
        continue
      fullpath = os.path.join(dirpath, filename)
      if path_filter and not path_filter.match(fullpath):
        continue
      yield os.path.normpath(fullpath)
| apache-2.0 |
flar2/ElementalX-m7-2.0 | arch/ia64/scripts/unwcheck.py | 13143 | 1714 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
# Require exactly one argument: the object file whose unwind info to check.
if len(sys.argv) != 2:
    print "Usage: %s FILE" % sys.argv[0]
    sys.exit(2)

# The readelf binary may be overridden (e.g. for a cross toolchain) via
# the READELF environment variable.
readelf = os.getenv("READELF", "readelf")

# Matches the per-function header printed by `readelf -u`:
#   <name>: [0xstart-0xend]
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
# Matches the region-length annotation (rlen=N) on an unwind-directive line.
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
    """Verify that the slot count derived from a function's address range
    matches the total region length from its unwind info; report a
    mismatch and bump the global error counter."""
    if slots != rlen_sum:
        global num_errors
        num_errors += 1
        # Fall back to the raw address range when the function is unnamed
        # (start/end are globals set by the main parsing loop).
        if not func: func = "[%#x-%#x]" % (start, end)
        print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
    return
num_funcs = 0
num_errors = 0
func = False      # name of the function currently being accumulated
slots = 0         # expected number of instruction slots for that function
rlen_sum = 0      # running sum of unwind region lengths seen so far
# Parse `readelf -u` output line by line; each function header flushes the
# accumulated state of the previous one.
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        # New function header: validate the previously accumulated function.
        check_func(func, slots, rlen_sum)
        func = m.group(1)
        start = long(m.group(2), 16)
        end = long(m.group(3), 16)
        # ia64 instruction bundles hold 3 slots per 16 bytes.
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# Validate the last function (no trailing header triggers it in the loop).
check_func(func, slots, rlen_sum)

if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    if num_errors > 1:
        err="errors"
    else:
        err="error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
| gpl-2.0 |
xuvw/viewfinder | backend/www/admin/admin.py | 13 | 3707 | # Copyright 2012 Viewfinder Inc. All Rights Reserved.
"""Handlers for viewfinder web application administration.
AdminHandler: top-level admin handler
"""
__author__ = 'spencer@emailscrubbed.com (Spencer Kimball)'
import httplib
import logging
import os
import traceback
from tornado import gen, web
from viewfinder.backend.base import counters, handler
from viewfinder.backend.db.db_client import DBClient
from viewfinder.backend.db import schema, vf_schema
from viewfinder.backend.db.admin_permissions import AdminPermissions
from viewfinder.backend.www import basic_auth
_req_per_sec = counters.define_rate('viewfinder.admin.www.requests_per_second',
'Administrator website requests handled per second.')
def require_permission(level=None):
  """Decorator to be used in admin get/post methods.

  Permission required may be 'root', 'support', or None.
  If None is specified, the user must still be in the AdminPermissions table.
  Permissions are stored in self._permissions for later access.
  """
  def decorator(f):
    @gen.engine
    def wrapper(self, *args, **kwargs):
      assert level in [None, 'root', 'support']
      # Always look up the caller's permissions; QueryAdminPermissions raises
      # if the user has no entry (or an empty set of rights).
      self._permissions = yield gen.Task(self.QueryAdminPermissions)
      if level == 'root':
        self.CheckIsRoot()
      elif level == 'support':
        self.CheckIsSupport()
      # Invoke the wrapped handler method only after the checks pass.
      f(self, *args, **kwargs)
    return wrapper
  return decorator
class AdminHandler(basic_auth.BasicAuthHandler):
  """Directory of administration tasks."""

  def prepare(self):
    # Runs before each request: perform basic auth, remember the caller's
    # credentials and count the request.
    basic_auth.BasicAuthHandler.prepare(self)
    self._auth_credentials = self.get_current_user()
    _req_per_sec.increment()

  @handler.authenticated()
  @handler.asynchronous(datastore=True)
  # We only require that the user exists. Actual rights are only used here to build the link table.
  # They will be checked by each sub page.
  @require_permission()
  def get(self):
    t_dict = self.PermissionsTemplateDict()
    self.render('admin.html', **t_dict)

  def CheckIsRoot(self):
    """Check whether the permissions object has a ROOT rights entry."""
    if not self._permissions.IsRoot():
      raise web.HTTPError(httplib.FORBIDDEN, 'User %s does not have root credentials.' % self._auth_credentials)

  def CheckIsSupport(self):
    """Check whether the permissions object has a SUPPORT rights entry. Root users do not automatically get
    granted support rights.
    """
    if not self._permissions.IsSupport():
      raise web.HTTPError(httplib.FORBIDDEN, 'User %s does not have support credentials.' % self._auth_credentials)

  def PermissionsTemplateDict(self):
    """Dict of variables used in all admin templates."""
    return { 'auth_credentials': self._auth_credentials,
             'is_root': self._permissions.IsRoot(),
             'is_support': self._permissions.IsSupport() }

  @gen.engine
  def QueryAdminPermissions(self, callback):
    """Get set of permissions for user. Raise an error if the user does not have an entry,
    or if the set of rights is empty.
    """
    permissions = yield gen.Task(AdminPermissions.Query, self._client, self._auth_credentials, None, must_exist=False)
    if permissions is None or not permissions.rights:
      raise web.HTTPError(httplib.FORBIDDEN, 'User %s has no credentials.' % self._auth_credentials)
    callback(permissions)

  def _handle_request_exception(self, value):
    """Handles presentation of an exception condition to the admin.
    """
    logging.exception('error in admin page')
    # Render the error page without permission flags (they may be unknown at
    # this point), showing the exception and its traceback.
    self.render('admin_error.html',
                auth_credentials=self._auth_credentials, is_root=False, is_support=False,
                title=value, message=traceback.format_exc())
    # Returning True tells tornado the exception has been handled.
    return True
| apache-2.0 |
PhilipDaniels/phi | google-test/test/gtest_list_tests_unittest.py | 1898 | 6515 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test's --gtest_list_tests flag.
A user can ask Google Test to list all tests by specifying the
--gtest_list_tests flag. This script tests such functionality
by invoking gtest_list_tests_unittest_ (a program written with
Google Test) the command line flags.
"""
__author__ = 'phanna@google.com (Patrick Hanna)'
import gtest_test_utils
import re
# Constants.
# The command line flag for enabling/disabling listing all tests.
LIST_TESTS_FLAG = 'gtest_list_tests'
# Path to the gtest_list_tests_unittest_ program.
EXE_PATH = gtest_test_utils.GetTestExecutablePath('gtest_list_tests_unittest_')
# The expected output when running gtest_list_tests_unittest_ with
# --gtest_list_tests
EXPECTED_OUTPUT_NO_FILTER_RE = re.compile(r"""FooDeathTest\.
Test1
Foo\.
Bar1
Bar2
DISABLED_Bar3
Abc\.
Xyz
Def
FooBar\.
Baz
FooTest\.
Test1
DISABLED_Test2
Test3
TypedTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
TestA
TestB
TypedTest/1\. # TypeParam = int\s*\*
TestA
TestB
TypedTest/2\. # TypeParam = .*MyArray<bool,\s*42>
TestA
TestB
My/TypeParamTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\.
TestA
TestB
My/TypeParamTest/1\. # TypeParam = int\s*\*
TestA
TestB
My/TypeParamTest/2\. # TypeParam = .*MyArray<bool,\s*42>
TestA
TestB
MyInstantiation/ValueParamTest\.
TestA/0 # GetParam\(\) = one line
TestA/1 # GetParam\(\) = two\\nlines
TestA/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
TestB/0 # GetParam\(\) = one line
TestB/1 # GetParam\(\) = two\\nlines
TestB/2 # GetParam\(\) = a very\\nlo{241}\.\.\.
""")
# The expected output when running gtest_list_tests_unittest_ with
# --gtest_list_tests and --gtest_filter=Foo*.
EXPECTED_OUTPUT_FILTER_FOO_RE = re.compile(r"""FooDeathTest\.
Test1
Foo\.
Bar1
Bar2
DISABLED_Bar3
FooBar\.
Baz
FooTest\.
Test1
DISABLED_Test2
Test3
""")
# Utilities.
def Run(args):
  """Runs gtest_list_tests_unittest_ and returns the list of tests printed.

  Args:
    args: list of str, extra command-line flags to pass to the test binary.

  Returns:
    str, the captured output of the test binary.
  """
  return gtest_test_utils.Subprocess([EXE_PATH] + args,
                                     capture_stderr=False).output
# The unit test.
class GTestListTestsUnitTest(gtest_test_utils.TestCase):
  """Tests using the --gtest_list_tests flag to list all tests."""

  def RunAndVerify(self, flag_value, expected_output_re, other_flag):
    """Runs gtest_list_tests_unittest_ and verifies that it prints
    the correct tests.

    Args:
      flag_value: value of the --gtest_list_tests flag;
          None if the flag should not be present.
      expected_output_re: regular expression that matches the expected
          output after running command;
      other_flag: a different flag to be passed to command
          along with gtest_list_tests;
          None if the flag should not be present.
    """
    # Build the --gtest_list_tests flag (absent, explicitly off, or on).
    if flag_value is None:
      flag = ''
      flag_expression = 'not set'
    elif flag_value == '0':
      flag = '--%s=0' % LIST_TESTS_FLAG
      flag_expression = '0'
    else:
      flag = '--%s' % LIST_TESTS_FLAG
      flag_expression = '1'

    args = [flag]
    if other_flag is not None:
      args += [other_flag]

    output = Run(args)

    if expected_output_re:
      # Listing was requested: the output must match the expected regex.
      self.assert_(
          expected_output_re.match(output),
          ('when %s is %s, the output of "%s" is "%s",\n'
           'which does not match regex "%s"' %
           (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output,
            expected_output_re.pattern)))
    else:
      # Listing was not requested: the output must NOT look like a test list.
      self.assert_(
          not EXPECTED_OUTPUT_NO_FILTER_RE.match(output),
          ('when %s is %s, the output of "%s" is "%s"'%
           (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output)))

  def testDefaultBehavior(self):
    """Tests the behavior of the default mode."""
    self.RunAndVerify(flag_value=None,
                      expected_output_re=None,
                      other_flag=None)

  def testFlag(self):
    """Tests using the --gtest_list_tests flag."""
    self.RunAndVerify(flag_value='0',
                      expected_output_re=None,
                      other_flag=None)
    self.RunAndVerify(flag_value='1',
                      expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
                      other_flag=None)

  def testOverrideNonFilterFlags(self):
    """Tests that --gtest_list_tests overrides the non-filter flags."""
    self.RunAndVerify(flag_value='1',
                      expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
                      other_flag='--gtest_break_on_failure')

  def testWithFilterFlags(self):
    """Tests that --gtest_list_tests takes into account the
    --gtest_filter flag."""
    self.RunAndVerify(flag_value='1',
                      expected_output_re=EXPECTED_OUTPUT_FILTER_FOO_RE,
                      other_flag='--gtest_filter=Foo*')
if __name__ == '__main__':
gtest_test_utils.Main()
| mit |
sa2ajj/DistroTracker | pts/mail/tests/tests_control.py | 1 | 90050 | # -*- coding: utf-8 -*-
# Copyright 2013 The Distro Tracker Developers
# See the COPYRIGHT file at the top-level directory of this distribution and
# at http://deb.li/DTAuthors
#
# This file is part of Distro Tracker. It is subject to the license terms
# in the LICENSE file found in the top-level directory of this
# distribution and at http://deb.li/DTLicense. No part of Distro Tracker,
# including this file, may be copied, modified, propagated, or distributed
# except according to the terms contained in the LICENSE file.
"""
Tests for :mod:`pts.mail.pts_control`.
"""
from __future__ import unicode_literals
from django.conf import settings
from django.test import TestCase
from django.core import mail
from django.utils.encoding import force_bytes
from pts.mail import control
from pts.core.utils import pts_render_to_string
from pts.core.utils import extract_email_address_from_header
from pts.core.utils import get_or_none
from pts.core.models import PackageName, EmailUser, Subscription
from pts.core.models import Keyword
from pts.core.models import Team
from pts.core.models import BinaryPackageName
from pts.core.models import SourcePackageName
from pts.core.models import SourcePackage
from pts.accounts.models import User
from pts.mail.models import CommandConfirmation
from pts.mail.control.commands import UNIQUE_COMMANDS
from email import encoders
from email.message import Message
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email.utils import make_msgid
from datetime import timedelta
import re
PTS_CONTACT_EMAIL = settings.PTS_CONTACT_EMAIL
PTS_CONTROL_EMAIL = settings.PTS_CONTROL_EMAIL
class EmailControlTest(TestCase):
    """
    Base class for control-bot tests: provides helpers to build a control
    email message and assertion helpers over Django's outgoing test mailbox
    (``django.core.mail.outbox``).
    """

    def control_process(self):
        """
        Helper method. Passes the constructed control message to the control
        processor.
        """
        control.process(force_bytes(self.message.as_string(), 'utf-8'))

    def setUp(self):
        self.reset_message()

    def set_default_headers(self):
        """
        Helper method which adds the default headers for each test message.
        """
        self.message.add_header('From', 'John Doe <john.doe@unknown.com>')
        self.message.add_header('To', PTS_CONTROL_EMAIL)
        self.message.add_header('Subject', 'Commands')
        self.message.add_header('Message-ID', make_msgid())

    def set_header(self, header_name, header_value):
        """
        Helper method which sets the given value for the given header.

        :param header_name: The name of the header to set
        :param header_value: The value of the header to set
        """
        # Remove any existing value first so the header is replaced, not
        # duplicated.
        if header_name in self.message:
            del self.message[header_name]
        self.message.add_header(header_name, header_value)

    def set_input_lines(self, lines):
        """
        Sets the lines of the message body which represent sent commands.

        :param lines: All lines of commands
        :param type: iterable
        """
        payload = '\n'.join(lines)
        if self.multipart:
            plain_text = MIMEText('plain')
            plain_text.set_payload(payload)
            self.message.attach(plain_text)
        else:
            self.message.set_payload(payload)

    def make_multipart(self, alternative=False):
        """
        Helper method which converts the test message into a multipart message.
        """
        if alternative:
            self.message = MIMEMultipart('alternative')
        else:
            self.message = MIMEMultipart()
        self.set_default_headers()
        self.multipart = True

    def add_part(self, mime_type, subtype, data):
        """
        Adds the given part to the test message.

        :param mime_type: The main MIME type of the new part
        :param subtype: The MIME subtype of the new part
        :param data: The payload of the part
        """
        part = MIMEBase(mime_type, subtype)
        part.set_payload(data)
        # Non-text payloads are base64 encoded, as for real attachments.
        if mime_type != 'text':
            encoders.encode_base64(part)
        self.message.attach(part)

    def reset_message(self):
        """
        Helper method resets any changes made to the test message.
        """
        self.message = Message()
        self.multipart = False
        self.set_default_headers()

    def make_comment(self, text):
        """
        Helper function which creates a comment from the given text.
        """
        return '# ' + text

    def assert_response_sent(self, number_of_responses=1):
        """
        Helper method which asserts that the expected number of responses is
        sent.

        :param number_of_responses: The expected number of responses.
        """
        self.assertEqual(len(mail.outbox), number_of_responses)

    def assert_response_not_sent(self):
        """
        Helper method which asserts that no responses were sent.
        """
        self.assertEqual(len(mail.outbox), 0)

    def assert_in_response(self, text, response_number=-1):
        """
        Helper method which asserts that the given text is found in the given
        response message.

        :param text: The text which needs to be found in the response.
        :param response_number: The index number of the response message.
            Standard Python indexing applies, which means that -1 means the
            last sent message.
        """
        self.assertTrue(mail.outbox)
        out_mail = mail.outbox[response_number]
        self.assertIn(text, out_mail.body)

    def assert_line_in_response(self, line, response_number=-1):
        """
        Helper method which asserts that the given full line of text is found
        in the given response message.

        :param line: The line of text which needs to be found in the response.
        :param response_number: The index number of the response message.
            Standard Python indexing applies, which means that -1 means the
            last sent message.
        """
        self.assertTrue(mail.outbox)
        out_mail = mail.outbox[response_number]
        self.assertIn(line, out_mail.body.splitlines())

    def assert_line_not_in_response(self, line, response_number=-1):
        """
        Helper method which asserts that the given full line of text is not
        found in the given response message.

        :param line: The line of text which needs to be found in the response.
        :param response_number: The index number of the response message.
            Standard Python indexing applies, which means that -1 means the
            last sent message.
        """
        self.assertTrue(mail.outbox)
        out_mail = mail.outbox[response_number]
        self.assertNotIn(line, out_mail.body.splitlines())

    def get_list_item(self, item, bullet='*'):
        """
        Helper method which returns a representation of a list item.

        :param item: The list item's content
        :type item: string
        :param bullet: The character used as the "bullet" of the list.
        """
        return bullet + ' ' + str(item)

    def assert_list_in_response(self, items, bullet='*'):
        """
        Helper method which asserts that a list of items is found in the
        response.
        """
        self.assert_in_response('\n'.join(
            self.get_list_item(item, bullet)
            for item in items
        ))

    def assert_list_item_in_response(self, item, bullet='*'):
        """
        Helper method which asserts that a single list item is found in the
        response.
        """
        self.assert_line_in_response(self.get_list_item(item, bullet))

    def assert_list_item_not_in_response(self, item, bullet='*'):
        """
        Helper method which asserts that a single list item is not found in the
        response.
        """
        self.assert_line_not_in_response(self.get_list_item(item, bullet))

    def assert_not_in_response(self, text, response_number=-1):
        """
        Helper method which asserts that the given text is not found in the
        given response message.

        :param text: The text which needs to be found in the response.
        :param response_number: The index number of the response message.
            Standard Python indexing applies, which means that -1 means the
            last sent message.
        """
        out_mail = mail.outbox[response_number]
        self.assertNotIn(text, out_mail.body)

    def assert_response_equal(self, text, response_number=-1):
        """
        Helper method which asserts that the response is completely identical
        to the given text.

        :param text: The text which the response is compared to.
        :param response_number: The index number of the response message.
            Standard Python indexing applies, which means that -1 means the
            last sent message.
        """
        out_mail = mail.outbox[response_number]
        self.assertEqual(text, out_mail.body)

    def assert_header_equal(self, header_name, header_value,
                            response_number=-1):
        """
        Helper method which asserts that a particular response's
        header value is equal to an expected value.

        :param header_name: The name of the header to be tested
        :param header_value: The expected value of the header
        :param response_number: The index number of the response message.
            Standard Python indexing applies, which means that -1 means the
            last sent message.
        """
        out_mail = mail.outbox[response_number].message()
        self.assertEqual(out_mail[header_name], header_value)

    def assert_command_echo_in_response(self, command):
        """
        Helper method which asserts that a given command's echo is found in
        the response.
        """
        self.assert_in_response('> ' + command)

    def assert_command_echo_not_in_response(self, command):
        """
        Helper method which asserts that a given command's echo is not found
        in the response.
        """
        self.assert_not_in_response('> ' + command)

    def assert_warning_in_response(self, text):
        """
        Helper method which asserts that a particular warning is found in the
        response.

        :param text: The text of the warning message.
        """
        self.assert_in_response("Warning: " + text)

    def assert_error_in_response(self, text):
        """
        Helper method which asserts that a particular error is found in the
        response.

        :param text: The text of the error message.
        """
        self.assert_in_response("Error: " + text)

    def assert_cc_contains_address(self, email_address):
        """
        Helper method which checks that the Cc header of the response contains
        the given email address.
        """
        response_mail = mail.outbox[-1]
        self.assertIn(
            email_address, (
                extract_email_address_from_header(email)
                for email in response_mail.cc
            )
        )

    def reset_outbox(self):
        """
        Helper method which resets the structure containing all outgoing
        emails.
        """
        mail.outbox = []

    def regex_search_in_response(self, regexp, response_number=0):
        """
        Helper method which performs a regex search in a response.
        """
        return regexp.search(mail.outbox[response_number].body)
class ControlBotBasic(EmailControlTest):
    """
    Tests the basic message-handling behavior of the control bot: response
    headers, command echoing, input formats and processing termination rules.
    """

    def test_basic_headers(self):
        """
        Tests if the proper headers are set for the reply message, that the
        output contains original lines prepended with '>'
        """
        input_lines = [
            "#command",
            " thanks",
        ]
        self.set_header('Subject', 'Commands')
        self.set_input_lines(input_lines)

        self.control_process()

        self.assert_response_sent()
        self.assert_header_equal('Subject', 'Re: Commands')
        self.assert_header_equal('X-Loop', PTS_CONTROL_EMAIL)
        self.assert_header_equal('To', self.message['From'])
        self.assert_header_equal('From', PTS_CONTACT_EMAIL)
        self.assert_header_equal('In-Reply-To', self.message['Message-ID'])
        self.assert_header_equal(
            'References',
            ' '.join((self.message.get('References', ''),
                      self.message['Message-ID']))
        )

    def test_response_when_no_subject(self):
        """
        Tests that the subject of the response when there is no subject set in
        the request is correct.
        """
        self.set_input_lines(["thanks"])
        self.set_header('Subject', '')

        self.control_process()

        self.assert_header_equal('Subject', 'Re: Your mail')

    def test_basic_echo_commands(self):
        """
        Tests that commands are echoed in the response.
        """
        input_lines = [
            "#command",
            " thanks",
        ]
        self.set_header('Subject', 'Commands')
        self.set_input_lines(input_lines)

        self.control_process()

        for line in input_lines:
            self.assert_command_echo_in_response(line.strip())

    def test_not_plaintext(self):
        """
        Tests that the response to a non-plaintext message is a warning email.
        """
        self.make_multipart()
        self.add_part('application', 'octet-stream', b'asdf')

        self.control_process()

        self.assert_response_sent()
        self.assert_response_equal(pts_render_to_string(
            'control/email-plaintext-warning.txt'))

    def test_multipart_with_plaintext(self):
        """
        Tests that the response to a multipart message which contains a
        text/plain part is correct.
        """
        self.make_multipart(alternative=True)
        input_lines = [
            '#command',
            'thanks',
        ]
        self.set_input_lines(input_lines)
        self.add_part('text', 'html', "#command\nthanks")

        self.control_process()

        self.assert_response_sent()
        for line in input_lines:
            self.assert_command_echo_in_response(line.strip())

    def test_empty_no_response(self):
        """
        Tests that there is no response to an empty message.
        """
        self.control_process()

        self.assert_response_not_sent()

    def test_loop_no_response(self):
        """
        Tests that there is no response if the message's X-Loop is set to
        PTS_CONTROL_EMAIL
        """
        self.set_header('X-Loop', 'something-else')
        self.set_header('X-Loop', PTS_CONTROL_EMAIL)
        self.set_input_lines(['thanks'])

        self.control_process()

        self.assert_response_not_sent()

    def test_no_valid_command_no_response(self):
        """
        Tests that there is no response for a message which does not contain
        any valid commands.
        """
        self.set_input_lines(['Some text', 'Some more text'])

        self.control_process()

        self.assert_response_not_sent()

    def test_stop_after_five_garbage_lines(self):
        """
        Tests that processing stops after encountering five garbage lines.
        """
        MAX_ALLOWED_ERRORS = settings.PTS_MAX_ALLOWED_ERRORS_CONTROL_COMMANDS
        self.set_input_lines(
            ['help'] + ['garbage'] * MAX_ALLOWED_ERRORS + ['#command'])

        self.control_process()

        self.assert_response_sent()
        # Processing stopped before reaching the trailing command.
        self.assert_command_echo_not_in_response('#command')

    def test_stop_on_thanks_or_quit(self):
        """
        Tests that processing stops after encountering the thanks or quit
        command.
        """
        self.set_input_lines(['thanks', '#command'])

        self.control_process()

        self.assert_response_sent()
        self.assert_command_echo_in_response('thanks')
        self.assert_in_response("Stopping processing here.")
        self.assert_command_echo_not_in_response('#command')

    def test_blank_line_skip(self):
        """
        Tests that processing skips any blank lines in the message. They are
        not considered garbage.
        """
        self.set_input_lines(['help', ''] + [' '] * 5 + ['#comment'])

        self.control_process()

        self.assert_response_sent()
        self.assert_command_echo_in_response('#comment')

    def test_comment_line_skip(self):
        """
        Tests that processing skips commented lines and that they are not
        considered garbage.
        """
        MAX_ALLOWED_ERRORS = settings.PTS_MAX_ALLOWED_ERRORS_CONTROL_COMMANDS
        self.set_input_lines(
            [self.make_comment(command)
             for command in ['comment'] * MAX_ALLOWED_ERRORS] + ['help']
        )

        self.control_process()

        self.assert_command_echo_in_response('help')

    def test_utf8_message(self):
        """
        Tests that the bot sends replies to utf-8 encoded messages.
        """
        lines = ['üšßč', '한글ᥡ╥ສए', 'help']
        self.set_input_lines(lines)
        self.message.set_charset('utf-8')

        self.control_process()

        self.assert_response_sent()
        for line in lines:
            self.assert_command_echo_in_response(line)

    def test_subject_command(self):
        """
        Tests that a command given in the subject of the message is executed.
        """
        self.set_header('Subject', 'help')

        self.control_process()

        self.assert_response_sent()
        self.assert_command_echo_in_response('# Message subject')
        self.assert_command_echo_in_response('help')

    def test_end_processing_on_signature_delimiter(self):
        """
        Tests that processing commands halts when the signature delimiter is
        reached (--)
        """
        self.set_input_lines(['help', '--', '# command'])

        self.control_process()

        self.assert_command_echo_not_in_response('# command')
class ConfirmationTests(EmailControlTest):
    """
    Tests the command confirmation mechanism.
    """

    def setUp(self):
        super(ConfirmationTests, self).setUp()
        self.user_email_address = 'dummy-user@domain.com'
        self.set_header('From',
                        'Dummy User <{user_email}>'.format(
                            user_email=self.user_email_address))
        # Regular expression to extract the confirmation code from the body of
        # the response mail
        self.regexp = re.compile(r'^CONFIRM (.*)$', re.MULTILINE)
        self.packages = [
            PackageName.objects.create(name='dummy-package'),
            PackageName.objects.create(name='other-package'),
        ]

    def user_subscribed(self, email_address, package_name):
        """
        Helper method checks whether the given email is subscribed to the
        package.
        """
        return EmailUser.objects.is_user_subscribed_to(
            user_email=email_address,
            package_name=package_name)

    def assert_confirmation_sent_to(self, email_address):
        """
        Helper method checks whether a confirmation mail was sent to the
        given email address.
        """
        # The last outgoing message is the control response; confirmation
        # mails precede it, hence the [:-1] slice.
        self.assertIn(
            True, (
                extract_email_address_from_header(msg.to[0]) == email_address
                for msg in mail.outbox[:-1]
            )
        )

    def test_multiple_commands_single_confirmation_email(self):
        """
        Tests that multiple commands which require confirmation cause only a
        single confirmation email.
        """
        commands = [
            'subscribe ' + package.name + ' ' + self.user_email_address
            for package in self.packages
        ]
        self.set_input_lines(commands)

        self.control_process()

        # A control commands response and confirmation email sent
        self.assert_response_sent(2)
        self.assert_confirmation_sent_to(self.user_email_address)
        # Contains the confirmation key
        self.assertIsNotNone(self.regex_search_in_response(self.regexp))
        # A confirmation key really created
        self.assertEqual(CommandConfirmation.objects.count(), 1)
        # Check the commands associated with the confirmation object.
        c = CommandConfirmation.objects.all()[0]
        self.assertEqual('\n'.join(commands), c.commands)
        for command in commands:
            self.assert_in_response(command)
        # Finally make sure the commands did not actually execute
        self.assertEqual(Subscription.objects.filter(active=True).count(), 0)

    def test_subscribe_command_confirmation_message(self):
        """
        Tests that the custom confirmation messages for commands are correctly
        included in the confirmation email.
        """
        Subscription.objects.create_for(
            email=self.user_email_address,
            package_name=self.packages[1].name)
        commands = [
            'unsubscribeall',
            'unsubscribe ' + self.packages[1].name,
            'subscribe ' + self.packages[0].name,
        ]
        self.set_input_lines(commands)

        self.control_process()

        # One confirmation message expected per command, in input order.
        expected_messages = [
            pts_render_to_string(
                'control/email-unsubscribeall-confirmation.txt'
            ),
            pts_render_to_string(
                'control/email-unsubscribe-confirmation.txt', {
                    'package': self.packages[1].name,
                }
            ),
            pts_render_to_string(
                'control/email-subscription-confirmation.txt', {
                    'package': self.packages[0].name,
                }
            )
        ]
        c = CommandConfirmation.objects.all()[0]
        self.assert_response_equal(
            pts_render_to_string(
                'control/email-confirmation-required.txt', {
                    'command_confirmation': c,
                    'confirmation_messages': expected_messages,
                }
            ),
            response_number=0
        )

    def test_multiple_commands_confirmed(self):
        """
        Tests that multiple commands are actually confirmed by a single key.
        """
        commands = [
            'subscribe ' + package.name + ' ' + self.user_email_address
            for package in self.packages
        ]
        c = CommandConfirmation.objects.create_for_commands(commands)
        self.set_input_lines(['CONFIRM ' + c.confirmation_key])

        self.control_process()

        self.assert_response_sent()
        for package in self.packages:
            self.assertTrue(
                self.user_subscribed(self.user_email_address, package.name))
        for command in commands:
            self.assert_command_echo_in_response(command)
        # Key no longer usable
        self.assertEqual(CommandConfirmation.objects.count(), 0)

    def test_multiple_commands_per_user(self):
        """
        Tests that if multiple emails should receive a confirmation email for
        some commands, each of them gets only one.
        """
        commands = []
        commands.extend([
            'subscribe ' + package.name + ' ' + self.user_email_address
            for package in self.packages
        ])
        other_user = 'other-user@domain.com'
        commands.extend([
            'subscribe ' + package.name + ' ' + other_user
            for package in self.packages
        ])
        self.set_input_lines(commands)

        self.control_process()

        # A control commands response and confirmation emails sent
        self.assert_response_sent(3)
        self.assert_confirmation_sent_to(self.user_email_address)
        self.assert_confirmation_sent_to(other_user)
        self.assertEqual(CommandConfirmation.objects.count(), 2)
        # Control message CCed to both of them.
        self.assert_cc_contains_address(self.user_email_address)
        self.assert_cc_contains_address(other_user)

    def test_same_command_repeated(self):
        """
        Tests that when the same command is repeated in the command email, it
        is included just once in the confirmation email.
        """
        package = self.packages[0]
        self.set_input_lines([
            'subscribe ' + package.name + ' ' + self.user_email_address,
            'subscribe ' + package.name + ' ' + self.user_email_address,
        ])

        self.control_process()

        self.assert_response_sent(2)
        c = CommandConfirmation.objects.all()[0]
        self.assertEqual(
            'subscribe ' + package.name + ' ' + self.user_email_address,
            c.commands)

    def test_confirm_only_if_needs_confirmation(self):
        """
        Tests that only the commands which need confirmation are included in
        the confirmation email.
        """
        Subscription.objects.create_for(
            email=self.user_email_address,
            package_name=self.packages[1].name)
        package = self.packages[0]
        self.set_input_lines([
            'unsubscribeall',
            'which',
            'help',
            'subscribe ' + package.name + ' ' + self.user_email_address,
            'who',
            'keywords',
            'unsubscribe ' + self.packages[1].name + ' ' + self.user_email_address,
        ])

        self.control_process()

        self.assert_response_sent(2)
        c = CommandConfirmation.objects.all()[0]
        # Only the subscription-changing commands require confirmation.
        expected = '\n'.join([
            'unsubscribeall ' + self.user_email_address,
            'subscribe ' + package.name + ' ' + self.user_email_address,
            'unsubscribe ' + self.packages[1].name + ' ' + self.user_email_address,
        ])
        self.assertEqual(expected, c.commands)

    def test_unknown_confirmation_key(self):
        """
        Tests the confirm command when an unknown key is given.
        """
        self.set_input_lines(['CONFIRM asdf'])

        self.control_process()

        self.assert_response_sent()
        self.assert_error_in_response('Confirmation failed: Unknown key')
class HelpCommandTest(EmailControlTest):
    """
    Tests for the help command.
    """

    def get_all_help_command_descriptions(self):
        """
        Helper method returning the description of all commands.
        """
        return (cmd.META.get('description', '') for cmd in UNIQUE_COMMANDS)

    def test_help_command(self):
        """
        Tests that the help command returns all the available commands and
        their descriptions.
        """
        self.set_input_lines(['help'])

        self.control_process()

        self.assert_in_response(pts_render_to_string('control/help.txt', {
            'descriptions': self.get_all_help_command_descriptions()
        }))
class KeywordCommandHelperMixin(object):
    """
    Shared assertion helpers used by the tests of all forms of the keyword
    command: verify the presence (or absence) of a set of keywords in the
    bot's response.
    """

    def assert_keywords_in_response(self, keywords):
        """
        Asserts that each of the given keywords appears as a list item in
        the response.
        """
        for expected_keyword in keywords:
            self.assert_list_item_in_response(expected_keyword)

    def assert_keywords_not_in_response(self, keywords):
        """
        Asserts that none of the given keywords appears as a list item in
        the response.
        """
        for unexpected_keyword in keywords:
            self.assert_list_item_not_in_response(unexpected_keyword)
class KeywordCommandSubscriptionSpecificTest(EmailControlTest,
                                             KeywordCommandHelperMixin):
    """
    Tests for the keyword command when modifying subscription specific
    keywords.
    """
    def setUp(self):
        super(KeywordCommandSubscriptionSpecificTest, self).setUp()
        # Setup a subscription
        self.package = PackageName.objects.create(name='dummy-package')
        self.user = EmailUser.objects.create(email='user@domain.com')
        self.subscription = Subscription.objects.create(
            package=self.package,
            email_user=self.user
        )
        self.default_keywords = set(
            keyword.name
            for keyword in self.subscription.keywords.filter(default=True))
        self.commands = []
        self.set_header('From', self.user.email)
    def _to_command_string(self, command):
        """
        Helper method turning a tuple representing a keyword command into a
        string.
        """
        # The last tuple element is the keyword list; all preceding elements
        # are joined with spaces, the keywords themselves with ', '.
        return ' '.join(
            command[:-1] + (', '.join(command[-1]),)
        )
    def add_keyword_command(self, package, operator, keywords, email=None,
                            use_tag=False):
        """
        Helper method which queues a keyword (or tag) command modifying the
        keywords of a subscription and regenerates the full input message.
        """
        if email is None:
            email = ''
        command = 'keyword' if not use_tag else 'tag'
        self.commands.append((
            command,
            package,
            email,
            operator,
            keywords,
        ))
        self.set_input_lines(self._to_command_string(command)
                             for command in self.commands)
    def get_new_list_of_keywords_text(self, package, email):
        """
        Returns the status text which should precede a new list of keywords.
        """
        # Use the given email instead of hard-coding self.user.email so that
        # negative checks for other users assert against the correct string.
        return (
            "Here's the new list of accepted keywords associated to package\n"
            "{package} for {address} :".format(package=package,
                                               address=email)
        )
    def assert_error_user_not_subscribed_in_response(self, email, package):
        """
        Checks whether an error saying the user is not subscribed to a package
        is in the response.
        """
        self.assert_error_in_response(
            '{email} is not subscribed to the package {package}'.format(
                email=email, package=package)
        )
    def assert_subscription_keywords_equal(self, keywords):
        """
        Asserts that the subscription of the test user to the test package is
        equal to the given keywords.
        """
        # Re-fetch to see changes made by the command processing.
        self.subscription = Subscription.objects.get(
            package=self.package,
            email_user=self.user
        )
        all_keywords = self.subscription.keywords.all()
        self.assertEqual(all_keywords.count(), len(keywords))
        for keyword in all_keywords:
            self.assertIn(keyword.name, keywords)
    def assert_subscription_has_keywords(self, keywords):
        """
        Check if the subscription of the test user to the test package has the
        given keywords.
        """
        self.subscription = Subscription.objects.get(
            package=self.package,
            email_user=self.user
        )
        all_keywords = self.subscription.keywords.all()
        for keyword in keywords:
            self.assertIn(Keyword.objects.get(name=keyword), all_keywords)
    def assert_subscription_not_has_keywords(self, keywords):
        """
        Assert that the subscription of the test user to the test package does
        not have the given keywords.
        """
        self.subscription = Subscription.objects.get(
            package=self.package,
            email_user=self.user
        )
        all_keywords = self.subscription.keywords.all()
        for keyword in keywords:
            self.assertNotIn(Keyword.objects.get(name=keyword), all_keywords)
    def test_add_keyword_to_subscription(self):
        """
        Tests the keyword command version which should add a keyword to the
        subscription.
        """
        keywords = ['vcs', 'contact']
        self.add_keyword_command(self.package.name,
                                 '+',
                                 keywords,
                                 self.user.email)
        self.control_process()
        self.assert_keywords_in_response(keywords)
        self.assert_subscription_has_keywords(keywords)
    def test_remove_keyword_from_subscription(self):
        """
        Tests the keyword command version which should remove a keyword from a
        subscription.
        """
        keywords = ['bts']
        self.add_keyword_command(self.package.name,
                                 '-',
                                 keywords,
                                 self.user.email)
        self.control_process()
        self.assert_keywords_not_in_response(keywords)
        self.assert_subscription_not_has_keywords(keywords)
    def test_set_keywords_for_subscription(self):
        """
        Tests the keyword command version which should set a new keyword list
        for a subscription.
        """
        keywords = ['vcs', 'bts']
        self.add_keyword_command(self.package.name,
                                 '=',
                                 keywords,
                                 self.user.email)
        self.control_process()
        self.assert_in_response(self.get_new_list_of_keywords_text(
            self.package.name, self.user.email))
        self.assert_keywords_in_response(keywords)
        self.assert_subscription_keywords_equal(keywords)
    def test_keyword_email_not_given(self):
        """
        Tests the keyword command when the email is not given.
        """
        self.add_keyword_command(self.package.name, '+', ['vcs'])
        self.control_process()
        self.assert_in_response(self.get_new_list_of_keywords_text(
            self.package.name, self.user.email))
        self.assert_keywords_in_response(['vcs'])
        self.assert_subscription_has_keywords(['vcs'])
    def test_keyword_doesnt_exist(self):
        """
        Tests the keyword command when the given keyword does not exist.
        """
        self.add_keyword_command(self.package.name, '+', ['no-exist'])
        self.control_process()
        self.assert_warning_in_response('no-exist is not a valid keyword')
        # Subscription has not changed.
        self.assert_keywords_in_response(self.default_keywords)
        self.assert_subscription_keywords_equal(self.default_keywords)
    def test_keyword_add_subscription_not_confirmed(self):
        """
        Tests the keyword command when the user has not yet confirmed the
        subscription (it is pending).
        """
        self.subscription.active = False
        self.subscription.save()
        self.add_keyword_command(self.package.name, '+', ['vcs'])
        self.control_process()
        self.assert_in_response(self.get_new_list_of_keywords_text(
            self.package.name, self.user.email))
        self.assert_keywords_in_response(['vcs'])
        self.assert_subscription_has_keywords(['vcs'])
    def test_keyword_add_package_doesnt_exist(self):
        """
        Tests the keyword command when the given package does not exist.
        """
        self.add_keyword_command('package-no-exist', '+', ['vcs'])
        self.control_process()
        self.assert_in_response('Package package-no-exist does not exist')
        self.assert_not_in_response(self.get_new_list_of_keywords_text(
            self.package.name, self.user.email))
    def test_keyword_user_not_subscribed(self):
        """
        Tests the keyword command when the user is not subscribed to the given
        package.
        """
        other_user = EmailUser.objects.create(email='other-user@domain.com')
        self.add_keyword_command(self.package.name,
                                 '+',
                                 ['vcs'],
                                 other_user.email)
        self.control_process()
        self.assert_error_user_not_subscribed_in_response(other_user.email,
                                                          self.package.name)
        self.assert_not_in_response(self.get_new_list_of_keywords_text(
            self.package.name, other_user.email))
    def test_keyword_user_doesnt_exist(self):
        """
        Tests the keyword command when the user is not subscribed to any
        package.
        """
        email = 'other-user@domain.com'
        self.add_keyword_command(self.package.name,
                                 '+',
                                 ['vcs'],
                                 email)
        self.control_process()
        self.assert_error_user_not_subscribed_in_response(email,
                                                          self.package.name)
        self.assert_not_in_response(self.get_new_list_of_keywords_text(
            self.package.name, self.user.email))
    def test_keyword_alias_tag(self):
        """
        Tests that tag works as an alias for keyword.
        """
        keywords = ['vcs', 'contact']
        self.add_keyword_command(self.package.name,
                                 '+',
                                 keywords,
                                 self.user.email,
                                 use_tag=True)
        self.control_process()
        self.assert_keywords_in_response(keywords)
        self.assert_subscription_has_keywords(keywords)
class KeywordCommandListSubscriptionSpecific(EmailControlTest,
                                             KeywordCommandHelperMixin):
    """
    Tests the keyword command when used to list keywords associated with a
    subscription.
    """
    def setUp(self):
        super(KeywordCommandListSubscriptionSpecific, self).setUp()
        # Setup a subscription
        self.package = PackageName.objects.create(name='dummy-package')
        self.user = EmailUser.objects.create(email='user@domain.com')
        self.subscription = Subscription.objects.create(
            package=self.package,
            email_user=self.user
        )
        self.commands = []
        self.set_header('From', self.user.email)
    def _to_command_string(self, command):
        """
        Helper method turning a tuple representing a keyword command into a
        string.
        """
        return ' '.join(command)
    def add_keyword_command(self, package, email='', use_tag=False):
        """
        Helper method which queues a keyword (or tag) command listing the
        keywords of a subscription and regenerates the full input message.
        """
        command = 'keyword' if not use_tag else 'tag'
        self.commands.append((
            command,
            package,
            email,
        ))
        self.set_input_lines(self._to_command_string(command)
                             for command in self.commands)
    def get_list_of_keywords(self, package, email):
        """
        Returns the message which should precede the list of keywords for the
        given package/user pair.
        """
        # Use the parameters rather than hard-coding self.package/self.user
        # so the helper describes exactly the pair it was asked about.
        return (
            "Here's the list of accepted keywords associated to package\n"
            "{package} for {user}".format(
                package=package, user=email)
        )
    def test_keyword_user_default(self):
        """
        Tests the keyword command when the subscription is using the user's
        default keywords.
        """
        self.user.default_keywords.add(
            Keyword.objects.create(name='new-keyword'))
        self.add_keyword_command(self.package.name, self.user.email)
        self.control_process()
        self.assert_in_response(
            self.get_list_of_keywords(self.package.name, self.user.email))
        self.assert_keywords_in_response(
            keyword.name for keyword in self.subscription.keywords.all())
    def test_keyword_subscription_specific(self):
        """
        Tests the keyword command when the subscription has specific keywords
        associated with it.
        """
        self.subscription.keywords.add(Keyword.objects.get(name='vcs'))
        self.add_keyword_command(self.package.name, self.user.email)
        self.control_process()
        self.assert_in_response(
            self.get_list_of_keywords(self.package.name, self.user.email))
        self.assert_keywords_in_response(
            keyword.name for keyword in self.subscription.keywords.all())
    def test_keyword_package_doesnt_exist(self):
        """
        Tests the keyword command when the given package does not exist.
        """
        self.add_keyword_command('no-exist', self.user.email)
        self.control_process()
        self.assert_error_in_response('Package no-exist does not exist')
        self.assert_not_in_response("Here's the list of accepted keywords")
    def test_keyword_subscription_not_active(self):
        """
        Tests the keyword command when the user has not yet confirmed the
        subscription to the given package.
        """
        self.subscription.active = False
        self.subscription.save()
        self.add_keyword_command(self.package.name, self.user.email)
        self.control_process()
        self.assert_in_response(
            self.get_list_of_keywords(self.package.name, self.user.email))
        self.assert_keywords_in_response(
            keyword.name for keyword in self.subscription.keywords.all())
    def test_keyword_user_not_subscribed(self):
        """
        Tests the keyword command when the given user is not subscribed to the
        given package.
        """
        self.subscription.delete()
        self.add_keyword_command(self.package.name, self.user.email)
        self.control_process()
        self.assert_response_sent()
        self.assert_error_in_response(
            '{email} is not subscribed to the package {pkg}'.format(
                email=self.user.email,
                pkg=self.package.name)
        )
        self.assert_not_in_response("Here's the list of accepted keywords")
    def test_keyword_email_not_given(self):
        """
        Tests the keyword command when the email is not given in the command.
        """
        self.add_keyword_command(self.package.name)
        self.control_process()
        self.assert_in_response(
            self.get_list_of_keywords(self.package.name, self.user.email))
        self.assert_keywords_in_response(
            keyword.name for keyword in self.subscription.keywords.all())
    def test_tag_same_as_keyword(self):
        """
        Tests that "tag" acts as an alias for "keyword"
        """
        # use_tag=True so the command actually issued is "tag"; without it
        # this test would be identical to the plain keyword tests.
        self.add_keyword_command(self.package.name, self.user.email,
                                 use_tag=True)
        self.control_process()
        self.assert_in_response(
            self.get_list_of_keywords(self.package.name, self.user.email))
        self.assert_keywords_in_response(
            keyword.name for keyword in self.subscription.keywords.all())
class KeywordCommandModifyDefault(EmailControlTest, KeywordCommandHelperMixin):
    """
    Tests the keyword command version which modifies a user's list of default
    keywords.
    """
    def setUp(self):
        super(KeywordCommandModifyDefault, self).setUp()
        # Setup a subscription
        self.user = EmailUser.objects.create(email='user@domain.com')
        # Snapshot the user's default keywords before any command runs.
        self.default_keywords = set([
            keyword.name
            for keyword in self.user.default_keywords.all()
        ])
        self.commands = []
        self.set_header('From', self.user.email)
    def _to_command_string(self, command):
        """
        Helper method turning a tuple representing a keyword command into a
        string.
        """
        # Last tuple element is the keyword list, rendered comma-separated;
        # everything before it is joined with single spaces.
        return ' '.join(
            command[:-1] + (', '.join(command[-1]),)
        )
    def get_new_default_list_output_message(self, email):
        """
        Returns the message which should precede the list of new default
        keywords.
        """
        return (
            "Here's the new default list of accepted "
            "keywords for {email} :".format(email=email)
        )
    def add_keyword_command(self, operator, keywords, email='', use_tag=False):
        """
        Queues a keyword (or tag) command modifying the user's default
        keywords and regenerates the full input message from all queued
        commands.
        """
        command = 'keyword' if not use_tag else 'tag'
        self.commands.append((
            command,
            email,
            operator,
            keywords,
        ))
        self.set_input_lines(self._to_command_string(command)
                             for command in self.commands)
    def assert_keywords_in_user_default_list(self, keywords):
        """
        Asserts that the given keywords are found in the user's list of default
        keywords.
        """
        default_keywords = self.user.default_keywords.all()
        for keyword in keywords:
            self.assertIn(Keyword.objects.get(name=keyword), default_keywords)
    def assert_keywords_not_in_user_default_list(self, keywords):
        """
        Asserts that the given keywords are not found in the user's list of
        default keywords.
        """
        default_keywords = self.user.default_keywords.all()
        for keyword in keywords:
            self.assertNotIn(
                Keyword.objects.get(name=keyword), default_keywords)
    def assert_keywords_user_default_list_equal(self, keywords):
        """
        Asserts that the user's list of default keywords exactly matches the
        given keywords.
        """
        default_keywords = self.user.default_keywords.all()
        self.assertEqual(default_keywords.count(), len(keywords))
        for keyword in default_keywords:
            self.assertIn(keyword.name, keywords)
    def test_keyword_add_default(self):
        """
        Tests that the keyword command adds a new keyword to the user's list of
        default keywords.
        """
        keywords = [
            keyword.name
            for keyword in Keyword.objects.filter(default=False)[:3]
        ]
        self.add_keyword_command('+', keywords, self.user.email)
        self.control_process()
        self.assert_in_response(
            self.get_new_default_list_output_message(self.user.email))
        self.assert_keywords_in_response(keywords)
        self.assert_keywords_in_user_default_list(keywords)
    def test_keyword_remove_default(self):
        """
        Tests that the keyword command removes keywords from the user's list of
        default keywords.
        """
        keywords = [
            keyword.name
            for keyword in Keyword.objects.filter(default=True)[:3]
        ]
        self.add_keyword_command('-', keywords, self.user.email)
        self.control_process()
        self.assert_in_response(
            self.get_new_default_list_output_message(self.user.email))
        self.assert_keywords_not_in_response(keywords)
        self.assert_keywords_not_in_user_default_list(keywords)
    def test_keyword_set_default(self):
        """
        Tests that the keyword command sets a new list of the user's default
        keywords.
        """
        keywords = [
            keyword.name
            for keyword in Keyword.objects.filter(default=False)[:5]
        ]
        keywords.extend(
            keyword.name
            for keyword in Keyword.objects.filter(default=True)[:2]
        )
        # NOTE(review): the operator is deliberately padded with spaces
        # (' = ') — presumably exercising whitespace tolerance of the command
        # parser; confirm before normalizing.
        self.add_keyword_command(' = ', keywords, self.user.email)
        self.control_process()
        self.assert_in_response(
            self.get_new_default_list_output_message(self.user.email))
        self.assert_keywords_in_response(keywords)
        self.assert_keywords_user_default_list_equal(keywords)
    def test_keyword_email_not_given(self):
        """
        Tests the keyword command when the email is not given.
        """
        keywords = [
            keyword.name
            for keyword in Keyword.objects.filter(default=False)[:3]
        ]
        # NOTE(review): leading space in the operator (' +') appears
        # intentional (parser whitespace tolerance) — confirm.
        self.add_keyword_command(' +', keywords)
        self.control_process()
        self.assert_in_response(
            self.get_new_default_list_output_message(self.user.email))
        self.assert_keywords_in_response(keywords)
        self.assert_keywords_in_user_default_list(keywords)
    def test_keyword_doesnt_exist(self):
        """
        Tests the keyword command when a nonexistent keyword is given.
        """
        self.add_keyword_command('+', ['no-exist'])
        self.control_process()
        self.assert_warning_in_response('no-exist is not a valid keyword')
        self.assert_keywords_not_in_response(['no-exist'])
    def test_user_doesnt_exist(self):
        """
        Tests adding a keyword to a user's default list of subscriptions when
        the given user is not subscribed to any packages (it does not exist yet)
        """
        all_default_keywords = [
            keyword.name
            for keyword in Keyword.objects.filter(default=True)
        ]
        new_user = 'doesnt-exist@domain.com'
        keywords = [Keyword.objects.filter(default=False)[0].name]
        self.add_keyword_command('+', keywords, new_user)
        self.control_process()
        # User created
        self.assertEqual(EmailUser.objects.filter_by_email(new_user).count(), 1)
        self.assert_in_response(
            self.get_new_default_list_output_message(new_user))
        self.assert_keywords_in_response(keywords + all_default_keywords)
class KeywordCommandShowDefault(EmailControlTest, KeywordCommandHelperMixin):
    """
    Tests the keyword command version which displays a user's list of default
    keywords.
    """
    def setUp(self):
        super(KeywordCommandShowDefault, self).setUp()
        self.user = EmailUser.objects.create(email='user@domain.com')
        # Make the user's default list differ from the global defaults.
        self.user.default_keywords.add(
            Keyword.objects.filter(default=False)[0])
        self.set_header('From', self.user.email)
    def get_default_keywords_list_message(self, email):
        """
        Returns the message which should precede the list of all default
        keywords in the output of the command.
        """
        return (
            "Here's the default list of accepted keywords for {email}:".format(
                email=email)
        )
    def _assert_default_keywords_shown(self, user):
        """
        Helper method asserting that the response contains the list of the
        given user's default keywords.
        """
        self.assert_in_response(
            self.get_default_keywords_list_message(user.email))
        self.assert_keywords_in_response(
            keyword.name for keyword in user.default_keywords.all()
        )
    def test_show_default_keywords(self):
        """
        Tests that the keyword command outputs all default keywords of a user.
        """
        self.set_input_lines(['keyword ' + self.user.email])
        self.control_process()
        self._assert_default_keywords_shown(self.user)
    def test_show_default_keywords_email_not_given(self):
        """
        Tests that the keyword command shows all default keywords of a user
        when the email is not given in the command.
        """
        self.set_input_lines(['keyword'])
        self.control_process()
        self._assert_default_keywords_shown(self.user)
    def test_show_default_keywords_email_no_subscriptions(self):
        """
        Tests that the keyword command returns a list of default keywords for
        users that are not subscribed to any packages.
        """
        email = 'no-exist@domain.com'
        self.set_input_lines(['keyword ' + email])
        self.control_process()
        # User created first...
        self.assertEqual(EmailUser.objects.filter_by_email(email).count(), 1)
        user = EmailUser.objects.get(user_email__email=email)
        self._assert_default_keywords_shown(user)
    def test_tag_alias_for_keyword(self):
        """
        Tests that "tag" is an alias for "keyword"
        """
        self.set_input_lines(['tag ' + self.user.email])
        self.control_process()
        self._assert_default_keywords_shown(self.user)
    def test_tags_alias_for_keyword(self):
        """
        Tests that 'tags' is an alias for 'keyword'
        """
        self.set_input_lines(['tags ' + self.user.email])
        self.control_process()
        self._assert_default_keywords_shown(self.user)
    def test_keywords_alias_for_keyword(self):
        """
        Tests that 'keywords' is an alias for 'keyword'
        """
        self.set_input_lines(['keywords ' + self.user.email])
        self.control_process()
        self._assert_default_keywords_shown(self.user)
class SubscribeToPackageTest(EmailControlTest):
    """
    Tests for the subscribe to package story.
    """
    def setUp(self):
        super(SubscribeToPackageTest, self).setUp()
        self.user_email_address = 'dummy-user@domain.com'
        self.set_header('From',
                        'Dummy User <{user_email}>'.format(
                            user_email=self.user_email_address))
        # Regular expression to extract the confirmation code from the body of
        # the response mail
        self.regexp = re.compile(r'^CONFIRM (.*)$', re.MULTILINE)
        self.package = PackageName.objects.create(
            source=True,
            name='dummy-package')
    def user_subscribed(self, email_address):
        """
        Helper method checks whether the given email is subscribed to the
        package.
        """
        return EmailUser.objects.is_user_subscribed_to(
            user_email=email_address,
            package_name=self.package.name)
    def assert_confirmation_sent_to(self, email_address):
        """
        Helper method checks whether a confirmation mail was sent to the
        given email address.
        """
        # outbox[:-1]: the last outgoing message is presumably the command
        # response to the sender, not a confirmation mail — confirm.
        self.assertIn(
            True, (
                extract_email_address_from_header(msg.to[0]) == email_address
                for msg in mail.outbox[:-1]
            )
        )
    def add_binary_package(self, source_package, binary_package):
        """
        Helper method which creates a binary package for the given source
        package.
        """
        binary_pkg = BinaryPackageName.objects.create(
            name=binary_package)
        src_pkg_name = SourcePackageName.objects.get(name=source_package.name)
        src_pkg, _ = SourcePackage.objects.get_or_create(
            source_package_name=src_pkg_name, version='1.0.0')
        src_pkg.binary_packages = [binary_pkg]
        src_pkg.save()
    def add_subscribe_command(self, package, email=None):
        """
        Helper method which adds a subscribe command to the command message.
        """
        if not email:
            email = ''
        # Append to any commands already present in the message payload.
        payload = self.message.get_payload() or ''
        commands = payload.splitlines()
        commands.append('subscribe ' + package + ' ' + email)
        self.set_input_lines(commands)
    def get_not_source_nor_binary_warning(self, package_name):
        """
        Returns the warning text emitted when the given package is neither a
        source nor a binary package.
        """
        return (
            '{pkg} is neither a source package nor a binary package.'.format(
                pkg=package_name)
        )
    def test_subscribe_and_confirm_normal(self):
        """
        Tests that the user is subscribed to the package after running
        subscribe and confirm.
        """
        package_name = self.package.name
        self.add_subscribe_command(package_name, self.user_email_address)
        self.control_process()
        self.assert_in_response(
            'A confirmation mail has been sent to {email}'.format(
                email=self.user_email_address))
        self.assert_confirmation_sent_to(self.user_email_address)
        # User still not actually subscribed
        self.assertFalse(self.user_subscribed(self.user_email_address))
        # Check that the confirmation mail contains the confirmation code
        match = self.regex_search_in_response(self.regexp)
        self.assertIsNotNone(match)
        # Extract the code and send a confirmation mail
        self.reset_message()
        self.reset_outbox()
        self.set_input_lines([match.group(0)])
        self.control_process()
        self.assert_in_response(
            '{email} has been subscribed to {package}'.format(
                email=self.user_email_address,
                package=package_name))
        self.assertTrue(self.user_subscribed(self.user_email_address))
    def test_subscribe_when_user_already_subscribed(self):
        """
        Tests the subscribe command in the case that the user is trying to
        subscribe to a package he is already subscribed to.
        """
        # Make sure the user is already subscribed.
        Subscription.objects.create_for(
            package_name=self.package.name,
            email=self.user_email_address
        )
        # Try subscribing again
        self.add_subscribe_command(self.package.name, self.user_email_address)
        self.control_process()
        self.assert_warning_in_response(
            '{email} is already subscribed to {package}'.format(
                email=self.user_email_address,
                package=self.package.name))
    def test_subscribe_no_email_given(self):
        """
        Tests the subscribe command when there is no email address given.
        """
        # The From header address is used instead.
        self.add_subscribe_command(self.package.name)
        self.control_process()
        self.assert_confirmation_sent_to(self.user_email_address)
    def test_subscribe_email_different_than_from(self):
        """
        Tests the subscribe command when the given email address is different
        than the From address of the received message.
        """
        subscribe_email_address = 'another-user@domain.com'
        self.assertNotEqual(
            subscribe_email_address,
            self.user_email_address,
            'The test checks the case when <email> is different than From'
        )
        self.add_subscribe_command(self.package.name, subscribe_email_address)
        self.control_process()
        self.assert_cc_contains_address(subscribe_email_address)
        self.assert_confirmation_sent_to(subscribe_email_address)
    def test_subscribe_unexisting_source_package(self):
        """
        Tests the subscribe command when the given package is not an existing
        source package.
        """
        binary_package = 'binary-package'
        self.add_binary_package(self.package, binary_package)
        self.add_subscribe_command(binary_package)
        self.control_process()
        self.assert_warning_in_response(
            '{package} is not a source package.'.format(
                package=binary_package))
        self.assert_in_response(
            '{package} is the source package '
            'for the {binary} binary package'.format(
                package=self.package.name,
                binary=binary_package))
        self.assert_confirmation_sent_to(self.user_email_address)
    def test_subscribe_unexisting_package(self):
        """
        Tests the subscribe command when the given package is not an existing
        source, binary or pseudo package.
        """
        package_name = 'random-package-name'
        self.add_subscribe_command(package_name)
        self.control_process()
        self.assert_warning_in_response(
            self.get_not_source_nor_binary_warning(package_name))
        self.assert_warning_in_response(
            'Package {package} is not even a pseudo package'.format(
                package=package_name))
        self.assert_confirmation_sent_to(self.user_email_address)
        # A new package was created.
        self.assertIsNotNone(get_or_none(PackageName, name=package_name))
    def test_subscribe_subscription_only_package(self):
        """
        Tests that when subscribing to a subscription-only package the correct
        warning is displayed even when it already contains subscriptions.
        """
        package_name = 'random-package-name'
        Subscription.objects.create_for(
            email='user@domain.com', package_name=package_name)
        # Make sure the package actually exists before running the test
        pkg = get_or_none(PackageName, name=package_name)
        self.assertIsNotNone(pkg)
        self.assertFalse(pkg.binary)
        self.add_subscribe_command(package_name)
        self.control_process()
        self.assert_warning_in_response(
            self.get_not_source_nor_binary_warning(package_name))
        self.assert_warning_in_response(
            'Package {package} is not even a pseudo package'.format(
                package=package_name))
        self.assert_confirmation_sent_to(self.user_email_address)
    def test_subscribe_pseudo_package(self):
        """
        Tests the subscribe command when the given package is an existing
        pseudo-package.
        """
        pseudo_package = 'pseudo-package'
        PackageName.pseudo_packages.create(name=pseudo_package)
        self.add_subscribe_command(pseudo_package)
        self.control_process()
        self.assert_warning_in_response(
            self.get_not_source_nor_binary_warning(pseudo_package))
        self.assert_warning_in_response(
            'Package {package} is a pseudo package'.format(
                package=pseudo_package))
        self.assert_confirmation_sent_to(self.user_email_address)
    def test_subscribe_execute_once(self):
        """
        If the command message includes the same subscribe command multiple
        times, it is executed only once.
        """
        self.add_subscribe_command(self.package.name)
        self.add_subscribe_command(self.package.name, self.user_email_address)
        self.control_process()
        # Only one confirmation email required as the subscribe commands are
        # equivalent.
        self.assert_response_sent(2)
    def test_confirm_expired(self):
        """
        Tests that an expired confirmation does not subscribe the user.
        """
        # Set up an expired CommandConfirmation object.
        c = CommandConfirmation.objects.create_for_commands(
            ['subscribe {package} {user}'.format(user=self.user_email_address,
                                                 package=self.package.name)])
        delta = timedelta(days=settings.PTS_CONFIRMATION_EXPIRATION_DAYS + 1)
        c.date_created = c.date_created - delta
        c.save()
        self.set_input_lines(['confirm ' + c.confirmation_key])
        self.control_process()
        self.assert_error_in_response('Confirmation failed')
class UnsubscribeFromPackageTest(EmailControlTest):
"""
Tests for the unsubscribe from package story.
"""
def setUp(self):
super(UnsubscribeFromPackageTest, self).setUp()
self.user_email_address = 'dummy-user@domain.com'
self.set_header('From',
'Dummy User <{user_email}>'.format(
user_email=self.user_email_address))
self.package = PackageName.objects.create(
source=True,
name='dummy-package')
self.other_package = PackageName.objects.create(name='other-package')
# The user is initially subscribed to the package
Subscription.objects.create_for(
package_name=self.package.name,
email=self.user_email_address)
self.other_user = 'another-user@domain.com'
Subscription.objects.create_for(
package_name=self.package.name,
email=self.other_user)
# Regular expression to extract the confirmation code from the body of
# the response mail
self.regexp = re.compile(r'^CONFIRM (.*)$', re.MULTILINE)
def user_subscribed(self, email_address):
"""
Helper method checks whether the given email is subscribed to the
package.
"""
return email_address in (
user_email.email
for user_email in self.package.subscriptions.all()
)
def assert_confirmation_sent_to(self, email_address):
"""
Helper method checks whether a confirmation mail was sent to the
given email address.
"""
self.assertIn(
True, (
extract_email_address_from_header(msg.to[0]) == email_address
for msg in mail.outbox[:-1]
)
)
def assert_not_subscribed_error_in_response(self, email):
self.assert_error_in_response(
"{email} is not subscribed, you can't unsubscribe.".format(
email=email))
def add_binary_package(self, source_package, binary_package):
"""
Helper method which creates a binary package for the given source
package.
"""
binary_pkg = BinaryPackageName.objects.create(
name=binary_package)
src_pkg_name = SourcePackageName.objects.get(name=source_package.name)
src_pkg, _ = SourcePackage.objects.get_or_create(
source_package_name=src_pkg_name, version='1.0.0')
src_pkg.binary_packages = [binary_pkg]
src_pkg.save()
def add_unsubscribe_command(self, package, email=None):
"""
Helper method which adds a subscribe command to the command message.
"""
if not email:
email = ''
payload = self.message.get_payload() or ''
commands = payload.splitlines()
commands.append('unsubscribe ' + package + ' ' + email)
self.set_input_lines(commands)
def test_unsubscribe_and_confirm_normal(self):
"""
Tests that the user is unsubscribed from the pacakge after running
unsubscribe and confirm.
"""
package_name = self.package.name
self.add_unsubscribe_command(package_name, self.user_email_address)
self.control_process()
self.assert_in_response(
'A confirmation mail has been sent to {email}'.format(
email=self.user_email_address))
self.assert_confirmation_sent_to(self.user_email_address)
# User still not actually unsubscribed
self.assertTrue(self.user_subscribed(self.user_email_address))
# Check that the confirmation mail contains the confirmation code
match = self.regex_search_in_response(self.regexp)
self.assertIsNotNone(match)
# Extract the code and send a confirmation mail
self.reset_message()
self.reset_outbox()
self.set_input_lines([match.group(0)])
self.control_process()
self.assert_in_response(
'{email} has been unsubscribed from {package}'.format(
email=self.user_email_address,
package=package_name))
# User no longer subscribed
self.assertFalse(self.user_subscribed(self.user_email_address))
def test_unsubscribe_when_user_not_subscribed(self):
"""
Tests the unsubscribe command when the user is not subscribed to the
given package.
"""
self.add_unsubscribe_command(self.other_package.name,
self.user_email_address)
self.control_process()
self.assert_not_subscribed_error_in_response(self.user_email_address)
def test_unsubscribe_inactive_subscription(self):
"""
Tests the unsubscribe command when the user's subscription is not
active.
"""
Subscription.objects.create_for(
package_name=self.other_package.name,
email=self.user_email_address,
active=False)
self.add_unsubscribe_command(self.other_package.name,
self.user_email_address)
self.control_process()
self.assert_not_subscribed_error_in_response(self.user_email_address)
def test_unsubscribe_no_email_given(self):
"""
Tests the unsubscribe command when there is no email address given.
"""
self.add_unsubscribe_command(self.package.name)
self.control_process()
self.assert_confirmation_sent_to(self.user_email_address)
def test_unsubscribe_email_different_than_from(self):
"""
Tests the unsubscribe command when the given email address is different
than the From address of the received message.
"""
self.add_unsubscribe_command(self.package.name,
self.other_user)
self.control_process()
self.assert_cc_contains_address(self.other_user)
self.assert_confirmation_sent_to(self.other_user)
    def test_unsubscribe_unexisting_source_package(self):
        """
        Tests the unsubscribe command when the given package is not an existing
        source package, but is a known binary package.
        """
        binary_package = 'binary-package'
        self.add_binary_package(self.package, binary_package)
        self.add_unsubscribe_command(binary_package)
        self.control_process()
        # The command warns about the binary name and points the user at the
        # corresponding source package instead.
        self.assert_in_response(
            'Warning: {package} is not a source package.'.format(
                package=binary_package))
        self.assert_in_response(
            '{package} is the source package '
            'for the {binary} binary package'.format(
                package=self.package.name,
                binary=binary_package))
    def test_unsubscribe_unexisting_source_or_binary_package(self):
        """
        Tests the unsubscribe command when the given package is neither an
        existing source nor an existing binary package.
        """
        # No source/binary mapping is created here, so the name is
        # completely unknown to the system.
        binary_package = 'binary-package'
        self.add_unsubscribe_command(binary_package)
        self.control_process()
        self.assert_warning_in_response(
            '{package} is neither a source package '
            'nor a binary package.'.format(package=binary_package))
    def test_unsubscribe_execute_once(self):
        """
        If the command message includes equivalent unsubscribe commands
        multiple times, the command is executed only once.
        """
        # With and without an explicit address both commands resolve to the
        # same (package, sender) pair, so they are duplicates.
        self.add_unsubscribe_command(self.package.name)
        self.add_unsubscribe_command(self.package.name, self.user_email_address)
        self.control_process()
        # Only one confirmation email required as the commands are equivalent
        self.assert_response_sent(2)
class UnsubscribeallCommandTest(EmailControlTest):
    """
    Tests for the unsubscribeall command.
    """
    def setUp(self):
        super(UnsubscribeallCommandTest, self).setUp()
        self.user_email_address = 'dummy-user@domain.com'
        self.set_header('From',
                        'Dummy User <{user_email}>'.format(
                            user_email=self.user_email_address))
        self.package = PackageName.objects.create(name='dummy-package')
        self.other_package = PackageName.objects.create(name='other-package')
        # The user starts out with one active and one inactive subscription.
        Subscription.objects.create_for(
            package_name=self.package.name,
            email=self.user_email_address)
        Subscription.objects.create_for(
            package_name=self.other_package.name,
            email=self.user_email_address,
            active=False)
        self.user = EmailUser.objects.get(email=self.user_email_address)
        # Regular expression to extract the confirmation code from the body of
        # the response mail
        self.regexp = re.compile(r'^CONFIRM (.*)$', re.MULTILINE)
    def assert_confirmation_sent_to(self, email_address):
        """
        Helper method checks whether a confirmation mail was sent to the
        given email address.
        The last message in the outbox is the response to the control
        message itself, so only the messages before it are examined.
        """
        # assertTrue(any(...)) gives a clearer failure message than the
        # previous assertIn(True, <generator>) and is consistent with
        # TeamCommandsMixin.assert_confirmation_sent_to below.
        self.assertTrue(any(
            extract_email_address_from_header(msg.to[0]) == email_address
            for msg in mail.outbox[:-1]
        ))
    def test_unsubscribeall_and_confirm(self):
        """
        Tests the unsubscribeall command with the confirmation.
        """
        old_subscriptions = [pkg.name for pkg in self.user.packagename_set.all()]
        self.set_input_lines(['unsubscribeall ' + self.user.email])
        self.control_process()
        self.assert_in_response(
            "A confirmation mail has been sent to " + self.user.email)
        self.assert_confirmation_sent_to(self.user.email)
        # Extract the confirmation code from the response...
        match = self.regex_search_in_response(self.regexp)
        self.assertIsNotNone(match)
        self.reset_message()
        self.reset_outbox()
        # ...and send it back to confirm the operation.
        self.set_input_lines([match.group(0)])
        self.control_process()
        self.assert_in_response('All your subscriptions have been terminated')
        self.assert_list_in_response(
            '{email} has been unsubscribed from {pkg}@{fqdn}'.format(
                email=self.user.email,
                pkg=package,
                fqdn=settings.PTS_FQDN)
            for package in sorted(old_subscriptions)
        )
    def test_unsubscribeall_no_subscriptions(self):
        """
        Tests the unsubscribeall command when the user is not subscribed to any
        packages.
        """
        self.user.subscription_set.all().delete()
        self.set_input_lines(['unsubscribeall ' + self.user.email])
        self.control_process()
        self.assert_warning_in_response(
            'User {email} is not subscribed to any packages'.format(
                email=self.user.email))
    def test_unsubscribeall_email_different_than_from(self):
        """
        Tests the unsubscribeall when the email given in the command is
        different than the one in the From header.
        """
        self.set_input_lines(['unsubscribeall ' + self.user.email])
        self.set_header('From', 'other-email@domain.com')
        self.control_process()
        # The affected user (not the sender) is CC-ed and receives the
        # confirmation request.
        self.assert_cc_contains_address(self.user.email)
        self.assert_confirmation_sent_to(self.user.email)
    def test_unsubscribeall_no_email_given(self):
        """
        Tests the unsubscribeall command when no email is given in the message.
        """
        self.set_input_lines(['unsubscribeall'])
        self.control_process()
        # The command falls back to the From address of the control message.
        self.assert_confirmation_sent_to(self.user.email)
class WhichCommandTest(EmailControlTest):
    """
    Tests for the which command.
    """
    def setUp(self):
        super(WhichCommandTest, self).setUp()
        self.packages = []
        for index in range(10):
            self.packages.append(
                PackageName.objects.create(name='package' + str(index)))
        self.user = EmailUser.objects.create(email='user@domain.com')
    def assert_no_subscriptions_in_response(self):
        """Assert that the response reports that no subscriptions exist."""
        self.assert_in_response('No subscriptions found')
    def test_list_packages_subscribed_to(self):
        """
        Tests that the which command lists the right packages.
        """
        # Subscribe the user to the first five packages only.
        subscriptions = []
        for package in self.packages[:5]:
            subscriptions.append(Subscription.objects.create(
                package=package,
                email_user=self.user))
        self.set_input_lines(['which ' + self.user.email])
        self.control_process()
        self.assert_list_in_response(
            subscription.package.name for subscription in subscriptions)
    def test_list_packages_no_email_given(self):
        """
        Tests that the which command lists the right packages when no email is
        given.
        """
        subscriptions = []
        for package in self.packages[:5]:
            subscriptions.append(Subscription.objects.create(
                package=package,
                email_user=self.user))
        # The sender's address is used when the command gives no email.
        self.set_header('From', self.user.email)
        self.set_input_lines(['which'])
        self.control_process()
        self.assert_list_in_response(
            subscription.package.name for subscription in subscriptions)
    def test_list_packages_no_subscriptions(self):
        """
        Tests the which command when the user is not subscribed to any packages.
        """
        self.set_input_lines(['which ' + self.user.email])
        self.control_process()
        self.assert_no_subscriptions_in_response()
    def test_list_packages_no_active_subscriptions(self):
        """
        Tests the which command when the user does not have any active
        subscriptions.
        """
        # An inactive subscription must not show up in the output.
        Subscription.objects.create(
            email_user=self.user,
            package=self.packages[0],
            active=False)
        self.set_input_lines(['which ' + self.user.email])
        self.control_process()
        self.assert_no_subscriptions_in_response()
class WhoCommandTest(EmailControlTest):
    """
    Tests for the who command.
    """
    def setUp(self):
        super(WhoCommandTest, self).setUp()
        self.package = PackageName.objects.create(name='dummy-package')
        self.users = [
            EmailUser.objects.create(email='user@domain.com'),
            EmailUser.objects.create(email='second-user@domain.com'),
        ]
    def get_command_message(self):
        """
        Helper function returns the message that the command should output
        before the list of all packages.
        """
        # NOTE(review): relies on str(self.package) rendering the package
        # name -- confirm PackageName's string representation does that.
        return "Here's the list of subscribers to package {package}".format(
            package=self.package)
    def test_list_all_subscribers(self):
        """
        Tests that all subscribers are output.
        """
        # Subscribe users
        for user in self.users:
            Subscription.objects.create(email_user=user, package=self.package)
        self.set_input_lines(['who ' + self.package.name])
        self.control_process()
        self.assert_in_response(self.get_command_message())
        # Check that all users are in the response
        # Only the local part (before the '@') of each address may appear.
        for user in self.users:
            self.assert_in_response(user.email.rsplit('@', 1)[0])
        # Check that their exact addresses aren't
        # (full addresses must be obfuscated in the output).
        for user in self.users:
            self.assert_not_in_response(user.email)
    def test_package_does_not_exist(self):
        """
        Tests the who command when the given package does not exist.
        """
        self.set_input_lines(['who no-exist'])
        self.control_process()
        self.assert_in_response('Package no-exist does not exist')
    def test_no_subscribers(self):
        """
        Tests the who command when the given package does not have any
        subscribers.
        """
        self.set_input_lines(['who ' + self.package.name])
        self.control_process()
        self.assert_in_response(
            'Package {package} does not have any subscribers'.format(
                package=self.package.name))
class TeamCommandsMixin(object):
    """
    Shared fixture and helpers for the team-related control command tests:
    a user owning one team which tracks one package.
    """
    def setUp(self):
        super(TeamCommandsMixin, self).setUp()
        self.password = 'asdf'
        self.user = User.objects.create_user(
            main_email='user@domain.com', password=self.password,
            first_name='', last_name='')
        self.team = Team.objects.create_with_slug(
            owner=self.user, name="Team name")
        self.package = PackageName.objects.create(name='dummy')
        self.team.packages.add(self.package)
        # Make the owner's first email address a member of the team.
        self.team.add_members(self.user.emails.all()[:1])
    def get_confirmation_text(self, email):
        """Return the response text announcing a confirmation mail for email."""
        return 'A confirmation mail has been sent to {}'.format(email)
    def assert_confirmation_sent_to(self, email_address):
        """
        Asserts that a confirmation mail has been sent to the given email.
        """
        # A confirmation mail is recognised by a Subject starting with
        # 'CONFIRM' and by listing the address among its recipients.
        self.assertTrue(any(
            msg.message()['Subject'].startswith('CONFIRM') and
            email_address in msg.to
            for msg in mail.outbox
        ))
class JoinTeamCommandsTests(TeamCommandsMixin, EmailControlTest):
    """
    Tests for the join-team control command.
    """
    def setUp(self):
        super(JoinTeamCommandsTests, self).setUp()
        # A second user who is not (yet) a member of the team.
        self.email_user = EmailUser.objects.create(email='other@domain.com')
        self.set_header('From', self.email_user.email)
    def get_join_command(self, team, email=''):
        """Return a join-team command line for the given team slug and email."""
        return 'join-team {} {}'.format(team, email)
    def get_joined_message(self, team):
        """Return the expected success message for joining the team."""
        return 'You have successfully joined the team "{}"'.format(team.name)
    def get_private_error(self, team):
        """Return the expected error message for a private team."""
        return (
            "The given team is not public. "
            "Please contact {} if you wish to join".format(
                team.owner.main_email)
        )
    def get_no_exist_error(self, team):
        """Return the expected error message for an unknown team slug."""
        return 'Team with the slug "{}" does not exist.'.format(team)
    def get_is_member_warning(self):
        """Return the expected warning when the user is already a member."""
        return 'You are already a member of the team.'
    def test_join_public_team(self):
        """
        Tests that users can join a public team.
        """
        self.set_input_lines([self.get_join_command(self.team.slug)])
        self.control_process()
        # Confirmation mail sent
        self.assert_confirmation_sent_to(self.email_user.email)
        # The response to the original control message indicates that
        self.assert_in_response(self.get_confirmation_text(self.email_user.email))
        # The user isn't a member of the team yet
        self.assertNotIn(self.email_user, self.team.members.all())
        # A confirmation instance is created
        self.assertEqual(1, CommandConfirmation.objects.count())
        confirmation = CommandConfirmation.objects.all()[0]
        # Send the confirmation mail now
        self.reset_outbox()
        self.set_input_lines(['CONFIRM ' + confirmation.confirmation_key])
        self.control_process()
        # The response indicates that the user has joined the team
        self.assert_in_response(self.get_joined_message(self.team))
        # The user now really is in the team
        self.assertIn(self.email_user, self.team.members.all())
    def test_join_public_team_different_from(self):
        """
        Tests that a confirmation mail is sent to the user being added to the
        team, not the user who sent the control command.
        """
        # NOTE(review): passes the EmailUser object itself; relies on its
        # string representation being the email address -- confirm.
        self.set_input_lines([self.get_join_command(self.team.slug, self.email_user)])
        self.set_header('From', 'different-user@domain.com')
        self.control_process()
        # The confirmation sent to the user being added to the team
        self.assert_confirmation_sent_to(self.email_user.email)
    def test_join_private_team(self):
        """
        Tests that trying to join a private team fails.
        """
        self.team.public = False
        self.team.save()
        self.set_input_lines([self.get_join_command(self.team.slug)])
        self.control_process()
        self.assert_error_in_response(self.get_private_error(self.team))
    def test_join_non_existing_team(self):
        """
        Tests that trying to join a non-existing team fails.
        """
        team_slug = 'team-does-not-exist'
        self.set_input_lines([self.get_join_command(team_slug)])
        self.control_process()
        self.assert_error_in_response(self.get_no_exist_error(team_slug))
    def test_join_team_already_member(self):
        """
        Tests that a user gets a warning when trying to join a team he is
        already a member of.
        """
        self.team.add_members([self.email_user])
        self.set_input_lines([self.get_join_command(self.team.slug, self.email_user)])
        self.control_process()
        self.assert_warning_in_response(self.get_is_member_warning())
class LeaveTeamCommandTests(TeamCommandsMixin, EmailControlTest):
    """
    Tests for the leave-team control command.
    """
    def setUp(self):
        super(LeaveTeamCommandTests, self).setUp()
        # A second user who starts out as a member of the team.
        self.email_user = EmailUser.objects.create(email='other@domain.com')
        self.team.add_members([self.email_user])
        self.set_header('From', self.email_user.email)
    def get_leave_command(self, team, email=''):
        """Return a leave-team command line for the given team slug and email."""
        return 'leave-team {} {}'.format(team, email)
    def get_left_team_message(self, team):
        """Return the expected success message for leaving the team."""
        return 'You have successfully left the team "{}" (slug: {})'.format(
            team,
            team.slug)
    def get_is_not_member_warning(self):
        """Return the expected warning when the user is not a member."""
        return 'You are not a member of the team.'
    def get_no_exist_error(self, team):
        """Return the expected error message for an unknown team slug."""
        return 'Team with the slug "{}" does not exist.'.format(team)
    def test_leave_team(self):
        """
        Tests the normal situation where the user leaves a team he is a
        member of.
        """
        self.set_input_lines([self.get_leave_command(self.team.slug)])
        self.control_process()
        # A confirmation sent to the user
        self.assert_confirmation_sent_to(self.email_user.email)
        # Which is displayed in the response to the original message
        self.assert_in_response(self.get_confirmation_text(self.email_user.email))
        # The user is still a member of the team
        self.assertIn(self.email_user, self.team.members.all())
        # A confirmation is created
        self.assertEqual(1, CommandConfirmation.objects.count())
        confirmation = CommandConfirmation.objects.all()[0]
        # Now confirm the email
        self.reset_outbox()
        self.set_input_lines(['CONFIRM ' + confirmation.confirmation_key])
        self.control_process()
        # The user notified that he has left the team
        self.assert_in_response(self.get_left_team_message(self.team))
        # The user is no longer a member of the team
        self.assertNotIn(self.email_user, self.team.members.all())
    def test_leave_team_different_from(self):
        """
        Tests that a confirmation message is sent to the user being removed
        from the team, not the one that sent the control message.
        """
        self.set_input_lines(
            [self.get_leave_command(self.team.slug, self.email_user.email)])
        self.set_header('From', 'some-other-user@domain.com')
        self.control_process()
        # Confirmation sent to the user being removed from the team
        self.assert_confirmation_sent_to(self.email_user.email)
    def test_leave_team_not_member(self):
        """
        Tests that a warning is returned when the user tries to leave a team
        that he is not a member of.
        """
        # Undo the membership set up in setUp first.
        self.team.remove_members([self.email_user])
        self.set_input_lines([self.get_leave_command(self.team.slug)])
        self.control_process()
        self.assert_warning_in_response(self.get_is_not_member_warning())
    def test_leave_team_does_not_exist(self):
        """
        Tests that an error is returned when the user tries to leave a team
        that does not even exist.
        """
        team_slug = 'this-does-not-exist'
        self.set_input_lines([self.get_leave_command(team_slug)])
        self.control_process()
        self.assert_error_in_response(self.get_no_exist_error(team_slug))
class ListTeamPackagesTests(TeamCommandsMixin, EmailControlTest):
    """
    Tests for the list-team-packages control command.
    """
    def setUp(self):
        super(ListTeamPackagesTests, self).setUp()
        # Add some more packages to the team
        self.team.packages.create(name='pkg1')
        self.team.packages.create(name='pkg2')
    def get_list_team_packages_command(self, team):
        """Return a list-team-packages command line for the given team slug."""
        return 'list-team-packages {}'.format(team)
    def get_private_error(self):
        """Return the expected error message for a private team."""
        return (
            "The team is private. "
            "Only team members can see its packages.")
    def test_get_public_team_packages(self):
        """
        Tests that a public team's packages can be obtained by any user.
        """
        self.set_input_lines([self.get_list_team_packages_command(self.team.slug)])
        self.control_process()
        # Packages are listed in alphabetical order.
        self.assert_list_in_response(
            package.name
            for package in self.team.packages.all().order_by('name'))
    def test_get_private_team_packages_non_member(self):
        """
        Tests that getting a private team's packages is not possible by a
        user that is not a member of the team.
        """
        self.team.public = False
        self.team.save()
        self.set_input_lines([self.get_list_team_packages_command(self.team.slug)])
        self.control_process()
        self.assert_error_in_response(self.get_private_error())
    def test_get_private_team_packages_member(self):
        """
        Tests that getting a private team's packages is possible by a
        member of the team.
        """
        self.team.public = False
        self.team.save()
        # Add a member to the team
        email_user = EmailUser.objects.create(email='member@domain.com')
        self.team.add_members([email_user])
        # Set the from field so that the member sends the control email
        self.set_header('From', email_user.email)
        self.set_input_lines([self.get_list_team_packages_command(self.team.slug)])
        self.control_process()
        # The packages are output in the response
        self.assert_list_in_response(
            package.name
            for package in self.team.packages.all().order_by('name'))
class WhichTeamsCommandTests(TeamCommandsMixin, EmailControlTest):
    """
    Tests for the which-teams control command.
    """
    def setUp(self):
        super(WhichTeamsCommandTests, self).setUp()
        # Set up more teams
        self.teams = [
            self.team,
            Team.objects.create_with_slug(name='Other team', owner=self.user),
            Team.objects.create_with_slug(name='Some team', owner=self.user),
        ]
        self.email_user = EmailUser.objects.create(email='other@domain.com')
    def get_which_teams_command(self, email=''):
        """Return a which-teams command line for the given email address."""
        return 'which-teams {}'.format(email)
    def get_no_teams_warning(self, email):
        """Return the expected warning when the user has no memberships."""
        return '{} is not a member of any team.'.format(email)
    def test_user_member_of_teams(self):
        """
        Test that all the user's team memberships are output.
        """
        member_of = self.teams[:2]
        not_member_of = self.teams[2:]
        for team in member_of:
            team.add_members([self.email_user])
        self.set_input_lines([self.get_which_teams_command(self.email_user.email)])
        self.control_process()
        # The teams that the user is subscribed too are output in the response
        # (slugs listed ordered by team name).
        self.assert_list_in_response([
            team.slug
            for team in self.email_user.teams.all().order_by('name')
        ])
        # The teams the user is not subscribed to are not found in the response
        for team in not_member_of:
            self.assert_list_item_not_in_response(team.slug)
    def test_user_not_member_of_any_team(self):
        """
        Tests the situation when the user is not a member of any teams.
        """
        self.set_input_lines([self.get_which_teams_command(self.email_user.email)])
        self.control_process()
        self.assert_warning_in_response(self.get_no_teams_warning(self.email_user.email))
| gpl-2.0 |
seanwestfall/django | tests/custom_columns/tests.py | 207 | 4090 | from __future__ import unicode_literals
from django.core.exceptions import FieldError
from django.test import TestCase
from django.utils import six
from .models import Article, Author
class CustomColumnsTests(TestCase):
    """
    Tests for models that use custom database column and table names
    (``db_column``/``db_table``): the ORM must keep exposing the Python
    field names, while the raw column names must not be usable in queries
    or as attributes.
    """
    def setUp(self):
        self.a1 = Author.objects.create(first_name="John", last_name="Smith")
        self.a2 = Author.objects.create(first_name="Peter", last_name="Jones")
        self.authors = [self.a1, self.a2]
        self.article = Article.objects.create(headline="Django lets you build Web apps easily", primary_author=self.a1)
        # Direct m2m assignment (pre-Django-1.10 style) subscribes both authors.
        self.article.authors = self.authors
    def test_query_all_available_authors(self):
        self.assertQuerysetEqual(
            Author.objects.all(), [
                "Peter Jones", "John Smith",
            ],
            six.text_type
        )
    def test_get_first_name(self):
        self.assertEqual(
            Author.objects.get(first_name__exact="John"),
            self.a1,
        )
    def test_filter_first_name(self):
        self.assertQuerysetEqual(
            Author.objects.filter(first_name__exact="John"), [
                "John Smith",
            ],
            six.text_type
        )
    def test_field_error(self):
        # 'firstname' is the database column name, not the field name, so
        # the ORM must refuse to resolve it.
        self.assertRaises(
            FieldError,
            lambda: Author.objects.filter(firstname__exact="John")
        )
    def test_attribute_error(self):
        # Column names must not leak through as instance attributes either.
        with self.assertRaises(AttributeError):
            self.a1.firstname
        with self.assertRaises(AttributeError):
            self.a1.last
    def test_get_all_authors_for_an_article(self):
        self.assertQuerysetEqual(
            self.article.authors.all(), [
                "Peter Jones",
                "John Smith",
            ],
            six.text_type
        )
    def test_get_all_articles_for_an_author(self):
        self.assertQuerysetEqual(
            self.a1.article_set.all(), [
                "Django lets you build Web apps easily",
            ],
            lambda a: a.headline
        )
    def test_get_author_m2m_relation(self):
        self.assertQuerysetEqual(
            self.article.authors.filter(last_name='Jones'), [
                "Peter Jones"
            ],
            six.text_type
        )
    def test_author_querying(self):
        self.assertQuerysetEqual(
            Author.objects.all().order_by('last_name'),
            ['<Author: Peter Jones>', '<Author: John Smith>']
        )
    def test_author_filtering(self):
        self.assertQuerysetEqual(
            Author.objects.filter(first_name__exact='John'),
            ['<Author: John Smith>']
        )
    def test_author_get(self):
        self.assertEqual(self.a1, Author.objects.get(first_name__exact='John'))
    def test_filter_on_nonexistent_field(self):
        # The error message lists the *field* names (plus the custom pk
        # attribute Author_ID), never the raw column names.
        self.assertRaisesMessage(
            FieldError,
            "Cannot resolve keyword 'firstname' into field. Choices are: Author_ID, article, first_name, last_name, primary_set",
            Author.objects.filter,
            firstname__exact='John'
        )
    def test_author_get_attributes(self):
        a = Author.objects.get(last_name__exact='Smith')
        self.assertEqual('John', a.first_name)
        self.assertEqual('Smith', a.last_name)
        self.assertRaisesMessage(
            AttributeError,
            "'Author' object has no attribute 'firstname'",
            getattr,
            a, 'firstname'
        )
        self.assertRaisesMessage(
            AttributeError,
            "'Author' object has no attribute 'last'",
            getattr,
            a, 'last'
        )
    def test_m2m_table(self):
        # The m2m relation works through the custom join table in both
        # directions, including filtering.
        self.assertQuerysetEqual(
            self.article.authors.all().order_by('last_name'),
            ['<Author: Peter Jones>', '<Author: John Smith>']
        )
        self.assertQuerysetEqual(
            self.a1.article_set.all(),
            ['<Article: Django lets you build Web apps easily>']
        )
        self.assertQuerysetEqual(
            self.article.authors.filter(last_name='Jones'),
            ['<Author: Peter Jones>']
        )
| bsd-3-clause |
sidmitra/django_nonrel_testapp | django/core/management/commands/diffsettings.py | 411 | 1296 | from django.core.management.base import NoArgsCommand
def module_to_dict(module, omittable=lambda k: k.startswith('_')):
    """
    Converts a module namespace to a Python dictionary mapping each kept
    attribute name to the repr() of its value. Used by get_settings_diff.

    ``omittable`` is a predicate deciding which names to skip; by default
    all underscore-prefixed (private) names are omitted.
    """
    # Feed dict() a generator instead of building a throwaway list first;
    # behavior is unchanged.
    return dict((k, repr(v)) for k, v in module.__dict__.items() if not omittable(k))
class Command(NoArgsCommand):
    help = """Displays differences between the current settings.py and Django's
    default settings. Settings that don't appear in the defaults are
    followed by "###"."""

    # This command only reads settings; no model validation is needed.
    requires_model_validation = False

    def handle_noargs(self, **options):
        """
        Return the settings diff as one "NAME = repr(value)" line per
        setting that differs from (or is absent in) the global defaults.
        """
        # Inspired by Postfix's "postconf -n".
        from django.conf import settings, global_settings

        # Because settings are imported lazily, we need to explicitly load them.
        settings._setup()

        user_settings = module_to_dict(settings._wrapped)
        default_settings = module_to_dict(global_settings)

        output = []
        # sorted() replaces the keys()/sort() pair: identical output, more
        # idiomatic, and also works where keys() returns a view.
        for key in sorted(user_settings):
            if key not in default_settings:
                output.append("%s = %s ###" % (key, user_settings[key]))
            elif user_settings[key] != default_settings[key]:
                output.append("%s = %s" % (key, user_settings[key]))
        return '\n'.join(output)
| bsd-3-clause |
RudoCris/horizon | openstack_dashboard/dashboards/admin/instances/forms.py | 40 | 3555 | # Copyright 2013 Kylin OS, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import messages
from openstack_dashboard import api
class LiveMigrateForm(forms.SelfHandlingForm):
    """
    Admin-dashboard form that live migrates an instance to another
    compute host via the Nova API.
    """
    # The instance's current host, shown read-only for reference.
    current_host = forms.CharField(label=_("Current Host"),
                                   required=False,
                                   widget=forms.TextInput(
                                       attrs={'readonly': 'readonly'}))
    host = forms.ChoiceField(label=_("New Host"),
                             help_text=_("Choose a Host to migrate to."))
    disk_over_commit = forms.BooleanField(label=_("Disk Over Commit"),
                                          initial=False, required=False)
    block_migration = forms.BooleanField(label=_("Block Migration"),
                                         initial=False, required=False)
    def __init__(self, request, *args, **kwargs):
        super(LiveMigrateForm, self).__init__(request, *args, **kwargs)
        initial = kwargs.get('initial', {})
        instance_id = initial.get('instance_id')
        # Carry the instance id through the form as a hidden field so that
        # handle() knows which server to migrate.
        self.fields['instance_id'] = forms.CharField(widget=forms.HiddenInput,
                                                     initial=instance_id)
        self.fields['host'].choices = self.populate_host_choices(request,
                                                                 initial)
    def populate_host_choices(self, request, initial):
        """
        Build the sorted (value, label) choice list of candidate targets:
        every compute host except the one the instance currently runs on.
        A placeholder entry is prepended as the first choice.
        """
        hosts = initial.get('hosts')
        current_host = initial.get('current_host')
        host_list = [(host.host_name,
                      host.host_name)
                     for host in hosts
                     if (host.service.startswith('compute') and
                         host.host_name != current_host)]
        if host_list:
            host_list.insert(0, ("", _("Select a new host")))
        else:
            host_list.insert(0, ("", _("No other hosts available")))
        return sorted(host_list)
    def handle(self, request, data):
        """
        Trigger the live migration. Returns True on success; on failure
        shows an error and redirects back to the instances index.
        """
        try:
            block_migration = data['block_migration']
            disk_over_commit = data['disk_over_commit']
            api.nova.server_live_migrate(request,
                                         data['instance_id'],
                                         data['host'],
                                         block_migration=block_migration,
                                         disk_over_commit=disk_over_commit)
            msg = _('The instance is preparing the live migration '
                    'to host "%s".') % data['host']
            messages.success(request, msg)
            return True
        except Exception:
            msg = _('Failed to live migrate instance to '
                    'host "%s".') % data['host']
            redirect = reverse('horizon:admin:instances:index')
            exceptions.handle(request, msg, redirect=redirect)
Azure/azure-sdk-for-python | sdk/billing/azure-mgmt-billing/azure/mgmt/billing/aio/operations/_instructions_operations.py | 1 | 12262 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class InstructionsOperations:
"""InstructionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.billing.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        # Pipeline client used to send HTTP requests to the service.
        self._client = client
        # Serializer/deserializer for request and response payloads.
        self._serialize = serializer
        self._deserialize = deserializer
        # Service client configuration (credentials, policies, ...).
        self._config = config
    def list_by_billing_profile(
        self,
        billing_account_name: str,
        billing_profile_name: str,
        **kwargs
    ) -> AsyncIterable["_models.InstructionListResult"]:
        """Lists the instructions by billing profile id.
        :param billing_account_name: The ID that uniquely identifies a billing account.
        :type billing_account_name: str
        :param billing_profile_name: The ID that uniquely identifies a billing profile.
        :type billing_profile_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either InstructionListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.billing.models.InstructionListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.InstructionListResult"]
        # Map auth/not-found/conflict status codes to the richer azure-core
        # exception types; callers can extend the map via 'error_map'.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-05-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # First page: build the parameterized URL. Later pages reuse the
            # service-provided next_link verbatim (no query parameters added).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_by_billing_profile.metadata['url']  # type: ignore
                path_format_arguments = {
                    'billingAccountName': self._serialize.url("billing_account_name", billing_account_name, 'str'),
                    'billingProfileName': self._serialize.url("billing_profile_name", billing_profile_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Unpack one page: the continuation link plus this page's items.
            deserialized = self._deserialize('InstructionListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch one page through the pipeline and fail fast on non-200.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_billing_profile.metadata = {'url': '/providers/Microsoft.Billing/billingAccounts/{billingAccountName}/billingProfiles/{billingProfileName}/instructions'}  # type: ignore
    async def get(
        self,
        billing_account_name: str,
        billing_profile_name: str,
        instruction_name: str,
        **kwargs
    ) -> "_models.Instruction":
        """Get the instruction by name. These are custom billing instructions and are only applicable for
        certain customers.
        :param billing_account_name: The ID that uniquely identifies a billing account.
        :type billing_account_name: str
        :param billing_profile_name: The ID that uniquely identifies a billing profile.
        :type billing_profile_name: str
        :param instruction_name: Instruction Name.
        :type instruction_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Instruction, or the result of cls(response)
        :rtype: ~azure.mgmt.billing.models.Instruction
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.Instruction"]
        # Map auth/not-found/conflict status codes to the richer azure-core
        # exception types; callers can extend the map via 'error_map'.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-05-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'billingAccountName': self._serialize.url("billing_account_name", billing_account_name, 'str'),
            'billingProfileName': self._serialize.url("billing_profile_name", billing_profile_name, 'str'),
            'instructionName': self._serialize.url("instruction_name", instruction_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            # map_error raises only for codes present in error_map; otherwise
            # fall through to a generic HttpResponseError with the ARM body.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('Instruction', pipeline_response)
        if cls:
            # Hand the raw response and deserialized model to the caller hook.
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/providers/Microsoft.Billing/billingAccounts/{billingAccountName}/billingProfiles/{billingProfileName}/instructions/{instructionName}'}  # type: ignore
async def put(
    self,
    billing_account_name: str,
    billing_profile_name: str,
    instruction_name: str,
    parameters: "_models.Instruction",
    **kwargs
) -> "_models.Instruction":
    """Creates or updates an instruction. These are custom billing instructions and are only
    applicable for certain customers.

    :param billing_account_name: The ID that uniquely identifies a billing account.
    :type billing_account_name: str
    :param billing_profile_name: The ID that uniquely identifies a billing profile.
    :type billing_profile_name: str
    :param instruction_name: Instruction Name.
    :type instruction_name: str
    :param parameters: The new instruction.
    :type parameters: ~azure.mgmt.billing.models.Instruction
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: Instruction, or the result of cls(response)
    :rtype: ~azure.mgmt.billing.models.Instruction
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.Instruction"]
    # Map common failure statuses to specific azure-core exceptions; callers
    # may extend/override the mapping via the 'error_map' keyword.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-05-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL from the operation metadata template and path arguments.
    url = self.put.metadata['url']  # type: ignore
    path_format_arguments = {
        'billingAccountName': self._serialize.url("billing_account_name", billing_account_name, 'str'),
        'billingProfileName': self._serialize.url("billing_profile_name", billing_profile_name, 'str'),
        'instructionName': self._serialize.url("instruction_name", instruction_name, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    # Serialize the request body and send the PUT through the client pipeline.
    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'Instruction')
    body_content_kwargs['content'] = body_content
    request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # 200 is the only success status handled for this operation; anything else
    # is raised, either via the error map or as a generic ARM error.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    deserialized = self._deserialize('Instruction', pipeline_response)

    if cls:
        # Custom response hook gets the raw pipeline response as well.
        return cls(pipeline_response, deserialized, {})

    return deserialized
put.metadata = {'url': '/providers/Microsoft.Billing/billingAccounts/{billingAccountName}/billingProfiles/{billingProfileName}/instructions/{instructionName}'}  # type: ignore
| mit |
wberrier/meson | mesonbuild/modules/windows.py | 2 | 3453 | # Copyright 2015 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from .. import mlog
from .. import mesonlib, dependencies, build
from ..mesonlib import MesonException
from . import get_include_args
from . import ModuleReturnValue
from . import ExtensionModule
class WindowsModule(ExtensionModule):
    """Meson extension module providing Windows-specific helpers.

    Currently exposes ``compile_resources`` for building Windows resource
    (.rc) files with either MSVC's ``rc`` or MinGW's ``windres``.
    """

    def detect_compiler(self, compilers):
        # Resources are compiled alongside C/C++ code, so require one of
        # those compilers to pick the matching resource tool below.
        for l in ('c', 'cpp'):
            if l in compilers:
                return compilers[l]
        raise MesonException('Resource compilation requires a C or C++ compiler.')

    def compile_resources(self, state, args, kwargs):
        """Compile Windows resource files into linkable objects.

        Chooses ``rc`` (producing .res) for MSVC, otherwise ``windres``
        (producing .o), and returns a ModuleReturnValue wrapping the
        generated outputs.
        """
        comp = self.detect_compiler(state.compilers)

        extra_args = mesonlib.stringlistify(kwargs.get('args', []))
        inc_dirs = kwargs.pop('include_directories', [])
        if not isinstance(inc_dirs, list):
            inc_dirs = [inc_dirs]
        for incd in inc_dirs:
            if not isinstance(incd.held_object, (str, build.IncludeDirs)):
                raise MesonException('Resource include dirs should be include_directories().')
        extra_args += get_include_args(inc_dirs)

        if comp.id == 'msvc':
            rescomp = dependencies.ExternalProgram('rc', silent=True)
            res_args = extra_args + ['/nologo', '/fo@OUTPUT@', '@INPUT@']
            suffix = 'res'
        else:
            m = 'Argument {!r} has a space which may not work with windres due to ' \
                'a MinGW bug: https://sourceware.org/bugzilla/show_bug.cgi?id=4933'
            for arg in extra_args:
                if ' ' in arg:
                    mlog.warning(m.format(arg))
            rescomp_name = None
            # FIXME: Does not handle `native: true` executables, see
            # https://github.com/mesonbuild/meson/issues/1531
            if state.environment.is_cross_build():
                # If cross compiling see if windres has been specified in the
                # cross file before trying to find it another way.
                rescomp_name = state.environment.cross_info.config['binaries'].get('windres')
            if rescomp_name is None:
                # Pick-up env var WINDRES if set. This is often used for
                # specifying an arch-specific windres.
                rescomp_name = os.environ.get('WINDRES', 'windres')
            rescomp = dependencies.ExternalProgram(rescomp_name, silent=True)
            res_args = extra_args + ['@INPUT@', '@OUTPUT@']
            suffix = 'o'
        if not rescomp.found():
            raise MesonException('Could not find Windows resource compiler %s.' % ' '.join(rescomp.get_command()))
        # One generator invocation per input file; @BASENAME@ keeps outputs
        # named after their sources.
        res_kwargs = {'output': '@BASENAME@.' + suffix,
                      'arguments': res_args}
        res_gen = build.Generator([rescomp], res_kwargs)
        res_output = res_gen.process_files('Windows resource', args, state)
        return ModuleReturnValue(res_output, [res_output])
def initialize():
    """Module entry point: Meson calls this to instantiate the module."""
    return WindowsModule()
| apache-2.0 |
Jgarcia-IAS/localizacion | openerp/addons-extra/odoo-pruebas/odoo-server/addons/project/wizard/project_task_delegate.py | 195 | 6463 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from lxml import etree
from openerp import tools
from openerp.tools.translate import _
from openerp.osv import fields, osv
class project_task_delegate(osv.osv_memory):
    """Wizard that delegates a project task to another user.

    A new task is created for the delegate, while the current task becomes a
    validation ("CHECK: ...") task for the delegating user.
    """
    _name = 'project.task.delegate'
    _description = 'Task Delegate'

    _columns = {
        'name': fields.char('Delegated Title', required=True, help="New title of the task delegated to the user"),
        'prefix': fields.char('Your Task Title', help="Title for your validation task"),
        'project_id': fields.many2one('project.project', 'Project', help="User you want to delegate this task to"),
        'user_id': fields.many2one('res.users', 'Assign To', required=True, help="User you want to delegate this task to"),
        'new_task_description': fields.text('New Task Description', help="Reinclude the description of the task in the task of the user"),
        'planned_hours': fields.float('Planned Hours', help="Estimated time to close this task by the delegated user"),
        'planned_hours_me': fields.float('Hours to Validate', help="Estimated time for you to validate the work done by the user to whom you delegate this task"),
        'state': fields.selection([('pending','Pending'), ('done','Done'), ], 'Validation State', help="New state of your own task. Pending will be reopened automatically when the delegated task is closed")
    }

    def onchange_project_id(self, cr, uid, ids, project_id=False, context=None):
        # Propose the selected project's manager as the default assignee.
        project_project = self.pool.get('project.project')
        if not project_id:
            return {'value': {'user_id': False}}
        project = project_project.browse(cr, uid, project_id, context=context)
        return {'value': {'user_id': project.user_id and project.user_id.id or False}}

    def default_get(self, cr, uid, fields, context=None):
        """Prefill the wizard from the task in context['active_id']:
        delegated title, 'CHECK:' prefix, remaining hours and description.
        """
        res = super(project_task_delegate, self).default_get(cr, uid, fields, context=context)
        if context is None:
            context = {}
        record_id = context and context.get('active_id', False) or False
        if not record_id:
            return res

        task_pool = self.pool.get('project.task')
        task = task_pool.browse(cr, uid, record_id, context=context)
        task_name = tools.ustr(task.name)

        if 'project_id' in fields:
            res['project_id'] = int(task.project_id.id) if task.project_id else False

        if 'name' in fields:
            # Strip an existing (translated) "CHECK: " marker so it is not
            # duplicated when the task is delegated again.
            if task_name.startswith(_('CHECK: ')):
                newname = tools.ustr(task_name).replace(_('CHECK: '), '')
            else:
                newname = tools.ustr(task_name or '')
            res['name'] = newname
        if 'planned_hours' in fields:
            res['planned_hours'] = task.remaining_hours or 0.0
        if 'prefix' in fields:
            if task_name.startswith(_('CHECK: ')):
                newname = tools.ustr(task_name).replace(_('CHECK: '), '')
            else:
                newname = tools.ustr(task_name or '')
            prefix = _('CHECK: %s') % newname
            res['prefix'] = prefix
        if 'new_task_description' in fields:
            res['new_task_description'] = task.description
        return res

    _defaults = {
        'planned_hours_me': 1.0,
        'state': 'pending',
    }

    def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
        # When the company measures project time in a unit other than hours,
        # downgrade float_time widgets and relabel 'Hours' fields accordingly.
        res = super(project_task_delegate, self).fields_view_get(cr, uid, view_id, view_type, context, toolbar, submenu=submenu)
        users_pool = self.pool.get('res.users')
        obj_tm = users_pool.browse(cr, uid, uid, context=context).company_id.project_time_mode_id
        tm = obj_tm and obj_tm.name or 'Hours'
        if tm in ['Hours', 'Hour']:
            return res

        eview = etree.fromstring(res['arch'])

        def _check_rec(eview):
            # Recursively replace float_time widgets with plain float widgets.
            if eview.attrib.get('widget', '') == 'float_time':
                eview.set('widget', 'float')
            for child in eview:
                _check_rec(child)
            return True

        _check_rec(eview)
        res['arch'] = etree.tostring(eview)
        for field in res['fields']:
            if 'Hours' in res['fields'][field]['string']:
                res['fields'][field]['string'] = res['fields'][field]['string'].replace('Hours', tm)
        return res

    def delegate(self, cr, uid, ids, context=None):
        """Execute the delegation and return an action opening the new task."""
        if context is None:
            context = {}
        task_id = context.get('active_id', False)
        task_pool = self.pool.get('project.task')
        delegate_data = self.read(cr, uid, ids, context=context)[0]
        delegated_tasks = task_pool.do_delegate(cr, uid, [task_id], delegate_data, context=context)
        models_data = self.pool.get('ir.model.data')

        # Resolve the standard task action/views so the client opens the
        # freshly created (delegated) task in form view.
        action_model, action_id = models_data.get_object_reference(cr, uid, 'project', 'action_view_task')
        view_model, task_view_form_id = models_data.get_object_reference(cr, uid, 'project', 'view_task_form2')
        view_model, task_view_tree_id = models_data.get_object_reference(cr, uid, 'project', 'view_task_tree2')
        action = self.pool[action_model].read(cr, uid, [action_id], context=context)[0]
        action['res_id'] = delegated_tasks[task_id]
        action['view_id'] = False
        action['views'] = [(task_view_form_id, 'form'), (task_view_tree_id, 'tree')]
        action['help'] = False
        return action
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
laserson/ibis | ibis/__init__.py | 2 | 3771 | # Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# flake8: noqa
__version__ = '0.5.0'
from ibis.filesystems import HDFS, WebHDFS
from ibis.common import IbisError
import ibis.expr.api as api
import ibis.expr.types as ir
# __all__ is defined
from ibis.expr.api import *
import ibis.impala.api as impala
import ibis.sql.sqlite.api as sqlite
import ibis.config_init
from ibis.config import options
import ibis.util as util
# Deprecated alias: warns on use and forwards to ibis.impala.connect.
impala_connect = util.deprecate(impala.connect,
                                'impala_connect is deprecated, use'
                                ' ibis.impala.connect instead')
def make_client(db, hdfs_client=None):
    """
    Create an Ibis client from a database connection and optional additional
    connections (like HDFS).

    The HDFS client, when given, is attached to the database connection
    object, which is then returned unchanged.

    Parameters
    ----------
    db : Connection
      e.g. produced by ibis.impala.connect
    hdfs_client : ibis HDFS client

    Examples
    --------
    >>> con = ibis.impala.connect(**impala_params)
    >>> hdfs = ibis.hdfs_connect(**hdfs_params)
    >>> client = ibis.make_client(con, hdfs_client=hdfs)

    Returns
    -------
    client : IbisClient
    """
    setattr(db, '_hdfs', hdfs_client)
    return db
# Rebind make_client so each call emits a deprecation warning first.
make_client = util.deprecate(
    make_client, ('make_client is deprecated. '
                  'Use ibis.impala.connect '
                  ' with hdfs_client=hdfs_client'))
def hdfs_connect(host='localhost', port=50070, protocol='webhdfs',
                 auth_mechanism='NOSASL', verify=True, **kwds):
    """Connect to HDFS and return an ibis WebHDFS client.

    Parameters
    ----------
    host : string, Host name of the HDFS NameNode
    port : int, NameNode's WebHDFS port (default 50070)
    protocol : {'webhdfs'}; accepted for API compatibility
    auth_mechanism : string, Set to NOSASL or PLAIN for non-secure clusters.
      Set to GSSAPI or LDAP for Kerberos-secured clusters.
    verify : boolean, Set to False to turn off verifying SSL certificates.
      (default True)

    Other keywords are forwarded to hdfs library classes.

    Returns
    -------
    client : WebHDFS
    """
    import requests

    # Reuse a caller-supplied session if present, otherwise create one.
    session = kwds.setdefault('session', requests.Session())
    session.verify = verify

    secured = auth_mechanism in ['GSSAPI', 'LDAP']
    if secured:
        try:
            import requests_kerberos
        except ImportError:
            raise IbisError(
                "Unable to import requests-kerberos, which is required for "
                "Kerberos HDFS support. Install it by executing `pip install "
                "requests-kerberos` or `pip install hdfs[kerberos]`.")
        from hdfs.ext.kerberos import KerberosClient
        # note SSL
        url = 'https://{0}:{1}'.format(host, port)
        kwds.setdefault('mutual_auth', 'OPTIONAL')
        raw_client = KerberosClient(url, **kwds)
    else:
        from hdfs.client import InsecureClient
        url = 'http://{0}:{1}'.format(host, port)
        raw_client = InsecureClient(url, **kwds)

    return WebHDFS(raw_client)
def test(impala=False):
    """Run the ibis test suite under pytest.

    When *impala* is true, the Impala-specific tests are enabled via the
    ``--impala`` flag.
    """
    import os

    import pytest

    import ibis

    package_dir, _ = os.path.split(ibis.__file__)
    pytest_args = ['--pyargs', package_dir]
    if impala:
        pytest_args.append('--impala')
    pytest.main(pytest_args)
# Versioneer: replace the static __version__ defined above with the
# generated version string, then drop the helper from the namespace.
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
| apache-2.0 |
pfnet/chainer | chainer/distributions/gamma.py | 6 | 2914 | import chainer
from chainer.backends import cuda
from chainer import distribution
from chainer.functions.array import broadcast
from chainer.functions.array import where
from chainer.functions.math import digamma
from chainer.functions.math import exponential
from chainer.functions.math import lgamma
from chainer.utils import cache
class Gamma(distribution.Distribution):

    """Gamma Distribution.

    Args:
        k(:class:`~chainer.Variable` or :ref:`ndarray`): Parameter of
            distribution.
        theta(:class:`~chainer.Variable` or :ref:`ndarray`): Parameter of
            distribution.

    """

    def __init__(self, k, theta):
        super(Gamma, self).__init__()
        # Raw parameters are kept private; the public properties below wrap
        # them lazily as chainer Variables.
        self.__k = k
        self.__theta = theta

    @cache.cached_property
    def k(self):
        """Shape parameter, as a :class:`~chainer.Variable`."""
        return chainer.as_variable(self.__k)

    @cache.cached_property
    def theta(self):
        """Scale parameter, as a :class:`~chainer.Variable`."""
        return chainer.as_variable(self.__theta)

    @property
    def batch_shape(self):
        return self.k.shape

    @cache.cached_property
    def entropy(self):
        # Differential entropy of Gamma(k, theta):
        #   k + log(theta) + log Gamma(k) + (1 - k) * digamma(k)
        return (
            self.k
            + exponential.log(self.theta)
            + lgamma.lgamma(self.k)
            + (1 - self.k) * digamma.digamma(self.k))

    @property
    def event_shape(self):
        return ()

    @property
    def _is_gpu(self):
        # True when the shape parameter lives on a CUDA device.
        return isinstance(self.k.data, cuda.ndarray)

    def log_prob(self, x):
        """Return the log density at ``x``; ``-inf`` outside the support."""
        logp = (
            - lgamma.lgamma(self.k)
            - self.k * exponential.log(self.theta)
            + (self.k - 1) * exponential.log(x)
            - x / self.theta)
        xp = logp.xp
        inf = xp.full_like(logp.array, xp.inf)
        if isinstance(x, chainer.Variable):
            x = x.array
        # Mask values below the support (x < 0) with -inf.
        return where.where(xp.asarray(x >= 0), logp, xp.asarray(-inf))

    @cache.cached_property
    def mean(self):
        # E[X] = k * theta
        return self.k * self.theta

    @property
    def params(self):
        return {'k': self.k, 'theta': self.theta}

    def sample_n(self, n):
        """Draw ``n`` reparameterized samples of shape ``(n,) + batch_shape``."""
        xp = chainer.backend.get_array_module(self.k)
        if xp is cuda.cupy:
            # cupy.random.gamma accepts a dtype argument directly.
            eps = xp.random.gamma(
                self.k.data, size=(n,) + self.batch_shape, dtype=self.k.dtype)
        else:
            # numpy.random.gamma has no dtype argument; cast afterwards.
            eps = xp.random.gamma(
                self.k.data, size=(n,) + self.batch_shape).astype(self.k.dtype)
        # Scale the unit-scale gamma noise by theta (reparameterization).
        noise = broadcast.broadcast_to(self.theta, eps.shape) * eps
        return noise

    @property
    def support(self):
        return 'positive'

    @cache.cached_property
    def variance(self):
        # Var[X] = k * theta ** 2
        return self.mean * self.theta
@distribution.register_kl(Gamma, Gamma)
def _kl_gamma_gamma(dist1, dist2):
    """Analytic KL(dist1 || dist2) between two Gamma distributions."""
    shape_term = (dist1.k - dist2.k) * digamma.digamma(dist1.k)
    log_gamma_term = lgamma.lgamma(dist1.k) - lgamma.lgamma(dist2.k)
    scale_term = dist2.k * (exponential.log(dist2.theta)
                            - exponential.log(dist1.theta))
    ratio_term = dist1.k * (dist1.theta / dist2.theta - 1)
    return shape_term - log_gamma_term + scale_term + ratio_term
| mit |
infobloxopen/infoblox-netmri | infoblox_netmri/api/broker/v2_0_0/script_broker.py | 1 | 9418 | from ..broker import Broker
class ScriptBroker(Broker):
    """Broker for the NetMRI ``scripts`` REST controller.

    Each public method is a thin wrapper that forwards its keyword arguments
    unmodified to the corresponding remote action via the base ``Broker``
    request helpers.
    """

    controller = "scripts"

    def index(self, **kwargs):
        """Lists the available scripts. Any of the inputs listed may be be used to narrow the list; other inputs will be ignored. Of the various ways to query lists, using this method is most efficient.

        **Inputs**

        | ``api version min:`` 2
        | ``api version max:`` 2.4
        | ``required:`` False
        | ``default:`` None

        :param id: The internal NetMRI identifier for the script.
        :type id: Integer

        | ``api version min:`` 2.5
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` None

        :param id: The internal NetMRI identifier for the script.
        :type id: Array of Integer

        | ``api version min:`` 2
        | ``api version max:`` 2.4
        | ``required:`` False
        | ``default:`` None

        :param name: The name of this script.
        :type name: String

        | ``api version min:`` 2.5
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` None

        :param name: The name of this script.
        :type name: Array of String

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` 0

        :param start: The record number to return in the selected page of data. It will always appear, although it may not be the first record. See the :limit for more information.
        :type start: Integer

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` 1000

        :param limit: The size of the page of data, that is, the maximum number of records returned. The limit size will be used to break the data up into pages and the first page with the start record will be returned. So if you have 100 records and use a :limit of 10 and a :start of 10, you will get records 10-19. The maximum limit is 10000.
        :type limit: Integer

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` id

        :param sort: The data field(s) to use for sorting the output. Default is id. Valid values are id, name, module, visible, description, created_by, updated_by, language, risk_level, created_at, updated_at, category, read_only, taskflow_create, taskflow_edit, taskflow_revert, target_mapping, transactional_ind.
        :type sort: Array of String

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` asc

        :param dir: The direction(s) in which to sort the data. Default is 'asc'. Valid values are 'asc' and 'desc'.
        :type dir: Array of String

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` None

        :param select: The list of attributes to return for each Script. Valid values are id, name, module, visible, description, created_by, updated_by, language, risk_level, created_at, updated_at, category, read_only, taskflow_create, taskflow_edit, taskflow_revert, target_mapping, transactional_ind. If empty or omitted, all attributes will be returned.
        :type select: Array

        | ``api version min:`` 2.8
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` None

        :param goto_field: The field name for NIOS GOTO that is used for locating a row position of records.
        :type goto_field: String

        | ``api version min:`` 2.8
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` None

        :param goto_value: The value of goto_field for NIOS GOTO that is used for locating a row position of records.
        :type goto_value: String

        **Outputs**

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` None

        :return scripts: An array of the Script objects that match the specified input criteria.
        :rtype scripts: Array of Script

        """
        return self.api_list_request(self._get_method_fullname("index"), kwargs)

    def show(self, **kwargs):
        """Shows the details for the specified script.

        **Inputs**

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` True
        | ``default:`` None

        :param id: The internal NetMRI identifier for the script.
        :type id: Integer

        **Outputs**

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` None

        :return script: The script identified by the specified id.
        :rtype script: Script

        """
        return self.api_request(self._get_method_fullname("show"), kwargs)

    def create(self, **kwargs):
        """Create a CCS script on NetMRI.

        **Inputs**

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` True
        | ``default:`` None

        :param script_file: The contents of the script file to be created.
        :type script_file: String

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` True

        :param overwrite: Overwrite any existing version of the script with the same name.
        :type overwrite: Boolean

        **Outputs**

        """
        return self.api_request(self._get_method_fullname("create"), kwargs)

    def update(self, **kwargs):
        """Update a CCS/Perl/Python script on NetMRI.

        **Inputs**

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` True
        | ``default:`` None

        :param id: The ID of the script to update.
        :type id: Integer

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` True
        | ``default:`` None

        :param script_file: The contents of the script file to be updated.
        :type script_file: String

        | ``api version min:`` 2.1
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` CCS

        :param language: The language the script is written in (CCS, Perl, Python).
        :type language: String

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` 1

        :param overwrite: Overwrite any existing version of the script with the same name.
        :type overwrite: Integer

        **Outputs**

        """
        return self.api_request(self._get_method_fullname("update"), kwargs)

    def destroy(self, **kwargs):
        """Delete a CCS script on NetMRI.

        **Inputs**

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` True
        | ``default:`` None

        :param id: The ID of the script to delete.
        :type id: Integer

        **Outputs**

        """
        return self.api_request(self._get_method_fullname("destroy"), kwargs)

    def run(self, **kwargs):
        """Run a script immediately with specified input. In addition to the listed parameters, optional parameters can be passed. Any parameter name starting with \\$ will be passed as Script-Variable to the script.

        **Inputs**

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` True
        | ``default:`` None

        :param id: The ID of the script to run.
        :type id: Integer

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` None

        :param device_group_ids: A comma delimited string of device group ids. Can be blank if not using device groups.
        :type device_group_ids: String

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` None

        :param device_ids: A command delimited string of device ids. Can be blank if ONLY using device groups.
        :type device_ids: String

        **Outputs**

        | ``api version min:`` None
        | ``api version max:`` None
        | ``required:`` False
        | ``default:`` None

        :return JobID: The JobID of the running script.
        :rtype JobID: Integer

        """
        return self.api_request(self._get_method_fullname("run"), kwargs)
| apache-2.0 |
eino-makitalo/odoo | addons/sales_team/__init__.py | 365 | 1081 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sales_team
import res_config
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
pjg101/SickRage | lib/future/backports/email/_parseaddr.py | 82 | 17389 | # Copyright (C) 2002-2007 Python Software Foundation
# Contact: email-sig@python.org
"""Email address parsing code.
Lifted directly from rfc822.py. This should eventually be rewritten.
"""
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future.builtins import int
# Public API of this module; the parser class below is intentionally private.
__all__ = [
    'mktime_tz',
    'parsedate',
    'parsedate_tz',
    'quote',
]

import time, calendar

SPACE = ' '
EMPTYSTRING = ''
COMMASPACE = ', '

# Parse a date field.  Month names are matched case-insensitively against
# both abbreviated and full forms.
_monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul',
               'aug', 'sep', 'oct', 'nov', 'dec',
               'january', 'february', 'march', 'april', 'may', 'june', 'july',
               'august', 'september', 'october', 'november', 'december']

_daynames = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']

# The timezone table does not include the military time zones defined
# in RFC822, other than Z.  According to RFC1123, the description in
# RFC822 gets the signs wrong, so we can't rely on any such time
# zones.  RFC1123 recommends that numeric timezone indicators be used
# instead of timezone names.
_timezones = {'UT': 0, 'UTC': 0, 'GMT': 0, 'Z': 0,
              'AST': -400, 'ADT': -300,  # Atlantic (used in Canada)
              'EST': -500, 'EDT': -400,  # Eastern
              'CST': -600, 'CDT': -500,  # Central
              'MST': -700, 'MDT': -600,  # Mountain
              'PST': -800, 'PDT': -700   # Pacific
              }
def parsedate_tz(data):
    """Convert a date string to a time tuple.

    Accounts for military timezones.
    """
    parsed = _parsedate_tz(data)
    if not parsed:
        return None
    if parsed[9] is None:
        # A '-0000' zone declaims timezone knowledge; expose it as UTC here.
        parsed[9] = 0
    return tuple(parsed)
def _parsedate_tz(data):
    """Convert date to extended time tuple.

    The last (additional) element is the time zone offset in seconds, except if
    the timezone was specified as -0000.  In that case the last element is
    None.  This indicates a UTC timestamp that explicitly declaims knowledge of
    the source timezone, as opposed to a +0000 timestamp that indicates the
    source timezone really was UTC.
    """
    if not data:
        return
    data = data.split()
    # The FWS after the comma after the day-of-week is optional, so search and
    # adjust for this.
    if data[0].endswith(',') or data[0].lower() in _daynames:
        # There's a dayname here. Skip it
        del data[0]
    else:
        i = data[0].rfind(',')
        if i >= 0:
            data[0] = data[0][i+1:]
    if len(data) == 3:  # RFC 850 date, deprecated
        stuff = data[0].split('-')
        if len(stuff) == 3:
            data = stuff + data[1:]
    if len(data) == 4:
        # Time and zone may be fused, e.g. '13:32:02+0100'; split them apart.
        s = data[3]
        i = s.find('+')
        if i == -1:
            i = s.find('-')
        if i > 0:
            data[3:] = [s[:i], s[i:]]
        else:
            data.append('')  # Dummy tz
    if len(data) < 5:
        return None
    data = data[:5]
    [dd, mm, yy, tm, tz] = data
    mm = mm.lower()
    if mm not in _monthnames:
        # Tolerate 'month day' ordering by swapping day and month.
        dd, mm = mm, dd.lower()
        if mm not in _monthnames:
            return None
    mm = _monthnames.index(mm) + 1
    if mm > 12:
        # Index fell in the full-name half of _monthnames; fold back to 1-12.
        mm -= 12
    if dd[-1] == ',':
        dd = dd[:-1]
    i = yy.find(':')
    if i > 0:
        # Year and time fields are swapped (obsolete syntax); swap back.
        yy, tm = tm, yy
    if yy[-1] == ',':
        yy = yy[:-1]
    if not yy[0].isdigit():
        # Year and zone fields are swapped; swap back.
        yy, tz = tz, yy
    if tm[-1] == ',':
        tm = tm[:-1]
    tm = tm.split(':')
    if len(tm) == 2:
        [thh, tmm] = tm
        tss = '0'
    elif len(tm) == 3:
        [thh, tmm, tss] = tm
    elif len(tm) == 1 and '.' in tm[0]:
        # Some non-compliant MUAs use '.' to separate time elements.
        tm = tm[0].split('.')
        if len(tm) == 2:
            [thh, tmm] = tm
            tss = 0
        elif len(tm) == 3:
            [thh, tmm, tss] = tm
    else:
        return None
    try:
        yy = int(yy)
        dd = int(dd)
        thh = int(thh)
        tmm = int(tmm)
        tss = int(tss)
    except ValueError:
        return None
    # Check for a yy specified in two-digit format, then convert it to the
    # appropriate four-digit format, according to the POSIX standard. RFC 822
    # calls for a two-digit yy, but RFC 2822 (which obsoletes RFC 822)
    # mandates a 4-digit yy. For more information, see the documentation for
    # the time module.
    if yy < 100:
        # The year is between 1969 and 1999 (inclusive).
        if yy > 68:
            yy += 1900
        # The year is between 2000 and 2068 (inclusive).
        else:
            yy += 2000
    tzoffset = None
    tz = tz.upper()
    if tz in _timezones:
        tzoffset = _timezones[tz]
    else:
        try:
            tzoffset = int(tz)
        except ValueError:
            pass
        # '-0000' means "UTC, but source timezone unknown": keep offset None.
        if tzoffset == 0 and tz.startswith('-'):
            tzoffset = None
    # Convert a timezone offset into seconds ; -0500 -> -18000
    if tzoffset:
        if tzoffset < 0:
            tzsign = -1
            tzoffset = -tzoffset
        else:
            tzsign = 1
        tzoffset = tzsign * ((tzoffset//100)*3600 + (tzoffset % 100)*60)
    # Daylight Saving Time flag is set to -1, since DST is unknown.
    return [yy, mm, dd, thh, tmm, tss, 0, 1, -1, tzoffset]
def parsedate(data):
    """Convert a time string to a 9-item time tuple (no timezone element)."""
    parsed = parsedate_tz(data)
    # On failure parsedate_tz returns None; pass that through unchanged.
    return parsed[:9] if isinstance(parsed, tuple) else parsed
def mktime_tz(data):
    """Turn a 10-tuple as returned by parsedate_tz() into a POSIX timestamp."""
    tz_offset = data[9]
    if tz_offset is None:
        # No zone info, so localtime is a better assumption than GMT.
        return time.mktime(data[:8] + (-1,))
    # Interpret the tuple as UTC, then back out the original zone's offset.
    return calendar.timegm(data) - tz_offset
def quote(str):
    """Prepare string to be used in a quoted string.

    Turns backslash and double quote characters into quoted pairs.  These
    are the only characters that need to be quoted inside a quoted string.
    Does not add the surrounding double quotes.
    """
    # Escape backslashes first so the quote escaping is not double-escaped.
    escaped = str.replace('\\', '\\\\')
    return escaped.replace('"', '\\"')
class AddrlistClass(object):
    """Address parser class by Ben Escoto.

    To understand what this class does, it helps to have a copy of RFC 2822 in
    front of you.

    Note: this class interface is deprecated and may be removed in the future.
    Use email.utils.AddressList instead.
    """
    def __init__(self, field):
        """Initialize a new instance.

        `field' is an unparsed address header field, containing
        one or more addresses.
        """
        self.specials = '()<>@,:;.\"[]'  # characters that terminate an atom
        self.pos = 0  # current scan position (index into self.field)
        self.LWS = ' \t'  # linear whitespace
        self.CR = '\r\n'  # line-break characters
        self.FWS = self.LWS + self.CR  # folding whitespace
        self.atomends = self.specials + self.LWS + self.CR
        # Note that RFC 2822 now specifies `.' as obs-phrase, meaning that it
        # is obsolete syntax.  RFC 2822 requires that we recognize obsolete
        # syntax, so allow dots in phrases.
        self.phraseends = self.atomends.replace('.', '')
        self.field = field
        self.commentlist = []  # comments extracted while scanning
    def gotonext(self):
        """Skip white space and extract comments.

        Returns the skipped whitespace (line breaks excluded) as one string.
        """
        wslist = []
        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS + '\n\r':
                # Collect plain whitespace (but not line breaks) so callers
                # can preserve spacing, e.g. inside an addr-spec local part.
                if self.field[self.pos] not in '\n\r':
                    wslist.append(self.field[self.pos])
                self.pos += 1
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            else:
                break
        return EMPTYSTRING.join(wslist)
    def getaddrlist(self):
        """Parse all addresses.

        Returns a list containing all of the addresses.
        """
        result = []
        while self.pos < len(self.field):
            ad = self.getaddress()
            if ad:
                result += ad
            else:
                # Keep a placeholder so malformed input still yields an entry.
                result.append(('', ''))
        return result
    def getaddress(self):
        """Parse the next address."""
        self.commentlist = []
        self.gotonext()
        # Remember this point so an addrspec-only address can be re-parsed
        # from the start (see the '.@' branch below).
        oldpos = self.pos
        oldcl = self.commentlist
        plist = self.getphraselist()
        self.gotonext()
        returnlist = []
        if self.pos >= len(self.field):
            # Bad email address technically, no domain.
            if plist:
                returnlist = [(SPACE.join(self.commentlist), plist[0])]
        elif self.field[self.pos] in '.@':
            # email address is just an addrspec
            # this isn't very efficient since we start over
            self.pos = oldpos
            self.commentlist = oldcl
            addrspec = self.getaddrspec()
            returnlist = [(SPACE.join(self.commentlist), addrspec)]
        elif self.field[self.pos] == ':':
            # address is a group
            returnlist = []
            fieldlen = len(self.field)
            self.pos += 1
            while self.pos < len(self.field):
                self.gotonext()
                if self.pos < fieldlen and self.field[self.pos] == ';':
                    self.pos += 1
                    break
                returnlist = returnlist + self.getaddress()
        elif self.field[self.pos] == '<':
            # Address is a phrase then a route addr
            routeaddr = self.getrouteaddr()
            if self.commentlist:
                returnlist = [(SPACE.join(plist) + ' (' +
                               ' '.join(self.commentlist) + ')', routeaddr)]
            else:
                returnlist = [(SPACE.join(plist), routeaddr)]
        else:
            if plist:
                returnlist = [(SPACE.join(self.commentlist), plist[0])]
            elif self.field[self.pos] in self.specials:
                # Stray special character: skip it and resynchronize.
                self.pos += 1
        self.gotonext()
        if self.pos < len(self.field) and self.field[self.pos] == ',':
            self.pos += 1
        return returnlist
    def getrouteaddr(self):
        """Parse a route address (Return-path value).

        This method just skips all the route stuff and returns the addrspec.
        """
        if self.field[self.pos] != '<':
            return
        expectroute = False
        self.pos += 1
        self.gotonext()
        adlist = ''
        while self.pos < len(self.field):
            if expectroute:
                # A preceding '@' announced a route domain; consume and drop it.
                self.getdomain()
                expectroute = False
            elif self.field[self.pos] == '>':
                self.pos += 1
                break
            elif self.field[self.pos] == '@':
                self.pos += 1
                expectroute = True
            elif self.field[self.pos] == ':':
                self.pos += 1
            else:
                adlist = self.getaddrspec()
                self.pos += 1
                break
            self.gotonext()
        return adlist
    def getaddrspec(self):
        """Parse an RFC 2822 addr-spec."""
        aslist = []
        self.gotonext()
        while self.pos < len(self.field):
            preserve_ws = True
            if self.field[self.pos] == '.':
                # Whitespace around dots is not significant; drop any pending
                # whitespace token before appending the dot.
                if aslist and not aslist[-1].strip():
                    aslist.pop()
                aslist.append('.')
                self.pos += 1
                preserve_ws = False
            elif self.field[self.pos] == '"':
                aslist.append('"%s"' % quote(self.getquote()))
            elif self.field[self.pos] in self.atomends:
                if aslist and not aslist[-1].strip():
                    aslist.pop()
                break
            else:
                aslist.append(self.getatom())
            ws = self.gotonext()
            if preserve_ws and ws:
                aslist.append(ws)
        if self.pos >= len(self.field) or self.field[self.pos] != '@':
            # No domain part; return just the local part.
            return EMPTYSTRING.join(aslist)
        aslist.append('@')
        self.pos += 1
        self.gotonext()
        return EMPTYSTRING.join(aslist) + self.getdomain()
    def getdomain(self):
        """Get the complete domain name from an address."""
        sdlist = []
        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS:
                self.pos += 1
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            elif self.field[self.pos] == '[':
                sdlist.append(self.getdomainliteral())
            elif self.field[self.pos] == '.':
                self.pos += 1
                sdlist.append('.')
            elif self.field[self.pos] in self.atomends:
                break
            else:
                sdlist.append(self.getatom())
        return EMPTYSTRING.join(sdlist)
    def getdelimited(self, beginchar, endchars, allowcomments=True):
        """Parse a header fragment delimited by special characters.

        `beginchar' is the start character for the fragment.
        If self is not looking at an instance of `beginchar' then
        getdelimited returns the empty string.

        `endchars' is a sequence of allowable end-delimiting characters.
        Parsing stops when one of these is encountered.

        If `allowcomments' is non-zero, embedded RFC 2822 comments are allowed
        within the parsed fragment.
        """
        if self.field[self.pos] != beginchar:
            return ''
        slist = ['']
        quote = False  # True when the previous character was a backslash
        self.pos += 1
        while self.pos < len(self.field):
            if quote:
                slist.append(self.field[self.pos])
                quote = False
            elif self.field[self.pos] in endchars:
                self.pos += 1
                break
            elif allowcomments and self.field[self.pos] == '(':
                slist.append(self.getcomment())
                continue        # have already advanced pos from getcomment
            elif self.field[self.pos] == '\\':
                quote = True
            else:
                slist.append(self.field[self.pos])
            self.pos += 1
        return EMPTYSTRING.join(slist)
    def getquote(self):
        """Get a quote-delimited fragment from self's field."""
        return self.getdelimited('"', '"\r', False)
    def getcomment(self):
        """Get a parenthesis-delimited fragment from self's field."""
        return self.getdelimited('(', ')\r', True)
    def getdomainliteral(self):
        """Parse an RFC 2822 domain-literal."""
        return '[%s]' % self.getdelimited('[', ']\r', False)
    def getatom(self, atomends=None):
        """Parse an RFC 2822 atom.

        Optional atomends specifies a different set of end token delimiters
        (the default is to use self.atomends).  This is used e.g. in
        getphraselist() since phrase endings must not include the `.' (which
        is legal in phrases)."""
        atomlist = ['']
        if atomends is None:
            atomends = self.atomends
        while self.pos < len(self.field):
            if self.field[self.pos] in atomends:
                break
            else:
                atomlist.append(self.field[self.pos])
            self.pos += 1
        return EMPTYSTRING.join(atomlist)
    def getphraselist(self):
        """Parse a sequence of RFC 2822 phrases.

        A phrase is a sequence of words, which are in turn either RFC 2822
        atoms or quoted-strings.  Phrases are canonicalized by squeezing all
        runs of continuous whitespace into one space.
        """
        plist = []
        while self.pos < len(self.field):
            if self.field[self.pos] in self.FWS:
                self.pos += 1
            elif self.field[self.pos] == '"':
                plist.append(self.getquote())
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            elif self.field[self.pos] in self.phraseends:
                break
            else:
                plist.append(self.getatom(self.phraseends))
        return plist
class AddressList(AddrlistClass):
    """An AddressList encapsulates a list of parsed RFC 2822 addresses."""

    def __init__(self, field):
        AddrlistClass.__init__(self, field)
        # An empty or None field parses to no addresses at all.
        self.addresslist = self.getaddrlist() if field else []

    def __len__(self):
        return len(self.addresslist)

    def __add__(self, other):
        # Set union: our addresses, plus those of `other` we don't have.
        combined = AddressList(None)
        combined.addresslist = self.addresslist[:]
        combined.addresslist.extend(
            addr for addr in other.addresslist
            if addr not in self.addresslist)
        return combined

    def __iadd__(self, other):
        # Set union, in-place
        for addr in other.addresslist:
            if addr not in self.addresslist:
                self.addresslist.append(addr)
        return self

    def __sub__(self, other):
        # Set difference: our addresses not present in `other`.
        remainder = AddressList(None)
        remainder.addresslist = [addr for addr in self.addresslist
                                 if addr not in other.addresslist]
        return remainder

    def __isub__(self, other):
        # Set difference, in-place
        for addr in other.addresslist:
            if addr in self.addresslist:
                self.addresslist.remove(addr)
        return self

    def __getitem__(self, index):
        # Make indexing, slices, and 'in' work
        return self.addresslist[index]
| gpl-3.0 |
oinopion/django | django/db/backends/oracle/compiler.py | 407 | 2180 | from django.db.models.sql import compiler
class SQLCompiler(compiler.SQLCompiler):
    def as_sql(self, with_limits=True, with_col_aliases=False, subquery=False):
        """
        Creates the SQL for this query. Returns the SQL string and list
        of parameters. This is overridden from the original Query class
        to handle the additional SQL Oracle requires to emulate LIMIT
        and OFFSET.

        If 'with_limits' is False, any limit/offset information is not
        included in the query.
        """
        # An empty slice selects nothing; emit no SQL at all.
        if with_limits and self.query.low_mark == self.query.high_mark:
            return '', ()

        # Oracle has no native LIMIT/OFFSET; whenever either bound is set we
        # must wrap the plain query in ROWNUM-filtering outer SELECTs.
        needs_wrapping = with_limits and (
            self.query.high_mark is not None or self.query.low_mark)

        if not needs_wrapping:
            return super(SQLCompiler, self).as_sql(
                with_limits=False,
                with_col_aliases=with_col_aliases,
                subquery=subquery,
            )

        # Column aliases are forced on so the inner query's columns can be
        # referenced unambiguously from the wrapping SELECT.
        sql, params = super(SQLCompiler, self).as_sql(
            with_limits=False,
            with_col_aliases=True,
            subquery=subquery,
        )
        # Wrap the base query in an outer SELECT * with boundaries on
        # the "_RN" column. This is the canonical way to emulate LIMIT
        # and OFFSET on Oracle.
        high_where = ''
        if self.query.high_mark is not None:
            high_where = 'WHERE ROWNUM <= %d' % (self.query.high_mark,)
        sql = (
            'SELECT * FROM (SELECT "_SUB".*, ROWNUM AS "_RN" FROM (%s) '
            '"_SUB" %s) WHERE "_RN" > %d' % (sql, high_where, self.query.low_mark)
        )
        return sql, params
class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler):
    """Oracle INSERT compiler; reuses the generic implementation unchanged."""
    pass
class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler):
    """Oracle DELETE compiler; reuses the generic implementation unchanged."""
    pass
class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler):
    """Oracle UPDATE compiler; reuses the generic implementation unchanged."""
    pass
class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler):
    """Oracle aggregate compiler; reuses the generic implementation unchanged."""
    pass
| bsd-3-clause |
ddr2108/CS4803-Android_Kenel_Projects | tools/perf/scripts/python/futex-contention.py | 11261 | 1486 | # futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
# Global tracing state.  The original initialized process_names twice
# (once before and once after the thread_* dicts); the duplicate, redundant
# assignment has been removed.
process_names = {}  # long-lived pid-to-execname mapping
thread_thislock = {}  # tid -> futex address the thread is blocked on
thread_blocktime = {}  # tid -> timestamp (ns) at which the thread blocked
lock_waits = {}  # long-lived stats on (tid,lock) blockage elapsed time
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
        nr, uaddr, op, val, utime, uaddr2, val3):
    # Record the moment a thread blocks waiting on a futex.
    if (op & FUTEX_CMD_MASK) != FUTEX_WAIT:
        # we don't care about originators of WAKE events
        return
    process_names[tid] = comm
    thread_thislock[tid] = uaddr
    thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
        nr, ret):
    # On return from the futex syscall, account the time this thread spent
    # blocked and clear its per-thread tracking state.
    # Use the 'in' operator instead of the deprecated dict.has_key()
    # (has_key was removed in Python 3; 'in' works in both 2 and 3).
    if tid in thread_blocktime:
        elapsed = nsecs(s, ns) - thread_blocktime[tid]
        add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
        del thread_blocktime[tid]
        del thread_thislock[tid]
def trace_begin():
    # Called by perf when tracing starts; tell the user how to end the run.
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called by perf at shutdown: dump the per-(thread, lock) contention
    # statistics accumulated in lock_waits by add_stats().
    for (tid, lock) in lock_waits:
        # NOTE(review): these names shadow the builtins min/max; harmless
        # here since the builtins are not used in this function.
        min, max, avg, count = lock_waits[tid, lock]
        print "%s[%d] lock %x contended %d times, %d avg ns" % \
              (process_names[tid], tid, lock, count, avg)
| gpl-2.0 |
wreckJ/intellij-community | plugins/hg4idea/testData/bin/mercurial/bundlerepo.py | 91 | 13705 | # bundlerepo.py - repository class for viewing uncompressed bundles
#
# Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""Repository class for viewing uncompressed bundles.
This provides a read-only repository interface to bundles as if they
were part of the actual repository.
"""
from node import nullid
from i18n import _
import os, tempfile, shutil
import changegroup, util, mdiff, discovery, cmdutil, scmutil
import localrepo, changelog, manifest, filelog, revlog, error
class bundlerevlog(revlog.revlog):
    """A revlog subclass that also serves revisions stored in a bundle.

    Revisions with rev > repotiprev live in the bundle; their index entries
    record the delta's byte offset within the bundle file.
    """
    def __init__(self, opener, indexfile, bundle, linkmapper):
        # How it works:
        # To retrieve a revision, we need to know the offset of the revision in
        # the bundle (an unbundle object). We store this offset in the index
        # (start). The base of the delta is stored in the base field.
        #
        # To differentiate a rev in the bundle from a rev in the revlog, we
        # check revision against repotiprev.
        opener = scmutil.readonlyvfs(opener)
        revlog.revlog.__init__(self, opener, indexfile)
        self.bundle = bundle
        n = len(self)
        self.repotiprev = n - 1
        chain = None
        self.bundlerevs = set() # used by 'bundle()' revset expression
        while True:
            chunkdata = bundle.deltachunk(chain)
            if not chunkdata:
                break
            node = chunkdata['node']
            p1 = chunkdata['p1']
            p2 = chunkdata['p2']
            cs = chunkdata['cs']
            deltabase = chunkdata['deltabase']
            delta = chunkdata['delta']
            size = len(delta)
            start = bundle.tell() - size
            link = linkmapper(cs)
            if node in self.nodemap:
                # this can happen if two branches make the same change
                chain = node
                self.bundlerevs.add(self.nodemap[node])
                continue
            for p in (p1, p2):
                if p not in self.nodemap:
                    raise error.LookupError(p, self.indexfile,
                                            _("unknown parent"))
            if deltabase not in self.nodemap:
                # Fix: raise mercurial's error.LookupError (as the parent
                # check above does), not the builtin LookupError, so callers
                # catching error.LookupError see this failure too.
                raise error.LookupError(deltabase, self.indexfile,
                                        _('unknown delta base'))
            baserev = self.rev(deltabase)
            # start, size, full unc. size, base (unused), link, p1, p2, node
            e = (revlog.offset_type(start, 0), size, -1, baserev, link,
                 self.rev(p1), self.rev(p2), node)
            self.index.insert(-1, e)
            self.nodemap[node] = n
            self.bundlerevs.add(n)
            chain = node
            n += 1
    def _chunk(self, rev):
        # Warning: in case of bundle, the diff is against what we stored as
        # delta base, not against rev - 1
        # XXX: could use some caching
        if rev <= self.repotiprev:
            return revlog.revlog._chunk(self, rev)
        self.bundle.seek(self.start(rev))
        return self.bundle.read(self.length(rev))
    def revdiff(self, rev1, rev2):
        """return or calculate a delta between two revisions"""
        if rev1 > self.repotiprev and rev2 > self.repotiprev:
            # hot path for bundle
            revb = self.index[rev2][3]
            if revb == rev1:
                return self._chunk(rev2)
        elif rev1 <= self.repotiprev and rev2 <= self.repotiprev:
            return revlog.revlog.revdiff(self, rev1, rev2)
        return mdiff.textdiff(self.revision(self.node(rev1)),
                              self.revision(self.node(rev2)))
    def revision(self, nodeorrev):
        """return an uncompressed revision of a given node or revision
        number.
        """
        if isinstance(nodeorrev, int):
            rev = nodeorrev
            node = self.node(rev)
        else:
            node = nodeorrev
            rev = self.rev(node)
        if node == nullid:
            return ""
        text = None
        chain = []
        iterrev = rev
        # reconstruct the revision if it is from a changegroup
        while iterrev > self.repotiprev:
            if self._cache and self._cache[1] == iterrev:
                text = self._cache[2]
                break
            chain.append(iterrev)
            iterrev = self.index[iterrev][3]
        if text is None:
            # Base text lives in the on-disk revlog proper.
            text = revlog.revlog.revision(self, iterrev)
        # Apply bundle deltas from the base outwards to the requested rev.
        while chain:
            delta = self._chunk(chain.pop())
            text = mdiff.patches(text, [delta])
        self._checkhash(text, node, rev)
        self._cache = (node, rev, text)
        return text
    # This revlog view is read-only: all mutating operations are disabled.
    def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
        raise NotImplementedError
    def addgroup(self, revs, linkmapper, transaction):
        raise NotImplementedError
    def strip(self, rev, minlink):
        raise NotImplementedError
    def checksize(self):
        raise NotImplementedError
class bundlechangelog(bundlerevlog, changelog.changelog):
    """Changelog that also serves revisions stored in a bundle."""
    def __init__(self, opener, bundle):
        changelog.changelog.__init__(self, opener)
        # Changelog revisions link to themselves, so the link mapper is
        # simply the identity function.
        def identity(rev):
            return rev
        bundlerevlog.__init__(self, opener, self.indexfile, bundle,
                              identity)
class bundlemanifest(bundlerevlog, manifest.manifest):
    """Manifest that also serves revisions stored in a bundle."""
    def __init__(self, opener, bundle, linkmapper):
        # linkmapper maps a changeset node to its changelog revision number
        # (the caller passes changelog.rev; see bundlerepository.manifest).
        manifest.manifest.__init__(self, opener)
        bundlerevlog.__init__(self, opener, self.indexfile, bundle,
                              linkmapper)
class bundlefilelog(bundlerevlog, filelog.filelog):
    """Filelog that also serves revisions stored in a bundle."""
    def __init__(self, opener, path, bundle, linkmapper, repo):
        filelog.filelog.__init__(self, opener, path)
        bundlerevlog.__init__(self, opener, self.indexfile, bundle,
                              linkmapper)
        self._repo = repo
    def _file(self, f):
        # NOTE(review): the result of self._repo.file(f) is discarded -- this
        # looks like a missing 'return'; confirm against callers before
        # relying on a return value here.
        self._repo.file(f)
class bundlepeer(localrepo.localpeer):
    """Peer for a bundle repository; pushing into a bundle is not possible."""
    def canpush(self):
        return False
class bundlerepository(localrepo.localrepository):
    """A read-only repository view combining a local repo and a bundle file.

    If `path` does not name a usable repository, an empty throw-away parent
    repository is created in a temp directory and removed again in close().
    """
    def __init__(self, ui, path, bundlename):
        self._tempparent = None
        try:
            localrepo.localrepository.__init__(self, ui, path)
        except error.RepoError:
            # No repo at `path`: fabricate an empty temporary parent repo.
            self._tempparent = tempfile.mkdtemp()
            localrepo.instance(ui, self._tempparent, 1)
            localrepo.localrepository.__init__(self, ui, self._tempparent)
        self.ui.setconfig('phases', 'publish', False)
        if path:
            self._url = 'bundle:' + util.expandpath(path) + '+' + bundlename
        else:
            self._url = 'bundle:' + bundlename
        self.tempfile = None
        f = util.posixfile(bundlename, "rb")
        self.bundle = changegroup.readbundle(f, bundlename)
        if self.bundle.compressed():
            # Random access into the bundle requires it uncompressed, so
            # decompress into a temp file and reopen from there.
            fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-",
                                            suffix=".hg10un", dir=self.path)
            self.tempfile = temp
            fptemp = os.fdopen(fdtemp, 'wb')
            try:
                fptemp.write("HG10UN")
                while True:
                    chunk = self.bundle.read(2**18)
                    if not chunk:
                        break
                    fptemp.write(chunk)
            finally:
                fptemp.close()
            f = util.posixfile(self.tempfile, "rb")
            self.bundle = changegroup.readbundle(f, bundlename)
        # dict with the mapping 'filename' -> position in the bundle
        self.bundlefilespos = {}
    @localrepo.unfilteredpropertycache
    def changelog(self):
        # consume the header if it exists
        self.bundle.changelogheader()
        c = bundlechangelog(self.sopener, self.bundle)
        # Side effect: records where the manifest section starts.
        self.manstart = self.bundle.tell()
        return c
    @localrepo.unfilteredpropertycache
    def manifest(self):
        self.bundle.seek(self.manstart)
        # consume the header if it exists
        self.bundle.manifestheader()
        m = bundlemanifest(self.sopener, self.bundle, self.changelog.rev)
        # Side effect: records where the filelog section starts.
        self.filestart = self.bundle.tell()
        return m
    @localrepo.unfilteredpropertycache
    def manstart(self):
        # Accessing self.changelog computes and caches the real offset as a
        # side effect (see changelog above).
        self.changelog
        return self.manstart
    @localrepo.unfilteredpropertycache
    def filestart(self):
        # Accessing self.manifest computes and caches the real offset as a
        # side effect (see manifest above).
        self.manifest
        return self.filestart
    def url(self):
        return self._url
    def file(self, f):
        # Return the filelog for f, bundle-backed when the bundle carries
        # revisions of that file.
        if not self.bundlefilespos:
            # First call: index the start position of every filelog section
            # in the bundle, skipping over each section's delta chunks.
            self.bundle.seek(self.filestart)
            while True:
                chunkdata = self.bundle.filelogheader()
                if not chunkdata:
                    break
                fname = chunkdata['filename']
                self.bundlefilespos[fname] = self.bundle.tell()
                while True:
                    c = self.bundle.deltachunk(None)
                    if not c:
                        break
        if f in self.bundlefilespos:
            self.bundle.seek(self.bundlefilespos[f])
            return bundlefilelog(self.sopener, f, self.bundle,
                                 self.changelog.rev, self)
        else:
            return filelog.filelog(self.sopener, f)
    def close(self):
        """Close assigned bundle file immediately."""
        self.bundle.close()
        if self.tempfile is not None:
            os.unlink(self.tempfile)
        if self._tempparent:
            shutil.rmtree(self._tempparent, True)
    def cancopy(self):
        return False
    def peer(self):
        return bundlepeer(self)
    def getcwd(self):
        return os.getcwd() # always outside the repo
def instance(ui, path, create):
    """Open a bundle repository at `path`; creating one is not supported."""
    if create:
        raise util.Abort(_('cannot create new bundle repository'))
    parentpath = ui.config("bundle", "mainreporoot", "")
    if not parentpath:
        # try to find the correct path to the working directory repo
        parentpath = cmdutil.findrepo(os.getcwd()) or ''
    if parentpath:
        # Try to make the full path relative so we get a nice, short URL.
        # In particular, we don't want temp dir names in test outputs.
        cwd = os.getcwd()
        if parentpath == cwd:
            parentpath = ''
        else:
            prefix = os.path.join(cwd, '')
            if parentpath.startswith(prefix):
                parentpath = parentpath[len(prefix):]
    u = util.url(path)
    path = u.localpath()
    if u.scheme != 'bundle':
        repopath, bundlename = parentpath, path
    else:
        # A 'bundle:' URL may carry an explicit repo path as "repo+bundle".
        parts = path.split("+", 1)
        if len(parts) == 2:
            repopath, bundlename = parts
        else:
            repopath, bundlename = parentpath, parts[0]
    return bundlerepository(ui, repopath, bundlename)
def getremotechanges(ui, repo, other, onlyheads=None, bundlename=None,
                     force=False):
    '''obtains a bundle of changes incoming from other

    "onlyheads" restricts the returned changes to those reachable from the
    specified heads.
    "bundlename", if given, stores the bundle to this file path permanently;
    otherwise it's stored to a temp file and gets deleted again when you call
    the returned "cleanupfn".
    "force" indicates whether to proceed on unrelated repos.

    Returns a tuple (local, csets, cleanupfn):

    "local" is a local repo from which to obtain the actual incoming
    changesets; it is a bundlerepo for the obtained bundle when the
    original "other" is remote.
    "csets" lists the incoming changeset node ids.
    "cleanupfn" must be called without arguments when you're done processing
    the changes; it closes both the original "other" and the one returned
    here.
    '''
    tmp = discovery.findcommonincoming(repo, other, heads=onlyheads,
                                       force=force)
    common, incoming, rheads = tmp
    if not incoming:
        # Nothing incoming: clean up any stale bundle file and hand back the
        # local repo unchanged.
        try:
            if bundlename:
                os.unlink(bundlename)
        except OSError:
            pass
        return repo, [], other.close
    bundle = None
    bundlerepo = None
    localrepo = other.local()
    if bundlename or not localrepo:
        # create a bundle (uncompressed if other repo is not local)
        if other.capable('getbundle'):
            cg = other.getbundle('incoming', common=common, heads=rheads)
        elif onlyheads is None and not other.capable('changegroupsubset'):
            # compat with older servers when pulling all remote heads
            cg = other.changegroup(incoming, "incoming")
            rheads = None
        else:
            cg = other.changegroupsubset(incoming, rheads, 'incoming')
        # Compressed bundle for local peers, uncompressed otherwise (the
        # bundlerepo below needs random access into an uncompressed file).
        bundletype = localrepo and "HG10BZ" or "HG10UN"
        fname = bundle = changegroup.writebundle(cg, bundlename, bundletype)
        # keep written bundle?
        if bundlename:
            bundle = None
        if not localrepo:
            # use the created uncompressed bundlerepo
            localrepo = bundlerepo = bundlerepository(repo.baseui, repo.root,
                                                      fname)
            # this repo contains local and other now, so filter out local again
            common = repo.heads()
    if localrepo:
        # Part of common may be remotely filtered
        # So use an unfiltered version
        # The discovery process probably need cleanup to avoid that
        localrepo = localrepo.unfiltered()
        csets = localrepo.changelog.findmissing(common, rheads)
    def cleanup():
        # Closes the bundle repo (if any), deletes the temp bundle file (if
        # any), and closes the original remote peer.
        if bundlerepo:
            bundlerepo.close()
        if bundle:
            os.unlink(bundle)
        other.close()
    return (localrepo, csets, cleanup)
| apache-2.0 |
pasiegel/SickGear | lib/unidecode/x0d4.py | 253 | 4758 | data = (
'poss', # 0x00
'pong', # 0x01
'poj', # 0x02
'poc', # 0x03
'pok', # 0x04
'pot', # 0x05
'pop', # 0x06
'poh', # 0x07
'pwa', # 0x08
'pwag', # 0x09
'pwagg', # 0x0a
'pwags', # 0x0b
'pwan', # 0x0c
'pwanj', # 0x0d
'pwanh', # 0x0e
'pwad', # 0x0f
'pwal', # 0x10
'pwalg', # 0x11
'pwalm', # 0x12
'pwalb', # 0x13
'pwals', # 0x14
'pwalt', # 0x15
'pwalp', # 0x16
'pwalh', # 0x17
'pwam', # 0x18
'pwab', # 0x19
'pwabs', # 0x1a
'pwas', # 0x1b
'pwass', # 0x1c
'pwang', # 0x1d
'pwaj', # 0x1e
'pwac', # 0x1f
'pwak', # 0x20
'pwat', # 0x21
'pwap', # 0x22
'pwah', # 0x23
'pwae', # 0x24
'pwaeg', # 0x25
'pwaegg', # 0x26
'pwaegs', # 0x27
'pwaen', # 0x28
'pwaenj', # 0x29
'pwaenh', # 0x2a
'pwaed', # 0x2b
'pwael', # 0x2c
'pwaelg', # 0x2d
'pwaelm', # 0x2e
'pwaelb', # 0x2f
'pwaels', # 0x30
'pwaelt', # 0x31
'pwaelp', # 0x32
'pwaelh', # 0x33
'pwaem', # 0x34
'pwaeb', # 0x35
'pwaebs', # 0x36
'pwaes', # 0x37
'pwaess', # 0x38
'pwaeng', # 0x39
'pwaej', # 0x3a
'pwaec', # 0x3b
'pwaek', # 0x3c
'pwaet', # 0x3d
'pwaep', # 0x3e
'pwaeh', # 0x3f
'poe', # 0x40
'poeg', # 0x41
'poegg', # 0x42
'poegs', # 0x43
'poen', # 0x44
'poenj', # 0x45
'poenh', # 0x46
'poed', # 0x47
'poel', # 0x48
'poelg', # 0x49
'poelm', # 0x4a
'poelb', # 0x4b
'poels', # 0x4c
'poelt', # 0x4d
'poelp', # 0x4e
'poelh', # 0x4f
'poem', # 0x50
'poeb', # 0x51
'poebs', # 0x52
'poes', # 0x53
'poess', # 0x54
'poeng', # 0x55
'poej', # 0x56
'poec', # 0x57
'poek', # 0x58
'poet', # 0x59
'poep', # 0x5a
'poeh', # 0x5b
'pyo', # 0x5c
'pyog', # 0x5d
'pyogg', # 0x5e
'pyogs', # 0x5f
'pyon', # 0x60
'pyonj', # 0x61
'pyonh', # 0x62
'pyod', # 0x63
'pyol', # 0x64
'pyolg', # 0x65
'pyolm', # 0x66
'pyolb', # 0x67
'pyols', # 0x68
'pyolt', # 0x69
'pyolp', # 0x6a
'pyolh', # 0x6b
'pyom', # 0x6c
'pyob', # 0x6d
'pyobs', # 0x6e
'pyos', # 0x6f
'pyoss', # 0x70
'pyong', # 0x71
'pyoj', # 0x72
'pyoc', # 0x73
'pyok', # 0x74
'pyot', # 0x75
'pyop', # 0x76
'pyoh', # 0x77
'pu', # 0x78
'pug', # 0x79
'pugg', # 0x7a
'pugs', # 0x7b
'pun', # 0x7c
'punj', # 0x7d
'punh', # 0x7e
'pud', # 0x7f
'pul', # 0x80
'pulg', # 0x81
'pulm', # 0x82
'pulb', # 0x83
'puls', # 0x84
'pult', # 0x85
'pulp', # 0x86
'pulh', # 0x87
'pum', # 0x88
'pub', # 0x89
'pubs', # 0x8a
'pus', # 0x8b
'puss', # 0x8c
'pung', # 0x8d
'puj', # 0x8e
'puc', # 0x8f
'puk', # 0x90
'put', # 0x91
'pup', # 0x92
'puh', # 0x93
'pweo', # 0x94
'pweog', # 0x95
'pweogg', # 0x96
'pweogs', # 0x97
'pweon', # 0x98
'pweonj', # 0x99
'pweonh', # 0x9a
'pweod', # 0x9b
'pweol', # 0x9c
'pweolg', # 0x9d
'pweolm', # 0x9e
'pweolb', # 0x9f
'pweols', # 0xa0
'pweolt', # 0xa1
'pweolp', # 0xa2
'pweolh', # 0xa3
'pweom', # 0xa4
'pweob', # 0xa5
'pweobs', # 0xa6
'pweos', # 0xa7
'pweoss', # 0xa8
'pweong', # 0xa9
'pweoj', # 0xaa
'pweoc', # 0xab
'pweok', # 0xac
'pweot', # 0xad
'pweop', # 0xae
'pweoh', # 0xaf
'pwe', # 0xb0
'pweg', # 0xb1
'pwegg', # 0xb2
'pwegs', # 0xb3
'pwen', # 0xb4
'pwenj', # 0xb5
'pwenh', # 0xb6
'pwed', # 0xb7
'pwel', # 0xb8
'pwelg', # 0xb9
'pwelm', # 0xba
'pwelb', # 0xbb
'pwels', # 0xbc
'pwelt', # 0xbd
'pwelp', # 0xbe
'pwelh', # 0xbf
'pwem', # 0xc0
'pweb', # 0xc1
'pwebs', # 0xc2
'pwes', # 0xc3
'pwess', # 0xc4
'pweng', # 0xc5
'pwej', # 0xc6
'pwec', # 0xc7
'pwek', # 0xc8
'pwet', # 0xc9
'pwep', # 0xca
'pweh', # 0xcb
'pwi', # 0xcc
'pwig', # 0xcd
'pwigg', # 0xce
'pwigs', # 0xcf
'pwin', # 0xd0
'pwinj', # 0xd1
'pwinh', # 0xd2
'pwid', # 0xd3
'pwil', # 0xd4
'pwilg', # 0xd5
'pwilm', # 0xd6
'pwilb', # 0xd7
'pwils', # 0xd8
'pwilt', # 0xd9
'pwilp', # 0xda
'pwilh', # 0xdb
'pwim', # 0xdc
'pwib', # 0xdd
'pwibs', # 0xde
'pwis', # 0xdf
'pwiss', # 0xe0
'pwing', # 0xe1
'pwij', # 0xe2
'pwic', # 0xe3
'pwik', # 0xe4
'pwit', # 0xe5
'pwip', # 0xe6
'pwih', # 0xe7
'pyu', # 0xe8
'pyug', # 0xe9
'pyugg', # 0xea
'pyugs', # 0xeb
'pyun', # 0xec
'pyunj', # 0xed
'pyunh', # 0xee
'pyud', # 0xef
'pyul', # 0xf0
'pyulg', # 0xf1
'pyulm', # 0xf2
'pyulb', # 0xf3
'pyuls', # 0xf4
'pyult', # 0xf5
'pyulp', # 0xf6
'pyulh', # 0xf7
'pyum', # 0xf8
'pyub', # 0xf9
'pyubs', # 0xfa
'pyus', # 0xfb
'pyuss', # 0xfc
'pyung', # 0xfd
'pyuj', # 0xfe
'pyuc', # 0xff
)
| gpl-3.0 |
syscoin/syscoin2 | test/functional/wallet_basic.py | 1 | 27238 | #!/usr/bin/env python3
# Copyright (c) 2014-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the wallet."""
from decimal import Decimal
import time
from test_framework.test_framework import SyscoinTestFramework
from test_framework.util import (
assert_array_result,
assert_equal,
assert_fee_amount,
assert_raises_rpc_error,
connect_nodes,
wait_until,
)
from test_framework.wallet_util import test_address
class WalletTest(SyscoinTestFramework):
def set_test_params(self):
self.num_nodes = 4
self.extra_args = [[
"-acceptnonstdtxn=1",
]] * self.num_nodes
self.setup_clean_chain = True
self.supports_cli = False
    def skip_test_if_missing_module(self):
        # The whole test exercises wallet RPCs; skip on wallet-less builds.
        self.skip_if_no_wallet()
def setup_network(self):
self.setup_nodes()
# Only need nodes 0-2 running at start of test
self.stop_node(3)
connect_nodes(self.nodes[0], 1)
connect_nodes(self.nodes[1], 2)
connect_nodes(self.nodes[0], 2)
self.sync_all(self.nodes[0:3])
def check_fee_amount(self, curr_balance, balance_with_fee, fee_per_byte, tx_size):
"""Return curr_balance after asserting the fee was in range"""
fee = balance_with_fee - curr_balance
assert_fee_amount(fee, tx_size, fee_per_byte * 1000)
return curr_balance
    def get_vsize(self, txn):
        # Virtual size (vbytes) of a raw transaction hex, as decoded by node 0.
        return self.nodes[0].decoderawtransaction(txn)['vsize']
    def run_test(self):
        """Exercise basic wallet RPC behaviour end-to-end.

        The sequence is strictly order-dependent: balances asserted at each
        step depend on every block mined and transaction sent before it, so
        steps must not be reordered.
        """
        # Check that there's no UTXO on none of the nodes
        assert_equal(len(self.nodes[0].listunspent()), 0)
        assert_equal(len(self.nodes[1].listunspent()), 0)
        assert_equal(len(self.nodes[2].listunspent()), 0)
        self.log.info("Mining blocks...")
        self.nodes[0].generate(1)
        walletinfo = self.nodes[0].getwalletinfo()
        assert_equal(walletinfo['immature_balance'], 50)
        assert_equal(walletinfo['balance'], 0)
        self.sync_all(self.nodes[0:3])
        # 101 blocks so node0's coinbase from above matures.
        self.nodes[1].generate(101)
        self.sync_all(self.nodes[0:3])
        assert_equal(self.nodes[0].getbalance(), 50)
        assert_equal(self.nodes[1].getbalance(), 50)
        assert_equal(self.nodes[2].getbalance(), 0)
        # Check that only first and second nodes have UTXOs
        utxos = self.nodes[0].listunspent()
        assert_equal(len(utxos), 1)
        assert_equal(len(self.nodes[1].listunspent()), 1)
        assert_equal(len(self.nodes[2].listunspent()), 0)
        self.log.info("test gettxout")
        confirmed_txid, confirmed_index = utxos[0]["txid"], utxos[0]["vout"]
        # First, outputs that are unspent both in the chain and in the
        # mempool should appear with or without include_mempool
        txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=False)
        assert_equal(txout['value'], 50)
        txout = self.nodes[0].gettxout(txid=confirmed_txid, n=confirmed_index, include_mempool=True)
        assert_equal(txout['value'], 50)
        # Send 21 SYS from 0 to 2 using sendtoaddress call.
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
        mempool_txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
        self.log.info("test gettxout (second part)")
        # utxo spent in mempool should be visible if you exclude mempool
        # but invisible if you include mempool
        txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, False)
        assert_equal(txout['value'], 50)
        txout = self.nodes[0].gettxout(confirmed_txid, confirmed_index, True)
        assert txout is None
        # new utxo from mempool should be invisible if you exclude mempool
        # but visible if you include mempool
        txout = self.nodes[0].gettxout(mempool_txid, 0, False)
        assert txout is None
        txout1 = self.nodes[0].gettxout(mempool_txid, 0, True)
        txout2 = self.nodes[0].gettxout(mempool_txid, 1, True)
        # note the mempool tx will have randomly assigned indices
        # but 10 will go to node2 and the rest will go to node0
        balance = self.nodes[0].getbalance()
        assert_equal(set([txout1['value'], txout2['value']]), set([10, balance]))
        walletinfo = self.nodes[0].getwalletinfo()
        assert_equal(walletinfo['immature_balance'], 0)
        # Have node0 mine a block, thus it will collect its own fee.
        self.nodes[0].generate(1)
        self.sync_all(self.nodes[0:3])
        # Exercise locking of unspent outputs
        unspent_0 = self.nodes[2].listunspent()[0]
        unspent_0 = {"txid": unspent_0["txid"], "vout": unspent_0["vout"]}
        assert_raises_rpc_error(-8, "Invalid parameter, expected locked output", self.nodes[2].lockunspent, True, [unspent_0])
        self.nodes[2].lockunspent(False, [unspent_0])
        assert_raises_rpc_error(-8, "Invalid parameter, output already locked", self.nodes[2].lockunspent, False, [unspent_0])
        # With its only UTXO locked, node2 cannot fund a spend.
        assert_raises_rpc_error(-4, "Insufficient funds", self.nodes[2].sendtoaddress, self.nodes[2].getnewaddress(), 20)
        assert_equal([unspent_0], self.nodes[2].listlockunspent())
        self.nodes[2].lockunspent(True, [unspent_0])
        assert_equal(len(self.nodes[2].listlockunspent()), 0)
        # Malformed lockunspent arguments must be rejected with clear errors.
        assert_raises_rpc_error(-8, "txid must be of length 64 (not 34, for '0000000000000000000000000000000000')",
                                self.nodes[2].lockunspent, False,
                                [{"txid": "0000000000000000000000000000000000", "vout": 0}])
        assert_raises_rpc_error(-8, "txid must be hexadecimal string (not 'ZZZ0000000000000000000000000000000000000000000000000000000000000')",
                                self.nodes[2].lockunspent, False,
                                [{"txid": "ZZZ0000000000000000000000000000000000000000000000000000000000000", "vout": 0}])
        assert_raises_rpc_error(-8, "Invalid parameter, unknown transaction",
                                self.nodes[2].lockunspent, False,
                                [{"txid": "0000000000000000000000000000000000000000000000000000000000000000", "vout": 0}])
        assert_raises_rpc_error(-8, "Invalid parameter, vout index out of bounds",
                                self.nodes[2].lockunspent, False,
                                [{"txid": unspent_0["txid"], "vout": 999}])
        # An output should be unlocked when spent
        unspent_0 = self.nodes[1].listunspent()[0]
        self.nodes[1].lockunspent(False, [unspent_0])
        tx = self.nodes[1].createrawtransaction([unspent_0], { self.nodes[1].getnewaddress() : 1 })
        tx = self.nodes[1].fundrawtransaction(tx)['hex']
        tx = self.nodes[1].signrawtransactionwithwallet(tx)["hex"]
        self.nodes[1].sendrawtransaction(tx)
        assert_equal(len(self.nodes[1].listlockunspent()), 0)
        # Have node1 generate 100 blocks (so node0 can recover the fee)
        self.nodes[1].generate(100)
        self.sync_all(self.nodes[0:3])
        # node0 should end up with 100 sys in block rewards plus fees, but
        # minus the 21 plus fees sent to node2
        assert_equal(self.nodes[0].getbalance(), 100 - 21)
        assert_equal(self.nodes[2].getbalance(), 21)
        # Node0 should have two unspent outputs.
        # Create a couple of transactions to send them to node2, submit them through
        # node1, and make sure both node0 and node2 pick them up properly:
        node0utxos = self.nodes[0].listunspent(1)
        assert_equal(len(node0utxos), 2)
        # create both transactions
        txns_to_send = []
        for utxo in node0utxos:
            inputs = []
            outputs = {}
            inputs.append({"txid": utxo["txid"], "vout": utxo["vout"]})
            # Leave 3 coins as an (oversized, deliberate) fee per tx.
            outputs[self.nodes[2].getnewaddress()] = utxo["amount"] - 3
            raw_tx = self.nodes[0].createrawtransaction(inputs, outputs)
            txns_to_send.append(self.nodes[0].signrawtransactionwithwallet(raw_tx))
        # Have node 1 (miner) send the transactions
        self.nodes[1].sendrawtransaction(hexstring=txns_to_send[0]["hex"], maxfeerate=0)
        self.nodes[1].sendrawtransaction(hexstring=txns_to_send[1]["hex"], maxfeerate=0)
        # Have node1 mine a block to confirm transactions:
        self.nodes[1].generate(1)
        self.sync_all(self.nodes[0:3])
        assert_equal(self.nodes[0].getbalance(), 0)
        assert_equal(self.nodes[2].getbalance(), 94)
        # Verify that a spent output cannot be locked anymore
        spent_0 = {"txid": node0utxos[0]["txid"], "vout": node0utxos[0]["vout"]}
        assert_raises_rpc_error(-8, "Invalid parameter, expected unspent output", self.nodes[0].lockunspent, False, [spent_0])
        # Send 10 SYS normal
        address = self.nodes[0].getnewaddress("test")
        fee_per_byte = Decimal('0.001') / 1000
        self.nodes[2].settxfee(fee_per_byte * 1000)
        txid = self.nodes[2].sendtoaddress(address, 10, "", "", False)
        self.nodes[2].generate(1)
        self.sync_all(self.nodes[0:3])
        node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), Decimal('84'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex']))
        assert_equal(self.nodes[0].getbalance(), Decimal('10'))
        # Send 10 SYS with subtract fee from amount
        txid = self.nodes[2].sendtoaddress(address, 10, "", "", True)
        self.nodes[2].generate(1)
        self.sync_all(self.nodes[0:3])
        node_2_bal -= Decimal('10')
        assert_equal(self.nodes[2].getbalance(), node_2_bal)
        node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), Decimal('20'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex']))
        # Sendmany 10 SYS
        txid = self.nodes[2].sendmany('', {address: 10}, 0, "", [])
        self.nodes[2].generate(1)
        self.sync_all(self.nodes[0:3])
        node_0_bal += Decimal('10')
        node_2_bal = self.check_fee_amount(self.nodes[2].getbalance(), node_2_bal - Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex']))
        assert_equal(self.nodes[0].getbalance(), node_0_bal)
        # Sendmany 10 SYS with subtract fee from amount
        txid = self.nodes[2].sendmany('', {address: 10}, 0, "", [address])
        self.nodes[2].generate(1)
        self.sync_all(self.nodes[0:3])
        node_2_bal -= Decimal('10')
        assert_equal(self.nodes[2].getbalance(), node_2_bal)
        node_0_bal = self.check_fee_amount(self.nodes[0].getbalance(), node_0_bal + Decimal('10'), fee_per_byte, self.get_vsize(self.nodes[2].gettransaction(txid)['hex']))
        self.start_node(3)
        connect_nodes(self.nodes[0], 3)
        self.sync_all()
        # check if we can list zero value tx as available coins
        # 1. create raw_tx
        # 2. hex-changed one output to 0.0
        # 3. sign and send
        # 4. check if recipient (node0) can list the zero value tx
        usp = self.nodes[1].listunspent(query_options={'minimumAmount': '49.998'})[0]
        inputs = [{"txid": usp['txid'], "vout": usp['vout']}]
        outputs = {self.nodes[1].getnewaddress(): 49.998, self.nodes[0].getnewaddress(): 11.11}
        raw_tx = self.nodes[1].createrawtransaction(inputs, outputs).replace("c0833842", "00000000") # replace 11.11 with 0.0 (int32)
        signed_raw_tx = self.nodes[1].signrawtransactionwithwallet(raw_tx)
        decoded_raw_tx = self.nodes[1].decoderawtransaction(signed_raw_tx['hex'])
        zero_value_txid = decoded_raw_tx['txid']
        self.nodes[1].sendrawtransaction(signed_raw_tx['hex'])
        self.sync_all()
        self.nodes[1].generate(1) # mine a block
        self.sync_all()
        unspent_txs = self.nodes[0].listunspent() # zero value tx must be in listunspents output
        found = False
        for uTx in unspent_txs:
            if uTx['txid'] == zero_value_txid:
                found = True
                assert_equal(uTx['amount'], Decimal('0'))
        assert found
        # do some -walletbroadcast tests
        self.stop_nodes()
        self.start_node(0, ["-walletbroadcast=0"])
        self.start_node(1, ["-walletbroadcast=0"])
        self.start_node(2, ["-walletbroadcast=0"])
        connect_nodes(self.nodes[0], 1)
        connect_nodes(self.nodes[1], 2)
        connect_nodes(self.nodes[0], 2)
        self.sync_all(self.nodes[0:3])
        txid_not_broadcast = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2)
        tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast)
        self.nodes[1].generate(1) # mine a block, tx should not be in there
        self.sync_all(self.nodes[0:3])
        assert_equal(self.nodes[2].getbalance(), node_2_bal) # should not be changed because tx was not broadcasted
        # now broadcast from another node, mine a block, sync, and check the balance
        self.nodes[1].sendrawtransaction(tx_obj_not_broadcast['hex'])
        self.nodes[1].generate(1)
        self.sync_all(self.nodes[0:3])
        node_2_bal += 2
        tx_obj_not_broadcast = self.nodes[0].gettransaction(txid_not_broadcast)
        assert_equal(self.nodes[2].getbalance(), node_2_bal)
        # create another tx
        self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 2)
        # restart the nodes with -walletbroadcast=1
        self.stop_nodes()
        self.start_node(0)
        self.start_node(1)
        self.start_node(2)
        connect_nodes(self.nodes[0], 1)
        connect_nodes(self.nodes[1], 2)
        connect_nodes(self.nodes[0], 2)
        self.sync_blocks(self.nodes[0:3])
        self.nodes[0].generate(1)
        self.sync_blocks(self.nodes[0:3])
        node_2_bal += 2
        # tx should be added to balance because after restarting the nodes tx should be broadcast
        assert_equal(self.nodes[2].getbalance(), node_2_bal)
        # send a tx with value in a string (PR#6380 +)
        txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "2")
        tx_obj = self.nodes[0].gettransaction(txid)
        assert_equal(tx_obj['amount'], Decimal('-2'))
        txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "0.0001")
        tx_obj = self.nodes[0].gettransaction(txid)
        assert_equal(tx_obj['amount'], Decimal('-0.0001'))
        # check if JSON parser can handle scientific notation in strings
        txid = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), "1e-4")
        tx_obj = self.nodes[0].gettransaction(txid)
        assert_equal(tx_obj['amount'], Decimal('-0.0001'))
        # General checks for errors from incorrect inputs
        # This will raise an exception because the amount type is wrong
        assert_raises_rpc_error(-3, "Invalid amount", self.nodes[0].sendtoaddress, self.nodes[2].getnewaddress(), "1f-4")
        # This will raise an exception since generate does not accept a string
        assert_raises_rpc_error(-1, "not an integer", self.nodes[0].generate, "2")
        # This will raise an exception for the invalid private key format
        assert_raises_rpc_error(-5, "Invalid private key encoding", self.nodes[0].importprivkey, "invalid")
        # This will raise an exception for importing an address with the PS2H flag
        temp_address = self.nodes[1].getnewaddress("", "p2sh-segwit")
        assert_raises_rpc_error(-5, "Cannot use the p2sh flag with an address - use a script instead", self.nodes[0].importaddress, temp_address, "label", False, True)
        # This will raise an exception for attempting to dump the private key of an address you do not own
        assert_raises_rpc_error(-3, "Address does not refer to a key", self.nodes[0].dumpprivkey, temp_address)
        # This will raise an exception for attempting to get the private key of an invalid Syscoin address
        assert_raises_rpc_error(-5, "Invalid Syscoin address", self.nodes[0].dumpprivkey, "invalid")
        # This will raise an exception for attempting to set a label for an invalid Syscoin address
        assert_raises_rpc_error(-5, "Invalid Syscoin address", self.nodes[0].setlabel, "invalid address", "label")
        # This will raise an exception for importing an invalid address
        assert_raises_rpc_error(-5, "Invalid Syscoin address or script", self.nodes[0].importaddress, "invalid")
        # This will raise an exception for attempting to import a pubkey that isn't in hex
        assert_raises_rpc_error(-5, "Pubkey must be a hex string", self.nodes[0].importpubkey, "not hex")
        # This will raise an exception for importing an invalid pubkey
        assert_raises_rpc_error(-5, "Pubkey is not a valid public key", self.nodes[0].importpubkey, "5361746f736869204e616b616d6f746f")
        # Import address and private key to check correct behavior of spendable unspents
        # 1. Send some coins to generate new UTXO
        address_to_import = self.nodes[2].getnewaddress()
        txid = self.nodes[0].sendtoaddress(address_to_import, 1)
        self.nodes[0].generate(1)
        self.sync_all(self.nodes[0:3])
        # 2. Import address from node2 to node1
        self.nodes[1].importaddress(address_to_import)
        # 3. Validate that the imported address is watch-only on node1
        assert self.nodes[1].getaddressinfo(address_to_import)["iswatchonly"]
        # 4. Check that the unspents after import are not spendable
        assert_array_result(self.nodes[1].listunspent(),
                            {"address": address_to_import},
                            {"spendable": False})
        # 5. Import private key of the previously imported address on node1
        priv_key = self.nodes[2].dumpprivkey(address_to_import)
        self.nodes[1].importprivkey(priv_key)
        # 6. Check that the unspents are now spendable on node1
        assert_array_result(self.nodes[1].listunspent(),
                            {"address": address_to_import},
                            {"spendable": True})
        # Mine a block from node0 to an address from node1
        coinbase_addr = self.nodes[1].getnewaddress()
        block_hash = self.nodes[0].generatetoaddress(1, coinbase_addr)[0]
        coinbase_txid = self.nodes[0].getblock(block_hash)['tx'][0]
        self.sync_all(self.nodes[0:3])
        # Check that the txid and balance is found by node1
        self.nodes[1].gettransaction(coinbase_txid)
        # check if wallet or blockchain maintenance changes the balance
        self.sync_all(self.nodes[0:3])
        blocks = self.nodes[0].generate(2)
        self.sync_all(self.nodes[0:3])
        balance_nodes = [self.nodes[i].getbalance() for i in range(3)]
        block_count = self.nodes[0].getblockcount()
        # Check modes:
        # - True: unicode escaped as \u....
        # - False: unicode directly as UTF-8
        for mode in [True, False]:
            self.nodes[0].rpc.ensure_ascii = mode
            # unicode check: Basic Multilingual Plane, Supplementary Plane respectively
            for label in [u'рыба', u'𝅘𝅥𝅯']:
                addr = self.nodes[0].getnewaddress()
                self.nodes[0].setlabel(addr, label)
                test_address(self.nodes[0], addr, labels=[label])
                assert label in self.nodes[0].listlabels()
        self.nodes[0].rpc.ensure_ascii = True # restore to default
        # maintenance tests
        maintenance = [
            '-rescan',
            '-reindex',
            '-zapwallettxes=1',
            '-zapwallettxes=2',
            # disabled until issue is fixed: https://github.com/syscoin/syscoin/issues/7463
            # '-salvagewallet',
        ]
        chainlimit = 6
        for m in maintenance:
            self.log.info("check " + m)
            self.stop_nodes()
            # set lower ancestor limit for later
            self.start_node(0, [m, "-limitancestorcount=" + str(chainlimit)])
            self.start_node(1, [m, "-limitancestorcount=" + str(chainlimit)])
            self.start_node(2, [m, "-limitancestorcount=" + str(chainlimit)])
            if m == '-reindex':
                # reindex will leave rpc warm up "early"; Wait for it to finish
                wait_until(lambda: [block_count] * 3 == [self.nodes[i].getblockcount() for i in range(3)])
            assert_equal(balance_nodes, [self.nodes[i].getbalance() for i in range(3)])
        # Exercise listsinceblock with the last two blocks
        coinbase_tx_1 = self.nodes[0].listsinceblock(blocks[0])
        assert_equal(coinbase_tx_1["lastblock"], blocks[1])
        assert_equal(len(coinbase_tx_1["transactions"]), 1)
        assert_equal(coinbase_tx_1["transactions"][0]["blockhash"], blocks[1])
        assert_equal(len(self.nodes[0].listsinceblock(blocks[1])["transactions"]), 0)
        # ==Check that wallet prefers to use coins that don't exceed mempool limits =====
        # Get all non-zero utxos together
        chain_addrs = [self.nodes[0].getnewaddress(), self.nodes[0].getnewaddress()]
        singletxid = self.nodes[0].sendtoaddress(chain_addrs[0], self.nodes[0].getbalance(), "", "", True)
        self.nodes[0].generate(1)
        node0_balance = self.nodes[0].getbalance()
        # Split into two chains
        rawtx = self.nodes[0].createrawtransaction([{"txid": singletxid, "vout": 0}], {chain_addrs[0]: node0_balance / 2 - Decimal('0.01'), chain_addrs[1]: node0_balance / 2 - Decimal('0.01')})
        signedtx = self.nodes[0].signrawtransactionwithwallet(rawtx)
        singletxid = self.nodes[0].sendrawtransaction(hexstring=signedtx["hex"], maxfeerate=0)
        self.nodes[0].generate(1)
        # Make a long chain of unconfirmed payments without hitting mempool limit
        # Each tx we make leaves only one output of change on a chain 1 longer
        # Since the amount to send is always much less than the outputs, we only ever need one output
        # So we should be able to generate exactly chainlimit txs for each original output
        sending_addr = self.nodes[1].getnewaddress()
        txid_list = []
        for i in range(chainlimit * 2):
            txid_list.append(self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001')))
        assert_equal(self.nodes[0].getmempoolinfo()['size'], chainlimit * 2)
        assert_equal(len(txid_list), chainlimit * 2)
        # Without walletrejectlongchains, we will still generate a txid
        # The tx will be stored in the wallet but not accepted to the mempool
        extra_txid = self.nodes[0].sendtoaddress(sending_addr, Decimal('0.0001'))
        assert extra_txid not in self.nodes[0].getrawmempool()
        assert extra_txid in [tx["txid"] for tx in self.nodes[0].listtransactions()]
        self.nodes[0].abandontransaction(extra_txid)
        total_txs = len(self.nodes[0].listtransactions("*", 99999))
        # Try with walletrejectlongchains
        # Double chain limit but require combining inputs, so we pass SelectCoinsMinConf
        self.stop_node(0)
        self.start_node(0, extra_args=["-walletrejectlongchains", "-limitancestorcount=" + str(2 * chainlimit)])
        # wait for loadmempool
        timeout = 10
        while (timeout > 0 and len(self.nodes[0].getrawmempool()) < chainlimit * 2):
            time.sleep(0.5)
            timeout -= 0.5
        assert_equal(len(self.nodes[0].getrawmempool()), chainlimit * 2)
        node0_balance = self.nodes[0].getbalance()
        # With walletrejectlongchains we will not create the tx and store it in our wallet.
        assert_raises_rpc_error(-4, "Transaction has too long of a mempool chain", self.nodes[0].sendtoaddress, sending_addr, node0_balance - Decimal('0.01'))
        # Verify nothing new in wallet
        assert_equal(total_txs, len(self.nodes[0].listtransactions("*", 99999)))
        # Test getaddressinfo on external address. Note that these addresses are taken from disablewallet.py
        assert_raises_rpc_error(-5, "Invalid address", self.nodes[0].getaddressinfo, "3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy")
        address_info = self.nodes[0].getaddressinfo("mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ")
        assert_equal(address_info['address'], "mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ")
        assert_equal(address_info["scriptPubKey"], "76a9144e3854046c7bd1594ac904e4793b6a45b36dea0988ac")
        assert not address_info["ismine"]
        assert not address_info["iswatchonly"]
        assert not address_info["isscript"]
        assert not address_info["ischange"]
        # Test getaddressinfo 'ischange' field on change address.
        self.nodes[0].generate(1)
        destination = self.nodes[1].getnewaddress()
        txid = self.nodes[0].sendtoaddress(destination, 0.123)
        tx = self.nodes[0].decoderawtransaction(self.nodes[0].gettransaction(txid)['hex'])
        output_addresses = [vout['scriptPubKey']['addresses'][0] for vout in tx["vout"]]
        assert len(output_addresses) > 1
        for address in output_addresses:
            ischange = self.nodes[0].getaddressinfo(address)['ischange']
            assert_equal(ischange, address != destination)
            if ischange:
                change = address
        self.nodes[0].setlabel(change, 'foobar')
        # Labelling a change address makes the wallet stop treating it as change.
        assert_equal(self.nodes[0].getaddressinfo(change)['ischange'], False)
        # Test gettransaction response with different arguments.
        self.log.info("Testing gettransaction response with different arguments...")
        self.nodes[0].setlabel(change, 'baz')
        baz = self.nodes[0].listtransactions(label="baz", count=1)[0]
        expected_receive_vout = {"label": "baz",
                                 "address": baz["address"],
                                 "amount": baz["amount"],
                                 "category": baz["category"],
                                 "vout": baz["vout"]}
        expected_fields = frozenset({'amount', 'bip125-replaceable', 'confirmations', 'details', 'fee',
                                     'hex', 'time', 'timereceived', 'trusted', 'txid', 'walletconflicts'})
        verbose_field = "decoded"
        expected_verbose_fields = expected_fields | {verbose_field}
        self.log.debug("Testing gettransaction response without verbose")
        tx = self.nodes[0].gettransaction(txid=txid)
        assert_equal(set([*tx]), expected_fields)
        assert_array_result(tx["details"], {"category": "receive"}, expected_receive_vout)
        self.log.debug("Testing gettransaction response with verbose set to False")
        tx = self.nodes[0].gettransaction(txid=txid, verbose=False)
        assert_equal(set([*tx]), expected_fields)
        assert_array_result(tx["details"], {"category": "receive"}, expected_receive_vout)
        self.log.debug("Testing gettransaction response with verbose set to True")
        tx = self.nodes[0].gettransaction(txid=txid, verbose=True)
        assert_equal(set([*tx]), expected_verbose_fields)
        assert_array_result(tx["details"], {"category": "receive"}, expected_receive_vout)
        assert_equal(tx[verbose_field], self.nodes[0].decoderawtransaction(tx["hex"]))
# Entry point: run the functional test when this file is executed directly.
if __name__ == '__main__':
    WalletTest().main()
| mit |
kyvinh/home-assistant | homeassistant/components/image_processing/openalpr_local.py | 3 | 5919 | """
Component that will help set the openalpr local for alpr processing.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/image_processing.openalpr_local/
"""
import asyncio
import logging
import io
import re
import voluptuous as vol
from homeassistant.core import split_entity_id, callback
from homeassistant.const import STATE_UNKNOWN
import homeassistant.helpers.config_validation as cv
from homeassistant.components.image_processing import (
PLATFORM_SCHEMA, ImageProcessingEntity, CONF_CONFIDENCE, CONF_SOURCE,
CONF_ENTITY_ID, CONF_NAME, ATTR_ENTITY_ID, ATTR_CONFIDENCE)
from homeassistant.util.async import run_callback_threadsafe
_LOGGER = logging.getLogger(__name__)
# Matches the "plateN:" header line that the alpr tool prints per vehicle.
RE_ALPR_PLATE = re.compile(r"^plate\d*:")
# Matches one "- PLATE   confidence: NN.NN" result line from the alpr tool.
RE_ALPR_RESULT = re.compile(r"- (\w*)\s*confidence: (\d*.\d*)")
# Event fired on the Home Assistant bus when a new plate is recognized.
EVENT_FOUND_PLATE = 'image_processing.found_plate'
ATTR_PLATE = 'plate'
ATTR_PLATES = 'plates'
ATTR_VEHICLES = 'vehicles'
# Region codes accepted by the openalpr command-line tool ('-c' option).
OPENALPR_REGIONS = [
    'au',
    'auwide',
    'br',
    'eu',
    'fr',
    'gb',
    'kr',
    'kr2',
    'mx',
    'sg',
    'us',
    'vn2'
]
CONF_REGION = 'region'
# NOTE(review): the key reads 'alp_bin' (not 'alpr_bin') — presumably a
# historical typo, but changing it would break existing user configurations.
CONF_ALPR_BIN = 'alp_bin'
DEFAULT_BINARY = 'alpr'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_REGION):
        vol.All(vol.Lower, vol.In(OPENALPR_REGIONS)),
    vol.Optional(CONF_ALPR_BIN, default=DEFAULT_BINARY): cv.string,
})
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
    """Set up the openalpr local platform."""
    # Base command line; the image itself is piped to the binary via stdin ('-').
    command = [config[CONF_ALPR_BIN], '-c', config[CONF_REGION], '-']
    confidence = config[CONF_CONFIDENCE]
    # One entity per configured camera source.
    entities = [
        OpenAlprLocalEntity(
            camera[CONF_ENTITY_ID], command, confidence, camera.get(CONF_NAME))
        for camera in config[CONF_SOURCE]
    ]
    yield from async_add_devices(entities)
class ImageProcessingAlprEntity(ImageProcessingEntity):
    """Base entity class for alpr image processing."""

    def __init__(self):
        """Initialize base alpr entity."""
        self.plates = {}  # last scan's results: {plate: confidence}
        self.vehicles = 0  # number of vehicles seen in the last scan

    @property
    def state(self):
        """Return the plate with the highest confidence, or STATE_UNKNOWN."""
        best_plate = STATE_UNKNOWN
        best_confidence = 0
        for candidate, candidate_confidence in self.plates.items():
            if candidate_confidence > best_confidence:
                best_confidence = candidate_confidence
                best_plate = candidate
        return best_plate

    @property
    def state_attributes(self):
        """Return device specific state attributes."""
        return {
            ATTR_PLATES: self.plates,
            ATTR_VEHICLES: self.vehicles,
        }

    def process_plates(self, plates, vehicles):
        """Thread-safe wrapper around async_process_plates."""
        run_callback_threadsafe(
            self.hass.loop, self.async_process_plates, plates, vehicles
        ).result()

    @callback
    def async_process_plates(self, plates, vehicles):
        """Send events for newly seen plates and store the scan data.

        plates is a dict in the form {'plate': confidence}.
        This method must be run in the event loop.
        """
        # Discard results below the configured confidence threshold.
        plates = {plate: confidence for plate, confidence in plates.items()
                  if confidence >= self.confidence}
        # Fire one event for each plate not present in the previous scan.
        for new_plate in set(plates) - set(self.plates):
            self.hass.async_add_job(
                self.hass.bus.async_fire, EVENT_FOUND_PLATE, {
                    ATTR_PLATE: new_plate,
                    ATTR_ENTITY_ID: self.entity_id,
                    ATTR_CONFIDENCE: plates.get(new_plate),
                }
            )
        # Remember this scan for the next comparison.
        self.plates = plates
        self.vehicles = vehicles
class OpenAlprLocalEntity(ImageProcessingAlprEntity):
    """OpenAlpr local api entity."""

    def __init__(self, camera_entity, command, confidence, name=None):
        """Initialize openalpr local api."""
        super().__init__()
        self._cmd = command
        self._camera = camera_entity
        self._confidence = confidence
        # Fall back to a name derived from the camera entity id.
        self._name = name or "OpenAlpr {0}".format(
            split_entity_id(camera_entity)[1])

    @property
    def confidence(self):
        """Return minimum confidence for send events."""
        return self._confidence

    @property
    def camera_entity(self):
        """Return camera entity id from process pictures."""
        return self._camera

    @property
    def name(self):
        """Return the name of the entity."""
        return self._name

    @asyncio.coroutine
    def async_process_image(self, image):
        """Run the alpr binary over the image and process its text output.

        This method is a coroutine.
        """
        plate_confidences = {}
        vehicle_count = 0
        alpr = yield from asyncio.create_subprocess_exec(
            *self._cmd,
            loop=self.hass.loop,
            stdin=asyncio.subprocess.PIPE,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.DEVNULL
        )
        # Feed the raw image bytes over stdin and collect all output at once.
        stdout, _ = yield from alpr.communicate(input=image)
        for line in io.StringIO(str(stdout, 'utf-8')):
            # A "plateN:" header marks a newly detected vehicle.
            if RE_ALPR_PLATE.search(line):
                vehicle_count += 1
                continue
            # A "- PLATE confidence: NN.NN" line is one candidate reading.
            result_match = RE_ALPR_RESULT.search(line)
            if result_match:
                try:
                    plate_confidences[result_match.group(1)] = float(
                        result_match.group(2))
                except ValueError:
                    continue
        self.async_process_plates(plate_confidences, vehicle_count)
| apache-2.0 |
todaychi/hue | desktop/core/ext-py/navoptapi-0.1.0/versioneer.py | 386 | 68611 |
# Version: 0.18
"""The Versioneer - like a rocketeer, but for versions.
The Versioneer
==============
* like a rocketeer, but for versions!
* https://github.com/warner/python-versioneer
* Brian Warner
* License: Public Domain
* Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy
* [![Latest Version]
(https://pypip.in/version/versioneer/badge.svg?style=flat)
](https://pypi.python.org/pypi/versioneer/)
* [![Build Status]
(https://travis-ci.org/warner/python-versioneer.png?branch=master)
](https://travis-ci.org/warner/python-versioneer)
This is a tool for managing a recorded version number in distutils-based
python projects. The goal is to remove the tedious and error-prone "update
the embedded version string" step from your release process. Making a new
release should be as easy as recording a new tag in your version-control
system, and maybe making new tarballs.
## Quick Install
* `pip install versioneer` to somewhere to your $PATH
* add a `[versioneer]` section to your setup.cfg (see below)
* run `versioneer install` in your source tree, commit the results
## Version Identifiers
Source trees come from a variety of places:
* a version-control system checkout (mostly used by developers)
* a nightly tarball, produced by build automation
* a snapshot tarball, produced by a web-based VCS browser, like github's
"tarball from tag" feature
* a release tarball, produced by "setup.py sdist", distributed through PyPI
Within each source tree, the version identifier (either a string or a number,
this tool is format-agnostic) can come from a variety of places:
* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows
about recent "tags" and an absolute revision-id
* the name of the directory into which the tarball was unpacked
* an expanded VCS keyword ($Id$, etc)
* a `_version.py` created by some earlier build step
For released software, the version identifier is closely related to a VCS
tag. Some projects use tag names that include more than just the version
string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool
needs to strip the tag prefix to extract the version identifier. For
unreleased software (between tags), the version identifier should provide
enough information to help developers recreate the same tree, while also
giving them an idea of roughly how old the tree is (after version 1.2, before
version 1.3). Many VCS systems can report a description that captures this,
for example `git describe --tags --dirty --always` reports things like
"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the
0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has
uncommitted changes).
The version identifier is used for multiple purposes:
* to allow the module to self-identify its version: `myproject.__version__`
* to choose a name and prefix for a 'setup.py sdist' tarball
## Theory of Operation
Versioneer works by adding a special `_version.py` file into your source
tree, where your `__init__.py` can import it. This `_version.py` knows how to
dynamically ask the VCS tool for version information at import time.
`_version.py` also contains `$Revision$` markers, and the installation
process marks `_version.py` to have this marker rewritten with a tag name
during the `git archive` command. As a result, generated tarballs will
contain enough information to get the proper version.
To allow `setup.py` to compute a version too, a `versioneer.py` is added to
the top level of your source tree, next to `setup.py` and the `setup.cfg`
that configures it. This overrides several distutils/setuptools commands to
compute the version when invoked, and changes `setup.py build` and `setup.py
sdist` to replace `_version.py` with a small static file that contains just
the generated version data.
## Installation
See [INSTALL.md](./INSTALL.md) for detailed installation instructions.
## Version-String Flavors
Code which uses Versioneer can learn about its version string at runtime by
importing `_version` from your main `__init__.py` file and running the
`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can
import the top-level `versioneer.py` and run `get_versions()`.
Both functions return a dictionary with different flavors of version
information:
* `['version']`: A condensed version string, rendered using the selected
style. This is the most commonly used value for the project's version
string. The default "pep440" style yields strings like `0.11`,
`0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section
below for alternative styles.
* `['full-revisionid']`: detailed revision identifier. For Git, this is the
full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac".
* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the
commit date in ISO 8601 format. This will be None if the date is not
available.
* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that
this is only accurate if run in a VCS checkout, otherwise it is likely to
be False or None
* `['error']`: if the version string could not be computed, this will be set
to a string describing the problem, otherwise it will be None. It may be
useful to throw an exception in setup.py if this is set, to avoid e.g.
creating tarballs with a version string of "unknown".
Some variants are more useful than others. Including `full-revisionid` in a
bug report should allow developers to reconstruct the exact code being tested
(or indicate the presence of local changes that should be shared with the
developers). `version` is suitable for display in an "about" box or a CLI
`--version` output: it can be easily compared against release notes and lists
of bugs fixed in various releases.
The installer adds the following text to your `__init__.py` to place a basic
version in `YOURPROJECT.__version__`:
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
## Styles
The setup.cfg `style=` configuration controls how the VCS information is
rendered into a version string.
The default style, "pep440", produces a PEP440-compliant string, equal to the
un-prefixed tag name for actual releases, and containing an additional "local
version" section with more detail for in-between builds. For Git, this is
TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags
--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the
tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and
that this commit is two revisions ("+2") beyond the "0.11" tag. For released
software (exactly equal to a known tag), the identifier will only contain the
stripped tag, e.g. "0.11".
Other styles are available. See [details.md](details.md) in the Versioneer
source tree for descriptions.
## Debugging
Versioneer tries to avoid fatal errors: if something goes wrong, it will tend
to return a version of "0+unknown". To investigate the problem, run `setup.py
version`, which will run the version-lookup code in a verbose mode, and will
display the full contents of `get_versions()` (including the `error` string,
which may help identify what went wrong).
## Known Limitations
Some situations are known to cause problems for Versioneer. This details the
most significant ones. More can be found on Github
[issues page](https://github.com/warner/python-versioneer/issues).
### Subprojects
Versioneer has limited support for source trees in which `setup.py` is not in
the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
two common reasons why `setup.py` might not be in the root:
* Source trees which contain multiple subprojects, such as
[Buildbot](https://github.com/buildbot/buildbot), which contains both
"master" and "slave" subprojects, each with their own `setup.py`,
`setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
distributions (and upload multiple independently-installable tarballs).
* Source trees whose main purpose is to contain a C library, but which also
  provide bindings to Python (and perhaps other languages) in subdirectories.
Versioneer will look for `.git` in parent directories, and most operations
should get the right version string. However `pip` and `setuptools` have bugs
and implementation details which frequently cause `pip install .` from a
subproject directory to fail to find a correct version string (so it usually
defaults to `0+unknown`).
`pip install --editable .` should work correctly. `setup.py install` might
work too.
Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
some later version.
[Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking
this issue. The discussion in
[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
issue from the Versioneer side in more detail.
[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
pip to let Versioneer work correctly.
Versioneer-0.16 and earlier only looked for a `.git` directory next to the
`setup.cfg`, so subprojects were completely unsupported with those releases.
### Editable installs with setuptools <= 18.5
`setup.py develop` and `pip install --editable .` allow you to install a
project into a virtualenv once, then continue editing the source code (and
test) without re-installing after every change.
"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a
convenient way to specify executable scripts that should be installed along
with the python package.
These both work as expected when using modern setuptools. When using
setuptools-18.5 or earlier, however, certain operations will cause
`pkg_resources.DistributionNotFound` errors when running the entrypoint
script, which must be resolved by re-installing the package. This happens
when the install happens with one version, then the egg_info data is
regenerated while a different version is checked out. Many setup.py commands
cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
a different virtualenv), so this can be surprising.
[Bug #83](https://github.com/warner/python-versioneer/issues/83) describes
this one, but upgrading to a newer version of setuptools should probably
resolve it.
### Unicode version strings
While Versioneer works (and is continually tested) with both Python 2 and
Python 3, it is not entirely consistent with bytes-vs-unicode distinctions.
Newer releases probably generate unicode version strings on py2. It's not
clear that this is wrong, but it may be surprising for applications when then
write these strings to a network connection or include them in bytes-oriented
APIs like cryptographic checksums.
[Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates
this question.
## Updating Versioneer
To upgrade your project to a new release of Versioneer, do the following:
* install the new Versioneer (`pip install -U versioneer` or equivalent)
* edit `setup.cfg`, if necessary, to include any new configuration settings
indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details.
* re-run `versioneer install` in your source tree, to replace
`SRC/_version.py`
* commit any changed files
## Future Directions
This tool is designed to make it easily extended to other version-control
systems: all VCS-specific components are in separate directories like
src/git/ . The top-level `versioneer.py` script is assembled from these
components by running make-versioneer.py . In the future, make-versioneer.py
will take a VCS name as an argument, and will construct a version of
`versioneer.py` that is specific to the given VCS. It might also take the
configuration arguments that are currently provided manually during
installation by editing setup.py . Alternatively, it might go the other
direction and include code from all supported VCS systems, reducing the
number of intermediate scripts.
## License
To make Versioneer easier to embed, all its code is dedicated to the public
domain. The `_version.py` that it creates is also in the public domain.
Specifically, both are released under the Creative Commons "Public Domain
Dedication" license (CC0-1.0), as described in
https://creativecommons.org/publicdomain/zero/1.0/ .
"""
from __future__ import print_function
try:
import configparser
except ImportError:
import ConfigParser as configparser
import errno
import json
import os
import re
import subprocess
import sys
class VersioneerConfig:
    """Container for Versioneer configuration parameters.

    A plain attribute bag: get_config_from_root() populates instances
    with VCS, style, versionfile_source, versionfile_build, tag_prefix,
    parentdir_prefix, and verbose.
    """
def get_root():
    """Get the project root directory.

    We require that all commands are run from the project root, i.e. the
    directory that contains setup.py, setup.cfg, and versioneer.py .

    Returns the absolute root path, or raises VersioneerBadRootError when
    neither the current directory nor the directory of sys.argv[0] looks
    like a project root.
    """
    root = os.path.realpath(os.path.abspath(os.getcwd()))
    setup_py = os.path.join(root, "setup.py")
    versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        # allow 'python path/to/setup.py COMMAND'
        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
        setup_py = os.path.join(root, "setup.py")
        versioneer_py = os.path.join(root, "versioneer.py")
    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
        # Fixed wording: previously read "unable to run the project root
        # directory", which was ungrammatical and confusing.
        err = ("Versioneer was unable to find the project root directory. "
               "Versioneer requires setup.py to be executed from "
               "its immediate directory (like 'python setup.py COMMAND'), "
               "or in a way that lets it use sys.argv[0] to find the root "
               "(like 'python path/to/setup.py COMMAND').")
        raise VersioneerBadRootError(err)
    try:
        # Certain runtime workflows (setup.py install/develop in a setuptools
        # tree) execute all dependencies in a single python process, so
        # "versioneer" may be imported multiple times, and python's shared
        # module-import table will cache the first one. So we can't use
        # os.path.dirname(__file__), as that will find whichever
        # versioneer.py was first imported, even in later projects.
        me = os.path.realpath(os.path.abspath(__file__))
        me_dir = os.path.normcase(os.path.splitext(me)[0])
        vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
        if me_dir != vsr_dir:
            print("Warning: build in %s is using versioneer.py from %s"
                  % (os.path.dirname(me), versioneer_py))
    except NameError:
        pass
    return root
def get_config_from_root(root):
    """Read the project setup.cfg file to determine Versioneer config."""
    # May raise EnvironmentError (setup.cfg missing),
    # configparser.NoSectionError (no [versioneer] section), or
    # configparser.NoOptionError (no "VCS=" key). See the docstring at
    # the top of versioneer.py for instructions on writing your setup.cfg .
    parser = configparser.SafeConfigParser()
    with open(os.path.join(root, "setup.cfg"), "r") as cfg_file:
        parser.readfp(cfg_file)
    section = "versioneer"

    def fetch(option):
        # Optional keys simply come back as None when absent.
        if parser.has_option(section, option):
            return parser.get(section, option)
        return None

    cfg = VersioneerConfig()
    cfg.VCS = parser.get(section, "VCS")  # mandatory
    cfg.style = fetch("style") or ""
    cfg.versionfile_source = fetch("versionfile_source")
    cfg.versionfile_build = fetch("versionfile_build")
    cfg.tag_prefix = fetch("tag_prefix")
    if cfg.tag_prefix in ("''", '""'):
        cfg.tag_prefix = ""
    cfg.parentdir_prefix = fetch("parentdir_prefix")
    cfg.verbose = fetch("verbose")
    return cfg
class NotThisMethod(Exception):
    """Exception raised if a method is not valid for the current scenario.

    Each version-discovery strategy raises this to mean "try the next
    strategy", as opposed to a hard failure.
    """
# these dictionaries contain VCS-specific tools
LONG_VERSION_PY = {}
HANDLERS = {}


def register_vcs_handler(vcs, method):  # decorator
    """Decorator to mark a method as the handler for a particular VCS."""
    def decorate(f):
        """Store f in HANDLERS[vcs][method] and return it unchanged."""
        HANDLERS.setdefault(vcs, {})[method] = f
        return f
    return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
                env=None):
    """Call the given command(s).

    Tries each name in *commands* until one launches; returns
    (stdout, returncode), or (None, None) when no command could be run,
    or (None, returncode) when the command exited non-zero.
    """
    assert isinstance(commands, list)
    process = None
    for candidate in commands:
        dispcmd = str([candidate] + args)
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            process = subprocess.Popen([candidate] + args, cwd=cwd, env=env,
                                       stdout=subprocess.PIPE,
                                       stderr=(subprocess.PIPE if hide_stderr
                                               else None))
            break
        except EnvironmentError:
            err = sys.exc_info()[1]
            if err.errno == errno.ENOENT:
                # this candidate doesn't exist; try the next one
                continue
            if verbose:
                print("unable to run %s" % dispcmd)
                print(err)
            return None, None
    else:
        # loop exhausted without a successful Popen
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None, None
    stdout = process.communicate()[0].strip()
    if sys.version_info[0] >= 3:
        stdout = stdout.decode()
    if process.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % dispcmd)
            print("stdout was %s" % stdout)
        return None, process.returncode
    return stdout, process.returncode
LONG_VERSION_PY['git'] = '''
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "%(STYLE)s"
cfg.tag_prefix = "%(TAG_PREFIX)s"
cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s"
cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %%s" %% dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %%s" %% (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %%s (error)" %% dispcmd)
print("stdout was %%s" %% stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %%s but none started with prefix %%s" %%
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %%d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%%s', no digits" %% ",".join(refs - tags))
if verbose:
print("likely tags: %%s" %% ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %%s" %% r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %%s not under git control" %% root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%%s*" %% tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%%s'"
%% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%%s' doesn't start with prefix '%%s'"
print(fmt %% (full_tag, tag_prefix))
pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'"
%% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%%d" %% pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%%d" %% pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%%s" %% pieces["short"]
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%%s" %% pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Eexceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%%d" %% pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%%s'" %% style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}
'''
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
    """Extract version information from the given file.

    Scans a _version.py file for the literal git_refnames/git_full/git_date
    assignments and returns them as a dict with keys "refnames", "full"
    and "date" -- any of which may be absent if the corresponding line is
    missing or unparseable. Returns {} when the file cannot be read.
    """
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords = {}
    try:
        # 'with' guarantees the handle is closed even if a read raises
        # part-way through (the previous open()/close() pair leaked it
        # on error).
        with open(versionfile_abs, "r") as f:
            for line in f.readlines():
                if line.strip().startswith("git_refnames ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["refnames"] = mo.group(1)
                if line.strip().startswith("git_full ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["full"] = mo.group(1)
                if line.strip().startswith("git_date ="):
                    mo = re.search(r'=\s*"(.*)"', line)
                    if mo:
                        keywords["date"] = mo.group(1)
    except EnvironmentError:
        pass
    return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
    """Get version information from git keywords."""
    if not keywords:
        raise NotThisMethod("no keywords at all, weird")
    date = keywords.get("date")
    if date is not None:
        # git-2.2.0 added "%cI" (strict ISO-8601), but we stick with "%ci"
        # (an "ISO-8601-like" form available since git-1.5.3) and massage
        # it into compliant shape here, because reliably detecting the git
        # version in use is more trouble than it is worth.
        date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
    refs = {r.strip() for r in refnames.strip("()").split(",")}
    # git >= 1.8.3 lists tags as "tag: foo-1.0" rather than bare "foo-1.0";
    # when such prefixed entries exist, prefer them.
    TAG = "tag: "
    tags = {r[len(TAG):] for r in refs if r.startswith(TAG)}
    if not tags:
        # Old git (or genuinely no tags). The old git %d expansion behaves
        # like 'git log --decorate=short' and strips the refs/heads/ and
        # refs/tags/ prefixes, so branches and tags are indistinguishable.
        # Heuristic: keep only refnames containing a digit, which filters
        # out common branch names like "release" and "stabilization", plus
        # "HEAD" and "master".
        tags = {r for r in refs if re.search(r'\d', r)}
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if not ref.startswith(tag_prefix):
            continue
        r = ref[len(tag_prefix):]
        if verbose:
            print("picking %s" % r)
        return {"version": r,
                "full-revisionid": keywords["full"].strip(),
                "dirty": False, "error": None,
                "date": date}
    # no suitable tags, so version is "0+unknown", but full hex is still there
    if verbose:
        print("no suitable tags, using unknown + full revision id")
    return {"version": "0+unknown",
            "full-revisionid": keywords["full"].strip(),
            "dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
    """Get version from 'git describe' in the root of the source tree.
    This only gets called if the git-archive 'subst' keywords were *not*
    expanded, and _version.py hasn't already been rewritten with a short
    version string, meaning we're inside a checked out source tree.

    Returns a "pieces" dict with keys "long" (full commit hex), "short",
    "error", "dirty", "date", plus -- when the describe output parses --
    "closest-tag" and "distance". Raises NotThisMethod when root is not a
    git checkout or the git commands fail outright.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    # cheap probe: is this a git working tree at all?
    out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
                          hide_stderr=True)
    if rc != 0:
        if verbose:
            print("Directory %s not under git control" % root)
        raise NotThisMethod("'git rev-parse --git-dir' returned error")
    # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
    # if there isn't one, this yields HEX[-dirty] (no NUM)
    describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
                                          "--always", "--long",
                                          "--match", "%s*" % tag_prefix],
                                   cwd=root)
    # --long was added in git-1.5.5
    if describe_out is None:
        raise NotThisMethod("'git describe' failed")
    describe_out = describe_out.strip()
    full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if full_out is None:
        raise NotThisMethod("'git rev-parse' failed")
    full_out = full_out.strip()
    pieces = {}
    pieces["long"] = full_out
    pieces["short"] = full_out[:7]  # maybe improved later
    pieces["error"] = None
    # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
    # TAG might have hyphens.
    git_describe = describe_out
    # look for -dirty suffix
    dirty = git_describe.endswith("-dirty")
    pieces["dirty"] = dirty
    if dirty:
        git_describe = git_describe[:git_describe.rindex("-dirty")]
    # now we have TAG-NUM-gHEX or HEX
    if "-" in git_describe:
        # TAG-NUM-gHEX
        mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
        if not mo:
            # unparseable. Maybe git-describe is misbehaving?
            pieces["error"] = ("unable to parse git-describe output: '%s'"
                               % describe_out)
            return pieces
        # tag
        full_tag = mo.group(1)
        if not full_tag.startswith(tag_prefix):
            if verbose:
                fmt = "tag '%s' doesn't start with prefix '%s'"
                print(fmt % (full_tag, tag_prefix))
            pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
                               % (full_tag, tag_prefix))
            return pieces
        pieces["closest-tag"] = full_tag[len(tag_prefix):]
        # distance: number of commits since tag
        pieces["distance"] = int(mo.group(2))
        # commit: short hex revision ID
        pieces["short"] = mo.group(3)
    else:
        # HEX: no tags
        pieces["closest-tag"] = None
        count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
                                    cwd=root)
        pieces["distance"] = int(count_out)  # total number of commits
    # commit date: see ISO-8601 comment in git_versions_from_keywords()
    date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
                       cwd=root)[0].strip()
    pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
    return pieces
def do_vcs_install(manifest_in, versionfile_source, ipy):
    """Git-specific installation logic for Versioneer.
    For Git, this means creating/changing .gitattributes to mark _version.py
    for export-subst keyword substitution, then staging the touched files
    with `git add`.
    """
    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    files = [manifest_in, versionfile_source]
    if ipy:
        files.append(ipy)
    # Also stage this versioneer.py file itself; fall back to the bare name
    # when __file__ is unavailable (e.g. frozen environments).
    try:
        me = __file__
        if me.endswith(".pyc") or me.endswith(".pyo"):
            me = os.path.splitext(me)[0] + ".py"
        versioneer_file = os.path.relpath(me)
    except NameError:
        versioneer_file = "versioneer.py"
    files.append(versioneer_file)
    # Check whether .gitattributes already marks versionfile_source with
    # export-subst. Use context managers so the file handle is closed even
    # if reading raises mid-loop (the original leaked the handle in that
    # case because close() could be skipped).
    present = False
    try:
        with open(".gitattributes", "r") as f:
            for line in f:
                stripped = line.strip()
                if stripped.startswith(versionfile_source):
                    if "export-subst" in stripped.split()[1:]:
                        present = True
    except EnvironmentError:
        # No .gitattributes yet; we will create it below.
        pass
    if not present:
        with open(".gitattributes", "a+") as f:
            f.write("%s export-subst\n" % versionfile_source)
        files.append(".gitattributes")
    run_command(GITS, ["add", "--"] + files)
def versions_from_parentdir(parentdir_prefix, root, verbose):
    """Try to determine the version from the parent directory name.

    Source tarballs conventionally unpack into a directory that includes
    both the project name and a version string. Up to two parent levels
    above *root* are also searched for a suitably named directory.
    """
    tried = []
    for _ in range(3):
        basename = os.path.basename(root)
        if basename.startswith(parentdir_prefix):
            return {"version": basename[len(parentdir_prefix):],
                    "full-revisionid": None,
                    "dirty": False, "error": None, "date": None}
        tried.append(root)
        root = os.path.dirname(root)  # climb one level and retry
    if verbose:
        print("Tried directories %s but none started with prefix %s" %
              (str(tried), parentdir_prefix))
    raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
SHORT_VERSION_PY = """
# This file was generated by 'versioneer.py' (0.18) from
# revision-control system data, or from the parent directory name of an
# unpacked source archive. Distribution tarballs contain a pre-generated copy
# of this file.
import json
version_json = '''
%s
''' # END VERSION_JSON
def get_versions():
return json.loads(version_json)
"""
def versions_from_file(filename):
    """Try to determine the version from _version.py if present.

    Parses the JSON blob embedded by SHORT_VERSION_PY, accepting either
    Unix or Windows line endings after the opening quotes.
    """
    try:
        with open(filename) as fobj:
            contents = fobj.read()
    except EnvironmentError:
        raise NotThisMethod("unable to read _version.py")
    for newline in ("\n", "\r\n"):
        pattern = "version_json = '''%s(.*)''' # END VERSION_JSON" % newline
        match = re.search(pattern, contents, re.M | re.S)
        if match:
            return json.loads(match.group(1))
    raise NotThisMethod("no version_json in _version.py")
def write_to_version_file(filename, versions):
    """Write the given version number to the given _version.py file.

    The existing file is unlinked first so a hardlinked copy (e.g. from an
    sdist release tree) is not modified in place.
    """
    os.unlink(filename)
    payload = json.dumps(versions, sort_keys=True,
                         indent=1, separators=(",", ": "))
    with open(filename, "w") as fobj:
        fobj.write(SHORT_VERSION_PY % payload)
    print("set %s to '%s'" % (filename, versions["version"]))
def plus_or_dot(pieces):
    """Return a "." if the closest tag already has a "+", else return "+".

    Fix: pieces["closest-tag"] is stored as None (not missing) when no tag
    was found, so dict.get's default never applied and `"+" in None` raised
    TypeError. Coerce None to "" with `or`.
    """
    if "+" in (pieces.get("closest-tag") or ""):
        return "."
    return "+"
def render_pep440(pieces):
    """Build up version string, with post-release "local version identifier".

    Goal: TAG[+DISTANCE.gHEX[.dirty]]. A tagged build that is then dirtied
    renders as TAG+0.gHEX.dirty.
    Exceptions:
    1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: nothing tagged yet
        rendered = "0+untagged.%d.g%s" % (pieces["distance"],
                                          pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
        return rendered
    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        rendered += plus_or_dot(pieces)
        rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered
def render_pep440_pre(pieces):
    """TAG[.post.devDISTANCE] -- No -dirty.

    Exceptions:
    1: no tags. 0.post.devDISTANCE
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1
        return "0.post.dev%d" % pieces["distance"]
    if not pieces["distance"]:
        return tag
    return "%s.post.dev%d" % (tag, pieces["distance"])
def render_pep440_post(pieces):
    """TAG[.postDISTANCE[.dev0]+gHEX] .

    The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree
    appears "older" than the corresponding clean one), but you shouldn't be
    releasing software with -dirty anyways.
    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if tag:
        rendered = tag
        if pieces["distance"] or pieces["dirty"]:
            rendered += ".post%d" % pieces["distance"]
            if pieces["dirty"]:
                rendered += ".dev0"
            rendered += plus_or_dot(pieces)
            rendered += "g%s" % pieces["short"]
        return rendered
    # exception #1: untagged -- hex always appended
    rendered = "0.post%d" % pieces["distance"]
    if pieces["dirty"]:
        rendered += ".dev0"
    return rendered + "+g%s" % pieces["short"]
def render_pep440_old(pieces):
    """TAG[.postDISTANCE[.dev0]] .

    The ".dev0" means dirty.
    Exceptions:
    1: no tags. 0.postDISTANCE[.dev0]
    """
    tag = pieces["closest-tag"]
    if not tag:
        # exception #1: untagged -- always carries .post
        rendered = "0.post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
        return rendered
    rendered = tag
    if pieces["distance"] or pieces["dirty"]:
        rendered += ".post%d" % pieces["distance"]
        if pieces["dirty"]:
            rendered += ".dev0"
    return rendered
def render_git_describe(pieces):
    """TAG[-DISTANCE-gHEX][-dirty].

    Mirrors 'git describe --tags --dirty --always'.
    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        parts = [pieces["closest-tag"]]
        if pieces["distance"]:
            parts.append("%d-g%s" % (pieces["distance"], pieces["short"]))
        rendered = "-".join(parts)
    else:
        # exception #1: bare hex, no 'g' prefix
        rendered = pieces["short"]
    return rendered + ("-dirty" if pieces["dirty"] else "")
def render_git_describe_long(pieces):
    """TAG-DISTANCE-gHEX[-dirty].

    Mirrors 'git describe --tags --dirty --always --long'; the
    distance/hash portion is unconditional.
    Exceptions:
    1: no tags. HEX[-dirty] (note: no 'g' prefix)
    """
    if pieces["closest-tag"]:
        rendered = "%s-%d-g%s" % (pieces["closest-tag"],
                                  pieces["distance"], pieces["short"])
    else:
        # exception #1: bare hex, no 'g' prefix
        rendered = pieces["short"]
    if pieces["dirty"]:
        rendered += "-dirty"
    return rendered
def render(pieces, style):
    """Render the given version pieces into the requested style."""
    if pieces["error"]:
        # An upstream step already failed; report an unknown version.
        return {"version": "unknown",
                "full-revisionid": pieces.get("long"),
                "dirty": None,
                "error": pieces["error"],
                "date": None}
    if not style or style == "default":
        style = "pep440"  # the default
    renderers = {
        "pep440": render_pep440,
        "pep440-pre": render_pep440_pre,
        "pep440-post": render_pep440_post,
        "pep440-old": render_pep440_old,
        "git-describe": render_git_describe,
        "git-describe-long": render_git_describe_long,
    }
    renderer = renderers.get(style)
    if renderer is None:
        raise ValueError("unknown style '%s'" % style)
    rendered = renderer(pieces)
    return {"version": rendered, "full-revisionid": pieces["long"],
            "dirty": pieces["dirty"], "error": None,
            "date": pieces.get("date")}
class VersioneerBadRootError(Exception):
    """The project root directory is unknown or missing key files."""
    # NOTE(review): raised outside this chunk (presumably by get_root());
    # defined here so callers can catch a Versioneer-specific error type.
def get_versions(verbose=False):
    """Get the project version from whatever source is available.

    Tries, in order: expanded VCS keywords, a previously generated
    _version.py, a live VCS query, and finally the parent directory name.
    Falls back to a "0+unknown" dict when every method raises NotThisMethod.

    Returns a dict with at least 'version', 'full-revisionid', 'dirty',
    'error' and 'date' keys (see the fallback return below).
    """
    if "versioneer" in sys.modules:
        # see the discussion in cmdclass.py:get_cmdclass()
        del sys.modules["versioneer"]
    root = get_root()
    cfg = get_config_from_root(root)
    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
    handlers = HANDLERS.get(cfg.VCS)
    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
    # Command-line verbosity and setup.cfg verbosity are OR'ed together.
    verbose = verbose or cfg.verbose
    assert cfg.versionfile_source is not None, \
        "please set versioneer.versionfile_source"
    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
    versionfile_abs = os.path.join(root, cfg.versionfile_source)
    # extract version from first of: _version.py, VCS command (e.g. 'git
    # describe'), parentdir. This is meant to work for developers using a
    # source checkout, for users of a tarball created by 'setup.py sdist',
    # and for users of a tarball/zipball created by 'git archive' or github's
    # download-from-tag feature or the equivalent in other VCSes.
    get_keywords_f = handlers.get("get_keywords")
    from_keywords_f = handlers.get("keywords")
    if get_keywords_f and from_keywords_f:
        # Method 1: expanded keywords in the versionfile (git archive case).
        try:
            keywords = get_keywords_f(versionfile_abs)
            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
            if verbose:
                print("got version from expanded keyword %s" % ver)
            return ver
        except NotThisMethod:
            pass
    # Method 2: a _version.py previously generated by versioneer itself.
    try:
        ver = versions_from_file(versionfile_abs)
        if verbose:
            print("got version from file %s %s" % (versionfile_abs, ver))
        return ver
    except NotThisMethod:
        pass
    # Method 3: query the VCS directly (developer checkout).
    from_vcs_f = handlers.get("pieces_from_vcs")
    if from_vcs_f:
        try:
            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
            ver = render(pieces, cfg.style)
            if verbose:
                print("got version from VCS %s" % ver)
            return ver
        except NotThisMethod:
            pass
    # Method 4: derive the version from the unpacked directory name.
    try:
        if cfg.parentdir_prefix:
            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
            if verbose:
                print("got version from parentdir %s" % ver)
            return ver
    except NotThisMethod:
        pass
    if verbose:
        print("unable to compute version")
    return {"version": "0+unknown", "full-revisionid": None,
            "dirty": None, "error": "unable to compute version",
            "date": None}
def get_version():
    """Get the short version string for this project."""
    all_versions = get_versions()
    return all_versions["version"]
def get_cmdclass():
    """Get the custom setuptools/distutils subclasses used by Versioneer.

    Returns a dict mapping command names ('version', 'build_py', 'sdist',
    and optionally 'build_exe'/'py2exe') to Command subclasses that embed
    the computed version into built artifacts.
    """
    if "versioneer" in sys.modules:
        del sys.modules["versioneer"]
        # this fixes the "python setup.py develop" case (also 'install' and
        # 'easy_install .'), in which subdependencies of the main project are
        # built (using setup.py bdist_egg) in the same python process. Assume
        # a main project A and a dependency B, which use different versions
        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
        # sys.modules by the time B's setup.py is executed, causing B to run
        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
        # sandbox that restores sys.modules to it's pre-build state, so the
        # parent is protected against the child's "import versioneer". By
        # removing ourselves from sys.modules here, before the child build
        # happens, we protect the child from the parent's versioneer too.
        # Also see https://github.com/warner/python-versioneer/issues/52
    cmds = {}
    # we add "version" to both distutils and setuptools
    from distutils.core import Command
    class cmd_version(Command):
        # `setup.py version`: print the computed version and its components.
        description = "report generated version string"
        user_options = []
        boolean_options = []
        def initialize_options(self):
            pass
        def finalize_options(self):
            pass
        def run(self):
            vers = get_versions(verbose=True)
            print("Version: %s" % vers["version"])
            print(" full-revisionid: %s" % vers.get("full-revisionid"))
            print(" dirty: %s" % vers.get("dirty"))
            print(" date: %s" % vers.get("date"))
            if vers["error"]:
                print(" error: %s" % vers["error"])
    cmds["version"] = cmd_version
    # we override "build_py" in both distutils and setuptools
    #
    # most invocation pathways end up running build_py:
    # distutils/build -> build_py
    # distutils/install -> distutils/build ->..
    # setuptools/bdist_wheel -> distutils/install ->..
    # setuptools/bdist_egg -> distutils/install_lib -> build_py
    # setuptools/install -> bdist_egg ->..
    # setuptools/develop -> ?
    # pip install:
    # copies source tree to a tempdir before running egg_info/etc
    # if .git isn't copied too, 'git describe' will fail
    # then does setup.py bdist_wheel, or sometimes setup.py install
    # setup.py egg_info -> ?
    # we override different "build_py" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.build_py import build_py as _build_py
    else:
        from distutils.command.build_py import build_py as _build_py
    class cmd_build_py(_build_py):
        # After the normal build, rewrite the copied _version.py so the
        # built tree carries a static version instead of VCS-query code.
        def run(self):
            root = get_root()
            cfg = get_config_from_root(root)
            versions = get_versions()
            _build_py.run(self)
            # now locate _version.py in the new build/ directory and replace
            # it with an updated value
            if cfg.versionfile_build:
                target_versionfile = os.path.join(self.build_lib,
                                                  cfg.versionfile_build)
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
    cmds["build_py"] = cmd_build_py
    if "cx_Freeze" in sys.modules: # cx_freeze enabled?
        from cx_Freeze.dist import build_exe as _build_exe
        # nczeczulin reports that py2exe won't like the pep440-style string
        # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g.
        # setup(console=[{
        # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION
        # "product_version": versioneer.get_version(),
        # ...
        class cmd_build_exe(_build_exe):
            # Temporarily replace the *source* versionfile with a static
            # version, run the freeze, then restore the VCS-query template.
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
                _build_exe.run(self)
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["build_exe"] = cmd_build_exe
        # NOTE(review): build_exe rewrites the source versionfile itself, so
        # the build_py override is dropped here -- confirm this is intended
        # for projects that run both commands.
        del cmds["build_py"]
    if 'py2exe' in sys.modules: # py2exe enabled?
        try:
            from py2exe.distutils_buildexe import py2exe as _py2exe # py3
        except ImportError:
            from py2exe.build_exe import py2exe as _py2exe # py2
        class cmd_py2exe(_py2exe):
            # Same swap/restore dance as cmd_build_exe, for py2exe.
            def run(self):
                root = get_root()
                cfg = get_config_from_root(root)
                versions = get_versions()
                target_versionfile = cfg.versionfile_source
                print("UPDATING %s" % target_versionfile)
                write_to_version_file(target_versionfile, versions)
                _py2exe.run(self)
                os.unlink(target_versionfile)
                with open(cfg.versionfile_source, "w") as f:
                    LONG = LONG_VERSION_PY[cfg.VCS]
                    f.write(LONG %
                            {"DOLLAR": "$",
                             "STYLE": cfg.style,
                             "TAG_PREFIX": cfg.tag_prefix,
                             "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                             "VERSIONFILE_SOURCE": cfg.versionfile_source,
                             })
        cmds["py2exe"] = cmd_py2exe
    # we override different "sdist" commands for both environments
    if "setuptools" in sys.modules:
        from setuptools.command.sdist import sdist as _sdist
    else:
        from distutils.command.sdist import sdist as _sdist
    class cmd_sdist(_sdist):
        def run(self):
            versions = get_versions()
            self._versioneer_generated_versions = versions
            # unless we update this, the command will keep using the old
            # version
            self.distribution.metadata.version = versions["version"]
            return _sdist.run(self)
        def make_release_tree(self, base_dir, files):
            root = get_root()
            cfg = get_config_from_root(root)
            _sdist.make_release_tree(self, base_dir, files)
            # now locate _version.py in the new base_dir directory
            # (remembering that it may be a hardlink) and replace it with an
            # updated value
            target_versionfile = os.path.join(base_dir, cfg.versionfile_source)
            print("UPDATING %s" % target_versionfile)
            write_to_version_file(target_versionfile,
                                  self._versioneer_generated_versions)
    cmds["sdist"] = cmd_sdist
    return cmds
# Error text printed (to stderr) by do_setup() when setup.cfg lacks a
# usable [versioneer] section.
CONFIG_ERROR = """
setup.cfg is missing the necessary Versioneer configuration. You need
a section like:
[versioneer]
VCS = git
style = pep440
versionfile_source = src/myproject/_version.py
versionfile_build = myproject/_version.py
tag_prefix =
parentdir_prefix = myproject-
You will also need to edit your setup.py to use the results:
import versioneer
setup(version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(), ...)
Please read the docstring in ./versioneer.py for configuration instructions,
edit setup.cfg, and re-run the installer or 'python versioneer.py setup'.
"""
# Commented-out [versioneer] stanza that do_setup() appends to setup.cfg
# when no configuration is found, so the user has a template to fill in.
SAMPLE_CONFIG = """
# See the docstring in versioneer.py for instructions. Note that you must
# re-run 'versioneer.py setup' after changing this section, and commit the
# resulting files.
[versioneer]
#VCS = git
#style = pep440
#versionfile_source =
#versionfile_build =
#tag_prefix =
#parentdir_prefix =
"""
# Snippet do_setup() appends to the package __init__.py so that
# `mypkg.__version__` is available at runtime.
INIT_PY_SNIPPET = """
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
"""
def do_setup():
    """Main VCS-independent setup function for installing Versioneer.

    Writes the configured versionfile, wires __init__.py, updates
    MANIFEST.in, and applies VCS-specific changes. Returns 0 on success,
    1 when setup.cfg lacks a usable [versioneer] section (after appending
    a commented sample section for the user to edit).
    """
    root = get_root()
    try:
        cfg = get_config_from_root(root)
    except (EnvironmentError, configparser.NoSectionError,
            configparser.NoOptionError) as e:
        # Missing file or missing section: seed setup.cfg with a sample
        # config; a missing *option* only gets the error message.
        if isinstance(e, (EnvironmentError, configparser.NoSectionError)):
            print("Adding sample versioneer config to setup.cfg",
                  file=sys.stderr)
            with open(os.path.join(root, "setup.cfg"), "a") as f:
                f.write(SAMPLE_CONFIG)
        print(CONFIG_ERROR, file=sys.stderr)
        return 1
    # Generate the in-package _version.py from the VCS-specific template.
    print(" creating %s" % cfg.versionfile_source)
    with open(cfg.versionfile_source, "w") as f:
        LONG = LONG_VERSION_PY[cfg.VCS]
        f.write(LONG % {"DOLLAR": "$",
                        "STYLE": cfg.style,
                        "TAG_PREFIX": cfg.tag_prefix,
                        "PARENTDIR_PREFIX": cfg.parentdir_prefix,
                        "VERSIONFILE_SOURCE": cfg.versionfile_source,
                        })
    # Expose __version__ from the package __init__.py (append once only).
    ipy = os.path.join(os.path.dirname(cfg.versionfile_source),
                       "__init__.py")
    if os.path.exists(ipy):
        try:
            with open(ipy, "r") as f:
                old = f.read()
        except EnvironmentError:
            old = ""
        if INIT_PY_SNIPPET not in old:
            print(" appending to %s" % ipy)
            with open(ipy, "a") as f:
                f.write(INIT_PY_SNIPPET)
        else:
            print(" %s unmodified" % ipy)
    else:
        print(" %s doesn't exist, ok" % ipy)
        ipy = None
    # Make sure both the top-level "versioneer.py" and versionfile_source
    # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so
    # they'll be copied into source distributions. Pip won't be able to
    # install the package without this.
    manifest_in = os.path.join(root, "MANIFEST.in")
    simple_includes = set()
    try:
        with open(manifest_in, "r") as f:
            for line in f:
                if line.startswith("include "):
                    for include in line.split()[1:]:
                        simple_includes.add(include)
    except EnvironmentError:
        pass
    # That doesn't cover everything MANIFEST.in can do
    # (http://docs.python.org/2/distutils/sourcedist.html#commands), so
    # it might give some false negatives. Appending redundant 'include'
    # lines is safe, though.
    if "versioneer.py" not in simple_includes:
        print(" appending 'versioneer.py' to MANIFEST.in")
        with open(manifest_in, "a") as f:
            f.write("include versioneer.py\n")
    else:
        print(" 'versioneer.py' already in MANIFEST.in")
    if cfg.versionfile_source not in simple_includes:
        print(" appending versionfile_source ('%s') to MANIFEST.in" %
              cfg.versionfile_source)
        with open(manifest_in, "a") as f:
            f.write("include %s\n" % cfg.versionfile_source)
    else:
        print(" versionfile_source already in MANIFEST.in")
    # Make VCS-specific changes. For git, this means creating/changing
    # .gitattributes to mark _version.py for export-subst keyword
    # substitution.
    do_vcs_install(manifest_in, cfg.versionfile_source, ipy)
    return 0
def scan_setup_py():
    """Validate the contents of setup.py against Versioneer's expectations.

    Scans ./setup.py for the expected `import versioneer` usage and warns
    about obsolete module-attribute configuration. Returns the number of
    problem categories found (0 when everything looks right).
    """
    found = set()
    setters = False
    errors = 0
    needles = {
        "import versioneer": "import",
        "versioneer.get_cmdclass()": "cmdclass",
        "versioneer.get_version()": "get_version",
    }
    with open("setup.py", "r") as setup_file:
        for line in setup_file.readlines():
            for needle, token in needles.items():
                if needle in line:
                    found.add(token)
            # Old-style configuration assigned attributes on the module.
            if "versioneer.VCS" in line:
                setters = True
            if "versioneer.versionfile_source" in line:
                setters = True
    if len(found) != 3:
        print("")
        print("Your setup.py appears to be missing some important items")
        print("(but I might be wrong). Please make sure it has something")
        print("roughly like the following:")
        print("")
        print(" import versioneer")
        print(" setup( version=versioneer.get_version(),")
        print(" cmdclass=versioneer.get_cmdclass(), ...)")
        print("")
        errors += 1
    if setters:
        print("You should remove lines like 'versioneer.VCS = ' and")
        print("'versioneer.versionfile_source = ' . This configuration")
        print("now lives in setup.cfg, and should be removed from setup.py")
        print("")
        errors += 1
    return errors
# Command-line entry point: `python versioneer.py setup` installs the
# version machinery into the current project and then sanity-checks
# setup.py; a nonzero error count yields exit status 1.
if __name__ == "__main__":
    cmd = sys.argv[1]
    if cmd == "setup":
        errors = do_setup()
        errors += scan_setup_py()
        if errors:
            sys.exit(1)
| apache-2.0 |
tejesh95/Zubio.in | zubio/allauth/account/auth_backends.py | 10 | 2211 | from django.contrib.auth.backends import ModelBackend
from django.db.models import Q
from ..utils import get_user_model
from .app_settings import AuthenticationMethod
from . import app_settings
class AuthenticationBackend(ModelBackend):
    """Django auth backend honouring allauth's AUTHENTICATION_METHOD:
    log in by e-mail address, by username, or by either."""

    def authenticate(self, **credentials):
        method = app_settings.AUTHENTICATION_METHOD
        if method == AuthenticationMethod.EMAIL:
            return self._authenticate_by_email(**credentials)
        if method == AuthenticationMethod.USERNAME_EMAIL:
            # Try e-mail first, then fall back to username.
            user = self._authenticate_by_email(**credentials)
            if not user:
                user = self._authenticate_by_username(**credentials)
            return user
        return self._authenticate_by_username(**credentials)

    def _authenticate_by_username(self, **credentials):
        username_field = app_settings.USER_MODEL_USERNAME_FIELD
        username = credentials.get('username')
        password = credentials.get('password')
        User = get_user_model()
        if not username_field or username is None or password is None:
            return None
        try:
            # Username lookup is case insensitive.
            candidate = User.objects.get(
                **{username_field + '__iexact': username})
        except User.DoesNotExist:
            return None
        if candidate.check_password(password):
            return candidate
        return None

    def _authenticate_by_email(self, **credentials):
        # Even though allauth will pass along `email`, other apps may
        # not respect this setting. For example, when using
        # django-tastypie basic authentication, the login is always
        # passed as `username`, so fall back to that key.
        User = get_user_model()
        email = credentials.get('email', credentials.get('username'))
        if not email:
            return None
        candidates = User.objects.filter(
            Q(email__iexact=email) | Q(emailaddress__email__iexact=email))
        for candidate in candidates:
            if candidate.check_password(credentials["password"]):
                return candidate
        return None
| mit |
xen0l/ansible | lib/ansible/modules/network/f5/bigip_routedomain.py | 18 | 18324 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata block (module status / support tier).
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_routedomain
short_description: Manage route domains on a BIG-IP
description:
- Manage route domains on a BIG-IP.
version_added: 2.2
options:
name:
description:
- The name of the route domain.
version_added: 2.5
bwc_policy:
description:
- The bandwidth controller for the route domain.
connection_limit:
description:
- The maximum number of concurrent connections allowed for the
route domain. Setting this to C(0) turns off connection limits.
description:
description:
- Specifies descriptive text that identifies the route domain.
flow_eviction_policy:
description:
- The eviction policy to use with this route domain. Apply an eviction
policy to provide customized responses to flow overflows and slow
flows on the route domain.
id:
description:
- The unique identifying integer representing the route domain.
- This field is required when creating a new route domain.
- In version 2.5, this value is no longer used to reference a route domain when
making modifications to it (for instance during update and delete operations).
Instead, the C(name) parameter is used. In version 2.6, the C(name) value will
become a required parameter.
parent:
description:
- Specifies the route domain the system searches when it cannot
find a route in the configured domain.
partition:
description:
- Partition to create the route domain on. Partitions cannot be updated
once they are created.
default: Common
version_added: 2.5
routing_protocol:
description:
- Dynamic routing protocols for the system to use in the route domain.
choices:
- BFD
- BGP
- IS-IS
- OSPFv2
- OSPFv3
- PIM
- RIP
- RIPng
service_policy:
description:
- Service policy to associate with the route domain.
state:
description:
- Whether the route domain should exist or not.
default: present
choices:
- present
- absent
strict:
description:
- Specifies whether the system enforces cross-routing restrictions or not.
type: bool
vlans:
description:
- VLANs for the system to use in the route domain.
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create a route domain
bigip_routedomain:
name: foo
id: 1234
password: secret
server: lb.mydomain.com
state: present
user: admin
delegate_to: localhost
- name: Set VLANs on the route domain
bigip_routedomain:
name: bar
password: secret
server: lb.mydomain.com
state: present
user: admin
vlans:
- net1
- foo
delegate_to: localhost
'''
RETURN = r'''
id:
description: The ID of the route domain that was changed.
returned: changed
type: int
sample: 2
description:
description: The description of the route domain.
returned: changed
type: string
sample: route domain foo
strict:
description: The new strict isolation setting.
returned: changed
type: string
sample: enabled
parent:
description: The new parent route domain.
returned: changed
type: int
sample: 0
vlans:
description: List of new VLANs the route domain is applied to.
returned: changed
type: list
sample: ['/Common/http-tunnel', '/Common/socks-tunnel']
routing_protocol:
description: List of routing protocols applied to the route domain.
returned: changed
type: list
sample: ['bfd', 'bgp']
bwc_policy:
description: The new bandwidth controller.
returned: changed
type: string
sample: /Common/foo
connection_limit:
description: The new connection limit for the route domain.
returned: changed
type: int
sample: 100
flow_eviction_policy:
description: The new eviction policy to use with this route domain.
returned: changed
type: string
sample: /Common/default-eviction-policy
service_policy:
description: The new service policy to use with this route domain.
returned: changed
type: string
sample: /Common-my-service-policy
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
except ImportError:
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
class Parameters(AnsibleF5Parameters):
    """Base parameter container for the route-domain module.

    Declares the REST<->module attribute name mapping and which attributes
    are sent to the device, returned to the user, and diffed for updates.
    """
    # REST (camelCase) attribute name -> module (snake_case) option name.
    api_map = {
        'connectionLimit': 'connection_limit',
        'servicePolicy': 'service_policy',
        'bwcPolicy': 'bwc_policy',
        'flowEvictionPolicy': 'flow_eviction_policy',
        'routingProtocol': 'routing_protocol'
    }
    # Attributes (REST names) included in payloads sent to the device.
    api_attributes = [
        'connectionLimit',
        'description',
        'strict',
        'parent',
        'servicePolicy',
        'bwcPolicy',
        'flowEvictionPolicy',
        'routingProtocol',
        'vlans',
        'id'
    ]
    # Values reported back in the module result.
    returnables = [
        'description',
        'strict',
        'parent',
        'service_policy',
        'bwc_policy',
        'flow_eviction_policy',
        'routing_protocol',
        'vlans',
        'connection_limit',
        'id'
    ]
    # Attributes compared between want and have to detect changes.
    updatables = [
        'description',
        'strict',
        'parent',
        'service_policy',
        'bwc_policy',
        'flow_eviction_policy',
        'routing_protocol',
        'vlans',
        'connection_limit',
        'id'
    ]
    @property
    def connection_limit(self):
        # Coerce to int so want/have comparisons are type-stable.
        if self._values['connection_limit'] is None:
            return None
        return int(self._values['connection_limit'])
    @property
    def id(self):
        # Route-domain id is an integer identifier; coerce as above.
        if self._values['id'] is None:
            return None
        return int(self._values['id'])
class ApiParameters(Parameters):
    """Parameters as read back from the BIG-IP device."""
    @property
    def strict(self):
        # Device reports 'enabled'/'disabled'; expose a bool for diffing.
        if self._values['strict'] is None:
            return None
        if self._values['strict'] == 'enabled':
            return True
        return False
    @property
    def domains(self):
        # Full paths of every route domain on the device; used by
        # ModuleManager.update() to validate the requested parent.
        domains = self.read_domains_from_device()
        result = [x.fullPath for x in domains]
        return result
    def read_domains_from_device(self):
        """Fetch the route-domain collection via the device REST API."""
        collection = self.client.api.tm.net.route_domains.get_collection()
        return collection
class ModuleParameters(Parameters):
    """Parameters as supplied by the user, normalized for comparison
    (names fully qualified with the partition)."""
    @property
    def bwc_policy(self):
        if self._values['bwc_policy'] is None:
            return None
        return fq_name(self.partition, self._values['bwc_policy'])
    @property
    def flow_eviction_policy(self):
        if self._values['flow_eviction_policy'] is None:
            return None
        return fq_name(self.partition, self._values['flow_eviction_policy'])
    @property
    def service_policy(self):
        if self._values['service_policy'] is None:
            return None
        return fq_name(self.partition, self._values['service_policy'])
    @property
    def parent(self):
        if self._values['parent'] is None:
            return None
        result = fq_name(self.partition, self._values['parent'])
        return result
    @property
    def vlans(self):
        # A list containing a single empty string is the "clear the list"
        # sentinel (returned as ''); otherwise fully-qualify each VLAN.
        if self._values['vlans'] is None:
            return None
        if len(self._values['vlans']) == 1 and self._values['vlans'][0] == '':
            return ''
        return [fq_name(self.partition, x) for x in self._values['vlans']]
    @property
    def name(self):
        # Fall back to the stringified id when no name was supplied
        # (name became the reference key in 2.5; id remains accepted).
        if self._values['name'] is None:
            return str(self.id)
        return self._values['name']
    @property
    def routing_protocol(self):
        # Same '' clear-the-list sentinel convention as vlans.
        if self._values['routing_protocol'] is None:
            return None
        if len(self._values['routing_protocol']) == 1 and self._values['routing_protocol'][0] == '':
            return ''
        return self._values['routing_protocol']
class Changes(Parameters):
    def to_return(self):
        """Return a dict of the returnable values, filtered of empties.

        NOTE(review): the broad `except Exception: pass` silently returns
        whatever was collected before the failure -- presumably deliberate
        best-effort reporting; confirm before tightening.
        """
        result = {}
        try:
            for returnable in self.returnables:
                result[returnable] = getattr(self, returnable)
            result = self._filter_params(result)
        except Exception:
            pass
        return result
class UsableChanges(Changes):
    """Changes shaped for sending to the device ('enabled'/'disabled'
    strings instead of booleans)."""

    @property
    def strict(self):
        value = self._values['strict']
        if value is None:
            return None
        return 'enabled' if value else 'disabled'
class ReportableChanges(Changes):
    """Changes shaped for module output ('yes'/'no' instead of the
    device's 'enabled'/'disabled')."""

    @property
    def strict(self):
        value = self._values['strict']
        if value is None:
            return None
        return 'yes' if value == 'enabled' else 'no'
class Difference(object):
    """Computes, attribute by attribute, what differs between the desired
    (want) and current (have) parameter sets."""

    def __init__(self, want, have=None):
        self.want = want
        self.have = have

    def compare(self, param):
        """Return the changed value for *param*, or None when unchanged.

        Attributes with a dedicated property below get custom handling;
        everything else falls through to a plain equality check.
        """
        try:
            return getattr(self, param)
        except AttributeError:
            return self.__default(param)

    def __default(self, param):
        desired = getattr(self.want, param)
        try:
            current = getattr(self.have, param)
        except AttributeError:
            return desired
        if desired != current:
            return desired

    def _list_diff(self, desired, current):
        # Shared diff logic for list-valued attributes where the ''
        # sentinel means "clear the list on the device".
        if desired is None:
            return None
        if desired == '':
            if current is None:
                return None
            if len(current) > 0:
                return []
        if current is None:
            return desired
        if set(desired) != set(current):
            return list(set(desired))

    @property
    def routing_protocol(self):
        return self._list_diff(self.want.routing_protocol,
                               self.have.routing_protocol)

    @property
    def vlans(self):
        return self._list_diff(self.want.vlans, self.have.vlans)
class ModuleManager(object):
    def __init__(self, *args, **kwargs):
        """Collect module/client handles and build the desired (want),
        current (have) and pending-change parameter sets."""
        self.module = kwargs.get('module', None)
        self.client = kwargs.get('client', None)
        self.want = ModuleParameters(params=self.module.params, client=self.client)
        self.have = ApiParameters(client=self.client)
        self.changes = UsableChanges()
    def _set_changed_options(self):
        """Seed self.changes with every explicitly supplied desired value
        (no device comparison involved)."""
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
try:
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def exists(self):
result = self.client.api.tm.net.route_domains.route_domain.exists(
name=self.want.name,
partition=self.want.partition
)
return result
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.want.parent and self.want.parent not in self.have.domains:
raise F5ModuleError(
"The parent route domain was not found."
)
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
if self.want.id is None:
raise F5ModuleError(
"The 'id' parameter is required when creating new route domains."
)
if self.want.parent and self.want.parent not in self.have.domains:
raise F5ModuleError(
"The parent route domain was not found."
)
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
def create_on_device(self):
params = self.changes.api_params()
self.client.api.tm.net.route_domains.route_domain.create(
name=self.want.name,
partition=self.want.partition,
**params
)
def update_on_device(self):
params = self.changes.api_params()
resource = self.client.api.tm.net.route_domains.route_domain.load(
name=self.want.name,
partition=self.want.partition
)
resource.modify(**params)
def absent(self):
if self.exists():
return self.remove()
return False
def remove_from_device(self):
resource = self.client.api.tm.net.route_domains.route_domain.load(
name=self.want.name,
partition=self.want.partition
)
if resource:
resource.delete()
def read_current_from_device(self):
resource = self.client.api.tm.net.route_domains.route_domain.load(
name=self.want.name,
partition=self.want.partition
)
result = resource.attrs
return ApiParameters(params=result, client=self.client)
class ArgumentSpec(object):
    """Builds the AnsibleModule argument specification for this module."""

    def __init__(self):
        self.supports_check_mode = True
        # Module-specific options, layered over the shared F5 spec below.
        own_spec = dict(
            name=dict(),
            id=dict(type='int'),
            description=dict(),
            strict=dict(type='bool'),
            parent=dict(type='int'),
            vlans=dict(type='list'),
            routing_protocol=dict(
                type='list',
                choices=['BFD', 'BGP', 'IS-IS', 'OSPFv2', 'OSPFv3', 'PIM', 'RIP', 'RIPng']
            ),
            bwc_policy=dict(),
            connection_limit=dict(type='int'),
            flow_eviction_policy=dict(),
            service_policy=dict(),
            partition=dict(
                default='Common',
                fallback=(env_fallback, ['F5_PARTITION'])
            ),
            state=dict(
                default='present',
                choices=['present', 'absent']
            )
        )
        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(own_spec)
        # The route domain must be addressable by at least one identifier.
        self.required_one_of = [
            ['name', 'id']
        ]
def main():
    """Module entry point: build the AnsibleModule and run the manager."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode
    )
    # NOTE(review): spec.required_one_of is defined but never passed to
    # AnsibleModule here -- confirm whether that enforcement was intended.
    if not HAS_F5SDK:
        module.fail_json(msg="The python f5-sdk module is required")

    client = None
    try:
        client = F5Client(**module.params)
        mm = ModuleManager(module=module, client=client)
        results = mm.exec_module()
        cleanup_tokens(client)
        module.exit_json(**results)
    except F5ModuleError as ex:
        # Bug fix: 'client' was previously referenced here even when
        # F5Client() itself raised, producing a NameError that masked the
        # real failure message.
        if client is not None:
            cleanup_tokens(client)
        module.fail_json(msg=str(ex))


if __name__ == '__main__':
    main()
| gpl-3.0 |
SurfasJones/djcmsrc3 | venv/lib/python2.7/site-packages/cms/plugin_pool.py | 3 | 10688 | # -*- coding: utf-8 -*-
import warnings
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.conf.urls import url, patterns, include
from django.contrib.formtools.wizard.views import normalize_name
from django.db import connection
from django.db.models import signals
from django.db.models.fields.related import ManyToManyField
from django.db.models.fields.related import ReverseManyRelatedObjectsDescriptor
from django.template.defaultfilters import slugify
from django.utils.translation import get_language, deactivate_all, activate
from django.template import TemplateDoesNotExist, TemplateSyntaxError
from cms.utils.compat.type_checks import string_types
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered
from cms.plugin_base import CMSPluginBase
from cms.models import CMSPlugin
from cms.utils.django_load import load, get_subclasses
from cms.utils.helpers import reversion_register
from cms.utils.placeholder import get_placeholder_conf
from cms.utils.compat.dj import force_unicode
class PluginPool(object):
    """Global registry of all CMS plugin classes.

    Plugin apps register their plugins here at startup; consumers query
    the pool to discover which plugins are available for a given
    placeholder/page/template combination.
    """

    def __init__(self):
        self.plugins = {}
        self.discovered = False
        self.patched = False

    def discover_plugins(self):
        """Load every installed app's cms_plugins module exactly once."""
        if self.discovered:
            return
        self.discovered = True
        from cms.views import invalidate_cms_page_cache
        invalidate_cms_page_cache()
        load('cms_plugins')

    def clear(self):
        """Reset the registry to its pristine state (used by tests)."""
        self.discovered = False
        self.plugins = {}
        self.patched = False

    def register_plugin(self, plugin):
        """
        Registers the given plugin(s).

        If a plugin is already registered, this will raise PluginAlreadyRegistered.
        """
        if not issubclass(plugin, CMSPluginBase):
            raise ImproperlyConfigured(
                "CMS Plugins must be subclasses of CMSPluginBase, %r is not."
                % plugin
            )
        if plugin.render_plugin and not type(plugin.render_plugin) == property or hasattr(plugin.model, 'render_template'):
            if plugin.render_template is None and not hasattr(plugin.model, 'render_template'):
                raise ImproperlyConfigured(
                    "CMS Plugins must define a render template or set render_plugin=False: %s"
                    % plugin
                )
            else:
                # Validate that the declared template can actually be loaded.
                from django.template import loader
                template = hasattr(plugin.model,
                                   'render_template') and plugin.model.render_template or plugin.render_template
                if isinstance(template, string_types) and template:
                    try:
                        loader.get_template(template)
                    except TemplateDoesNotExist:
                        raise ImproperlyConfigured(
                            "CMS Plugins must define a render template (%s) that exist: %s"
                            % (plugin, template)
                        )
                    except TemplateSyntaxError:
                        # Template exists but does not parse; defer to render time.
                        pass
        else:
            if plugin.allow_children:
                raise ImproperlyConfigured(
                    "CMS Plugins can not define render_plugin=False and allow_children=True: %s"
                    % plugin
                )
        plugin_name = plugin.__name__
        if plugin_name in self.plugins:
            raise PluginAlreadyRegistered(
                "Cannot register %r, a plugin with this name (%r) is already "
                "registered." % (plugin, plugin_name)
            )
        plugin.value = plugin_name
        self.plugins[plugin_name] = plugin

        # Wire model signals so plugin changes keep CMS state consistent.
        from cms.signals import pre_save_plugins, post_delete_plugins, pre_delete_plugins

        signals.pre_save.connect(pre_save_plugins, sender=plugin.model,
                                 dispatch_uid='cms_pre_save_plugin_%s' % plugin_name)
        signals.post_delete.connect(post_delete_plugins, sender=CMSPlugin,
                                    dispatch_uid='cms_post_delete_plugin_%s' % plugin_name)
        signals.pre_delete.connect(pre_delete_plugins, sender=CMSPlugin,
                                   dispatch_uid='cms_pre_delete_plugin_%s' % plugin_name)
        if 'reversion' in settings.INSTALLED_APPS:
            try:
                from reversion.registration import RegistrationError
            except ImportError:
                from reversion.revisions import RegistrationError
            try:
                reversion_register(plugin.model)
            except RegistrationError:
                pass

    def unregister_plugin(self, plugin):
        """
        Unregisters the given plugin(s).

        If a plugin isn't already registered, this will raise PluginNotRegistered.
        """
        plugin_name = plugin.__name__
        if plugin_name not in self.plugins:
            raise PluginNotRegistered(
                'The plugin %r is not registered' % plugin
            )
        del self.plugins[plugin_name]

    def set_plugin_meta(self):
        """
        Patches a plugin model by forcing a specific db_table whether the
        'new style' table name exists or not. The same goes for all the
        ManyToMany attributes.

        This method must be run whenever a plugin model is accessed
        directly.

        The model is modified in place; a 'patched' attribute is added
        to the model to check whether it's already been modified.
        """
        if self.patched:
            return
        table_names = connection.introspection.table_names()
        subs = get_subclasses(CMSPlugin)
        for model in subs:
            if not model._meta.abstract:
                splitter = '%s_' % model._meta.app_label
                table_name = model._meta.db_table
                if (table_name not in table_names
                        and splitter in table_name):
                    old_db_name = table_name
                    splitted = table_name.split(splitter, 1)
                    table_name = 'cmsplugin_%s' % splitted[1]
                    if table_name in table_names:
                        model._meta.db_table = table_name
                        warnings.warn(
                            'please rename the table "%s" to "%s" in %s\nThe compatibility code will be removed in 3.1' % (
                                table_name, old_db_name, model._meta.app_label), DeprecationWarning)
                for att_name in model.__dict__.keys():
                    att = model.__dict__[att_name]
                    if isinstance(att, ManyToManyField):
                        table_name = att.rel.through._meta.db_table
                        if (table_name not in table_names
                                and splitter in table_name):
                            old_db_name = table_name
                            # Bug fix: the split result used to be discarded
                            # here, so the index below reused a stale (or even
                            # unbound) 'splitted' from the model-table branch.
                            splitted = table_name.split(splitter, 1)
                            table_name = 'cmsplugin_%s' % splitted[1]
                            if table_name in table_names:
                                att.rel.through._meta.db_table = table_name
                                warnings.warn(
                                    'please rename the table "%s" to "%s" in %s\nThe compatibility code will be removed in 3.1' % (
                                        table_name, old_db_name, model._meta.app_label), DeprecationWarning)
                    elif isinstance(att, ReverseManyRelatedObjectsDescriptor):
                        table_name = att.through._meta.db_table
                        if (table_name not in table_names
                                and splitter in table_name):
                            old_db_name = table_name
                            # Bug fix: same stale-'splitted' problem as above.
                            splitted = table_name.split(splitter, 1)
                            table_name = 'cmsplugin_%s_items' % splitted[1]
                            if table_name in table_names:
                                att.through._meta.db_table = table_name
                                warnings.warn(
                                    'please rename the table "%s" to "%s" in %s\nThe compatibility code will be removed in 3.1' % (
                                        table_name, old_db_name, model._meta.app_label), DeprecationWarning)
        self.patched = True

    def get_all_plugins(self, placeholder=None, page=None, setting_key="plugins", include_page_only=True):
        """Return the plugin classes allowed for the given placeholder/page."""
        self.discover_plugins()
        self.set_plugin_meta()
        plugins = list(self.plugins.values())
        plugins.sort(key=lambda obj: force_unicode(obj.name))
        final_plugins = []
        template = page and page.get_template() or None
        allowed_plugins = get_placeholder_conf(
            setting_key,
            placeholder,
            template,
        ) or ()
        for plugin in plugins:
            include_plugin = False
            if placeholder and not plugin.require_parent:
                include_plugin = not allowed_plugins and setting_key == "plugins" or plugin.__name__ in allowed_plugins
            if plugin.page_only and not include_page_only:
                include_plugin = False
            if include_plugin:
                final_plugins.append(plugin)
        if final_plugins or placeholder:
            plugins = final_plugins
        # plugins sorted by modules
        plugins = sorted(plugins, key=lambda obj: force_unicode(obj.module))
        return plugins

    def get_text_enabled_plugins(self, placeholder, page):
        """Return the de-duplicated list of plugins usable inside text plugins."""
        plugins = self.get_all_plugins(placeholder, page)
        plugins += self.get_all_plugins(placeholder, page, 'text_only_plugins')
        final = []
        for plugin in plugins:
            if plugin.text_enabled:
                if plugin not in final:
                    final.append(plugin)
        return final

    def get_plugin(self, name):
        """
        Retrieve a plugin from the cache.
        """
        self.discover_plugins()
        self.set_plugin_meta()
        return self.plugins[name]

    def get_patterns(self):
        """Build the url patterns contributed by all registered plugins."""
        self.discover_plugins()

        # We want untranslated name of the plugin for its slug so we deactivate translation
        lang = get_language()
        deactivate_all()

        try:
            url_patterns = []
            for plugin in self.get_all_plugins():
                p = plugin()
                slug = slugify(force_unicode(normalize_name(p.__class__.__name__)))
                url_patterns += patterns('',
                    url(r'^plugin/%s/' % (slug,), include(p.plugin_urls)),
                )
        finally:
            # Reactivate translation
            activate(lang)

        return url_patterns


plugin_pool = PluginPool()
| mit |
cuongnv23/ansible | lib/ansible/utils/jsonrpc.py | 49 | 3546 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import traceback
from ansible.module_utils._text import to_text
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class Rpc:
    """Mixin implementing a minimal JSON-RPC 2.0 request dispatcher.

    Objects exposing callable methods are registered in ``self._rpc``;
    ``_exec_rpc`` routes an incoming request dict to the first registered
    object providing the requested method and returns the JSON response.
    """

    def __init__(self, *args, **kwargs):
        self._rpc = set()
        # Bug fix: always present so error responses can be built even
        # before a request id has been recorded (previously header()
        # raised AttributeError on early validation failures).
        self._identifier = None
        super(Rpc, self).__init__(*args, **kwargs)

    def _exec_rpc(self, request):
        # Record the request id first so every response -- including
        # early error responses -- carries it, per the JSON-RPC 2.0 spec.
        self._identifier = request.get('id')
        method = request.get('method')
        # Bug fix: a missing method previously crashed on .startswith();
        # reserved ('rpc.') and private ('_') names are rejected as before.
        if not method or method.startswith('rpc.') or method.startswith('_'):
            error = self.invalid_request()
            return json.dumps(error)
        params = request.get('params')
        args = []
        kwargs = {}
        if all((params, isinstance(params, list))):
            args = params
        elif all((params, isinstance(params, dict))):
            kwargs = params
        rpc_method = None
        for obj in self._rpc:
            rpc_method = getattr(obj, method, None)
            if rpc_method:
                break
        if not rpc_method:
            error = self.method_not_found()
            response = json.dumps(error)
        else:
            try:
                result = rpc_method(*args, **kwargs)
            except Exception as exc:
                display.display(traceback.format_exc(), log_only=True)
                error = self.internal_error(data=to_text(exc, errors='surrogate_then_replace'))
                response = json.dumps(error)
            else:
                if isinstance(result, dict) and 'jsonrpc' in result:
                    # The method already produced a full JSON-RPC envelope.
                    response = result
                else:
                    response = self.response(result)
                response = json.dumps(response)
        # Reset (rather than delattr, which broke subsequent early-error
        # paths) so the next request starts from a clean id.
        self._identifier = None
        return response

    def header(self):
        """Common response envelope carrying protocol version and request id."""
        return {'jsonrpc': '2.0', 'id': self._identifier}

    def response(self, result=None):
        """Build a success response; falsy results are reported as 'ok'."""
        response = self.header()
        response['result'] = result or 'ok'
        return response

    def error(self, code, message, data=None):
        """Build an error response with optional additional data."""
        response = self.header()
        error = {'code': code, 'message': message}
        if data:
            error['data'] = data
        response['error'] = error
        return response

    # json-rpc standard errors (-32768 .. -32000)
    def parse_error(self, data=None):
        return self.error(-32700, 'Parse error', data)

    def method_not_found(self, data=None):
        return self.error(-32601, 'Method not found', data)

    def invalid_request(self, data=None):
        return self.error(-32600, 'Invalid request', data)

    def invalid_params(self, data=None):
        return self.error(-32602, 'Invalid params', data)

    def internal_error(self, data=None):
        return self.error(-32603, 'Internal error', data)
| gpl-3.0 |
kingcc/shadowsocks | shadowsocks/crypto/salsa20_ctr.py | 26 | 4894 | #!/usr/bin/env python
# Copyright (c) 2014 clowwindy
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import time
import struct
import logging
import sys
# Module state: set lazily by run_imports() on first use.
slow_xor = False
imported = False
# The keystream is generated in chunks of this many bytes.
BLOCK_SIZE = 16384


def run_imports():
    """Lazily import numpy and salsa20.

    Missing numpy degrades to the slow pure-Python XOR; missing salsa20
    is fatal because the cipher cannot work without it.
    """
    global imported, slow_xor, salsa20, numpy
    if not imported:
        imported = True
        try:
            import numpy
        except ImportError:
            logging.error('can not import numpy, using SLOW XOR')
            logging.error('please install numpy if you use salsa20')
            slow_xor = True
        try:
            import salsa20
        except ImportError:
            logging.error('you have to install salsa20 before you use salsa20')
            sys.exit(1)
def numpy_xor(a, b):
    """XOR two equal-length byte strings, vectorized via numpy when available."""
    if slow_xor:
        # numpy is not installed; fall back to the pure-Python loop.
        return py_xor_str(a, b)
    # Pick the widest element type that divides the buffer length evenly,
    # so the XOR runs over as few elements as possible.
    dtype = numpy.byte
    if len(a) % 4 == 0:
        dtype = numpy.uint32
    elif len(a) % 2 == 0:
        dtype = numpy.uint16
    ab = numpy.frombuffer(a, dtype=dtype)
    bb = numpy.frombuffer(b, dtype=dtype)
    c = numpy.bitwise_xor(ab, bb)
    r = c.tostring()
    return r
def py_xor_str(a, b):
    """Pure-Python fallback XOR of two equal-length strings.

    Only used when numpy is unavailable; O(n) but much slower than the
    vectorized path in numpy_xor().
    """
    # Bug fix: previously used the Python-2-only xrange(); zip() works on
    # both Python 2 and 3. zip truncates to the shorter input, and callers
    # always pass equal-length buffers (a data chunk and its keystream
    # slice), so behavior is unchanged.
    return ''.join(chr(ord(x) ^ ord(y)) for x, y in zip(a, b))
class Salsa20Cipher(object):
    """A Salsa20 CTR implementation; provides an M2Crypto-like cipher API."""

    def __init__(self, alg, key, iv, op, key_as_bytes=0, d=None, salt=None,
                 i=1, padding=1):
        # op/key_as_bytes/d/salt/i/padding exist only for signature
        # compatibility with M2Crypto.EVP.Cipher and are ignored here.
        run_imports()
        if alg != 'salsa20-ctr':
            raise Exception('unknown algorithm')
        self._key = key
        # 64-bit little-endian block counter seeded from the IV; it is
        # incremented once per keystream block (CTR mode).
        self._nonce = struct.unpack('<Q', iv)[0]
        # Offset into the current keystream block.
        self._pos = 0
        self._next_stream()

    def _next_stream(self):
        # Generate the next BLOCK_SIZE bytes of keystream and bump the
        # counter, wrapping it at 64 bits.
        self._nonce &= 0xFFFFFFFFFFFFFFFF
        self._stream = salsa20.Salsa20_keystream(BLOCK_SIZE,
                                                 struct.pack('<Q',
                                                             self._nonce),
                                                 self._key)
        self._nonce += 1

    def update(self, data):
        # XOR data against the keystream; the same call both encrypts and
        # decrypts since XOR is its own inverse.
        results = []
        while True:
            remain = BLOCK_SIZE - self._pos
            cur_data = data[:remain]
            cur_data_len = len(cur_data)
            cur_stream = self._stream[self._pos:self._pos + cur_data_len]
            self._pos = self._pos + cur_data_len
            data = data[remain:]
            results.append(numpy_xor(cur_data, cur_stream))
            if self._pos >= BLOCK_SIZE:
                # Exhausted this keystream block; generate the next one.
                self._next_stream()
                self._pos = 0
            if not data:
                break
        return ''.join(results)
# Cipher registry: name -> (key size, iv size, implementation class).
ciphers = {
    'salsa20-ctr': (32, 8, Salsa20Cipher),
}


def test():
    # Round-trips random data through encrypt/decrypt in random-sized
    # chunks and reports throughput. NOTE: Python 2 print statements;
    # this benchmark must be run under Python 2.
    from os import urandom
    import random
    rounds = 1 * 1024
    plain = urandom(BLOCK_SIZE * rounds)
    import M2Crypto.EVP
    # cipher = M2Crypto.EVP.Cipher('aes_128_cfb', 'k' * 32, 'i' * 16, 1,
    #                key_as_bytes=0, d='md5', salt=None, i=1,
    #                padding=1)
    # decipher = M2Crypto.EVP.Cipher('aes_128_cfb', 'k' * 32, 'i' * 16, 0,
    #                key_as_bytes=0, d='md5', salt=None, i=1,
    #                padding=1)
    cipher = Salsa20Cipher('salsa20-ctr', 'k' * 32, 'i' * 8, 1)
    decipher = Salsa20Cipher('salsa20-ctr', 'k' * 32, 'i' * 8, 1)
    results = []
    pos = 0
    print 'salsa20 test start'
    start = time.time()
    while pos < len(plain):
        l = random.randint(100, 32768)
        c = cipher.update(plain[pos:pos + l])
        results.append(c)
        pos += l
    pos = 0
    c = ''.join(results)
    results = []
    while pos < len(plain):
        l = random.randint(100, 32768)
        results.append(decipher.update(c[pos:pos + l]))
        pos += l
    end = time.time()
    print 'speed: %d bytes/s' % (BLOCK_SIZE * rounds / (end - start))
    # Decrypting the ciphertext must reproduce the original plaintext.
    assert ''.join(results) == plain


if __name__ == '__main__':
    test()
| mit |
incaser/odoo-odoo | addons/multi_company/__openerp__.py | 259 | 1620 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Multi-Company',
'version': '1.0',
'category': 'Tools',
'description': """
This module is for managing a multicompany environment.
=======================================================
This module is the base module for other multi-company modules.
""",
'author': 'OpenERP SA,SYLEAM',
'website': 'https://www.odoo.com',
'depends': [
'base',
'sale_stock',
'project',
],
'data': ['res_company_view.xml'],
'demo': ['multi_company_demo.xml'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
chrisndodge/edx-platform | common/lib/sandbox-packages/eia.py | 193 | 4344 | """
Standard resistor values.
Commonly used for verifying electronic components in circuit classes are
standard values, or conversely, for generating realistic component
values in parameterized problems. For details, see:
http://en.wikipedia.org/wiki/Electronic_color_code
"""
# pylint: disable=invalid-name
# r is standard name for a resistor. We would like to use it as such.
import math
import numbers
# Standard EIA "preferred number" series. E6/E12/E24 cover 20%/10%/5%
# tolerance parts (two significant digits); E48/E96/E192 cover 2%/1%/0.5%
# parts (three significant digits).
E6 = [10, 15, 22, 33, 47, 68]
E12 = [10, 12, 15, 18, 22, 27, 33, 39, 47, 56, 68, 82]
E24 = [10, 12, 15, 18, 22, 27, 33, 39, 47, 56, 68, 82, 11, 13, 16, 20,
       24, 30, 36, 43, 51, 62, 75, 91]
E48 = [100, 121, 147, 178, 215, 261, 316, 383, 464, 562, 681, 825, 105,
       127, 154, 187, 226, 274, 332, 402, 487, 590, 715, 866, 110, 133,
       162, 196, 237, 287, 348, 422, 511, 619, 750, 909, 115, 140, 169,
       205, 249, 301, 365, 442, 536, 649, 787, 953]
E96 = [100, 121, 147, 178, 215, 261, 316, 383, 464, 562, 681, 825, 102,
       124, 150, 182, 221, 267, 324, 392, 475, 576, 698, 845, 105, 127,
       154, 187, 226, 274, 332, 402, 487, 590, 715, 866, 107, 130, 158,
       191, 232, 280, 340, 412, 499, 604, 732, 887, 110, 133, 162, 196,
       237, 287, 348, 422, 511, 619, 750, 909, 113, 137, 165, 200, 243,
       294, 357, 432, 523, 634, 768, 931, 115, 140, 169, 205, 249, 301,
       365, 442, 536, 649, 787, 953, 118, 143, 174, 210, 255, 309, 374,
       453, 549, 665, 806, 976]
E192 = [100, 121, 147, 178, 215, 261, 316, 383, 464, 562, 681, 825, 101,
        123, 149, 180, 218, 264, 320, 388, 470, 569, 690, 835, 102, 124,
        150, 182, 221, 267, 324, 392, 475, 576, 698, 845, 104, 126, 152,
        184, 223, 271, 328, 397, 481, 583, 706, 856, 105, 127, 154, 187,
        226, 274, 332, 402, 487, 590, 715, 866, 106, 129, 156, 189, 229,
        277, 336, 407, 493, 597, 723, 876, 107, 130, 158, 191, 232, 280,
        340, 412, 499, 604, 732, 887, 109, 132, 160, 193, 234, 284, 344,
        417, 505, 612, 741, 898, 110, 133, 162, 196, 237, 287, 348, 422,
        511, 619, 750, 909, 111, 135, 164, 198, 240, 291, 352, 427, 517,
        626, 759, 920, 113, 137, 165, 200, 243, 294, 357, 432, 523, 634,
        768, 931, 114, 138, 167, 203, 246, 298, 361, 437, 530, 642, 777,
        942, 115, 140, 169, 205, 249, 301, 365, 442, 536, 649, 787, 953,
        117, 142, 172, 208, 252, 305, 370, 448, 542, 657, 796, 965, 118,
        143, 174, 210, 255, 309, 374, 453, 549, 665, 806, 976, 120, 145,
        176, 213, 258, 312, 379, 459, 556, 673, 816, 988]


def iseia(r, valid_types=(E6, E12, E24)):
    """Return True when *r* is a standard EIA component value.

    By default the 5%-tolerance series (E6/E12/E24) are checked; pass
    e.g. ``(E48, E96, E192)`` for 1% components.
    """
    # Reject non-numbers, negatives, NaN and infinity outright.
    if not isinstance(r, numbers.Number):
        return False
    if r < 0 or math.isnan(r) or math.isinf(r):
        return False
    if r == 0:
        # Special case: 0 ohms (a jumper) is a standard value.
        return True
    # Normalize the mantissa into [100, 1000).
    while r < 100:
        r = r * 10
    while r >= 1000:
        r = r / 10
    # Allow a little floating-point slack, then require an integer code.
    rounded = round(r)
    if abs(r - rounded) > 0.01:
        return False
    code = int(rounded)
    # Match either a 3-digit series entry directly, or a 2-digit series
    # entry scaled by ten (E6/E12/E24 store two significant digits).
    for series in valid_types:
        if code in series:
            return True
        if code % 10 == 0 and int(code / 10.) in series:
            return True
    return False
if __name__ == '__main__':
    # NOTE: Python 2 print-statement syntax; this self-test requires Python 2.
    # Test cases. All of these should return True
    print iseia(100)  # 100 ohm resistor is EIA
    print not iseia(101)  # 101 is not
    print not iseia(100.3)  # Floating point close to EIA is not EIA
    print iseia(100.001)  # But within floating point error is
    print iseia(1e5)  # We handle big numbers well
    print iseia(2200)  # We handle middle-of-the-list well
    # We can handle 1% components correctly; 2.2k is EIA24, but not EIA48.
    print not iseia(2200, (E48, E96, E192))
    print iseia(5490e2, (E48, E96, E192))
    print iseia(2200)
    print not iseia(5490e2)
    print iseia(1e-5)  # We handle little numbers well
    print not iseia("Hello")  # Junk handled okay
    print not iseia(float('NaN'))
    print not iseia(-1)
    print not iseia(iseia)
    print not iseia(float('Inf'))
    print iseia(0)  # Corner case. 0 is a standard resistor value.
| agpl-3.0 |
gramps-project/gramps | docs/update_doc.py | 10 | 3491 | #! /usr/bin/env python3
#
# update_po - a gramps tool to update translations
#
# Copyright (C) 2006-2006 Kees Bakker
# Copyright (C) 2006 Brian Matherly
# Copyright (C) 2008 Stephen George
# Copyright (C) 2012
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
update_doc.py for Gramps API(s) documentation.
Examples:
python update_doc.py -t
Tests if 'sphinx' and 'python' are well configured.
"""
from __future__ import print_function
import os
import sys
from argparse import ArgumentParser
# You can set these variables from the command line.
SPHINXBUILD = 'sphinx-build'
if sys.platform == 'win32':
pythonCmd = os.path.join(sys.prefix, 'bin', 'python.exe')
sphinxCmd = os.path.join(sys.prefix, 'bin', 'sphinx-build.exe')
elif sys.platform in ['linux', 'linux2', 'darwin', 'cygwin']:
pythonCmd = os.path.join(sys.prefix, 'bin', 'python')
sphinxCmd = SPHINXBUILD
else:
print ("Update Docs ERROR: unknown system, don't know sphinx, ... commands")
sys.exit(0)
def tests():
    """
    Testing installed programs.

    Displays the version output of 'python' and 'sphinx-build' so the
    user can verify both tools are installed and reachable.
    """
    # Bug fix: the bare 'except:' clauses below previously swallowed
    # SystemExit and KeyboardInterrupt as well; only OS-level failures
    # are meaningful here (os.system itself reports a missing command
    # through the command's own output, not an exception).
    try:
        print("\n=================='python'=============================\n")
        os.system('''%(program)s -V''' % {'program': pythonCmd})
    except OSError:
        print('Please, install python')
    try:
        print("\n=================='sphinx-build'=============================\n")
        os.system('''%(program)s''' % {'program': sphinxCmd})
    except OSError:
        print('Please, install sphinx')
def main():
    """
    The utility for handling documentation stuff.

    Parses the command line and runs the requested tasks; with no
    arguments both the environment test and the build are run, matching
    the tool's historical behavior.
    """
    parser = ArgumentParser(
        description='This program aims to handle manual'
                    ' and translated version.',
    )
    # Bug fix: these options previously combined action="store_true" with
    # default=True, so they could never be switched off and both actions
    # always ran no matter which flags were given.
    parser.add_argument("-t", "--test",
                        action="store_true", dest="test", default=False,
                        help="test if 'python' and 'sphinx' are properly installed")
    parser.add_argument("-b", "--build",
                        action="store_true", dest="build", default=False,
                        help="build documentation")
    args = parser.parse_args()
    # Preserve the historical no-argument behavior: do everything.
    if not args.test and not args.build:
        args.test = args.build = True
    if args.test:
        tests()
    if args.build:
        build()
def build():
    """
    Build documentation.

    Runs sphinx-build with the HTML builder; output goes to _build/html.
    """
    # testing stage
    # Only the HTML builder is active; the alternative builders below were
    # used during testing and are kept for reference.
    os.system('''%(program)s -b html . _build/html''' % {'program': sphinxCmd})
    #os.system('''%(program)s -b changes . _build/changes''' % {'program': sphinxCmd})
    #os.system('''%(program)s -b linkcheck . _build/linkcheck''' % {'program': sphinxCmd})
    #os.system('''%(program)s -b devhelp . _build/devhelp''' % {'program': sphinxCmd})


if __name__ == "__main__":
    main()
| gpl-2.0 |
ojengwa/oh-mainline | vendor/packages/Django/django/core/serializers/pyyaml.py | 110 | 2353 | """
YAML serializer.
Requires PyYaml (http://pyyaml.org/), but that's checked for in __init__.
"""
import decimal
import yaml
from io import StringIO
from django.db import models
from django.core.serializers.base import DeserializationError
from django.core.serializers.python import Serializer as PythonSerializer
from django.core.serializers.python import Deserializer as PythonDeserializer
from django.utils import six
class DjangoSafeDumper(yaml.SafeDumper):
    """SafeDumper that serializes Decimal values as plain YAML strings."""

    def represent_decimal(self, data):
        # Emit Decimals as tagged strings to avoid lossy float conversion.
        return self.represent_scalar('tag:yaml.org,2002:str', str(data))

DjangoSafeDumper.add_representer(decimal.Decimal, DjangoSafeDumper.represent_decimal)
class Serializer(PythonSerializer):
    """
    Convert a queryset to YAML.
    """
    internal_use_only = False

    def handle_field(self, obj, field):
        # A nasty special case: base YAML doesn't support serialization of time
        # types (as opposed to dates or datetimes, which it does support). Since
        # we want to use the "safe" serializer for better interoperability, we
        # need to do something with those pesky times. Converting 'em to strings
        # isn't perfect, but it's better than a "!!python/time" type which would
        # halt deserialization under any other language.
        if isinstance(field, models.TimeField) and getattr(obj, field.name) is not None:
            self._current[field.name] = str(getattr(obj, field.name))
        else:
            super(Serializer, self).handle_field(obj, field)

    def end_serialization(self):
        """Dump the accumulated python objects to the stream as YAML."""
        yaml.dump(self.objects, self.stream, Dumper=DjangoSafeDumper, **self.options)

    def getvalue(self):
        # Grand-parent super: skip PythonSerializer.getvalue() on purpose
        # and use the base implementation, which returns the stream contents.
        return super(PythonSerializer, self).getvalue()
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of YAML data.

    Yields deserialized objects; any parsing/loading failure is re-raised
    as DeserializationError.
    """
    # Normalize the input: bytes -> text -> file-like stream.
    if isinstance(stream_or_string, bytes):
        stream_or_string = stream_or_string.decode('utf-8')
    if isinstance(stream_or_string, six.string_types):
        stream = StringIO(stream_or_string)
    else:
        stream = stream_or_string
    try:
        for obj in PythonDeserializer(yaml.safe_load(stream), **options):
            yield obj
    except GeneratorExit:
        # Let generator shutdown propagate untouched.
        raise
    except Exception as e:
        # Map to deserializer error
        raise DeserializationError(e)
manderson23/NewsBlur | vendor/feedvalidator/validators.py | 16 | 36588 | """$Id: validators.py 749 2007-04-02 15:45:49Z rubys $"""
__author__ = "Sam Ruby <http://intertwingly.net/> and Mark Pilgrim <http://diveintomark.org/>"
__version__ = "$Revision: 749 $"
__date__ = "$Date: 2007-04-02 15:45:49 +0000 (Mon, 02 Apr 2007) $"
__copyright__ = "Copyright (c) 2002 Sam Ruby and Mark Pilgrim"
from base import validatorBase
from logging import *
import re, time, datetime
from uri import canonicalForm, urljoin
from rfc822 import AddressList, parsedate
rdfNS = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"
#
# Valid mime type
#
mime_re = re.compile('[^\s()<>,;:\\"/[\]?=]+/[^\s()<>,;:\\"/[\]?=]+(\s*;\s*[^\s()<>,;:\\"/[\]?=]+=("(\\"|[^"])*"|[^\s()<>,;:\\"/[\]?=]+))*$')
#
# Extensibility hook: logic varies based on type of feed
#
def any(self, name, qname, attrs):
    """Handler for elements with no dedicated validator; the logic varies
    based on the type of feed.

    NOTE(review): this module-level name shadows the builtin any();
    renaming would change the public interface, so it is only flagged here.
    """
    if self.getFeedType() != TYPE_RSS1:
        # Non-RDF feeds: swallow the unknown element and its children.
        return eater()
    else:
        # RSS 1.0: hand unknown elements to the RDF-aware extension handler.
        from rdf import rdfExtension
        return rdfExtension(qname)
#
# This class simply eats events. Useful to prevent cascading of errors
#
class eater(validatorBase):
    """Consumes events for unknown elements, checking only well-formedness.

    Useful to prevent cascading of errors: children of an unrecognized
    element are swallowed, but bad characters and namespace problems are
    still logged.
    """

    def getExpectedAttrNames(self):
        # Accept whatever attributes are present.
        return self.attrs.getNames()

    def characters(self, string):
        # Flag C1 control characters and the Unicode replacement character.
        for c in string:
            if 0x80 <= ord(c) <= 0x9F or c == u'\ufffd':
                from validators import BadCharacters
                self.log(BadCharacters({"parent":self.parent.name, "element":self.name}))

    def startElementNS(self, name, qname, attrs):
        # RSS 2.0 arbitrary restriction on extensions
        feedtype=self.getFeedType()
        if (not qname) and feedtype and (feedtype==TYPE_RSS2) and self.name.find('_')>=0:
            from logging import NotInANamespace
            self.log(NotInANamespace({"parent":self.name, "element":name, "namespace":'""'}))

        # ensure element is "namespace well formed"
        if name.find(':') != -1:
            from logging import MissingNamespace
            self.log(MissingNamespace({"parent":self.name, "element":name}))

        # ensure all attribute namespaces are properly defined
        for (namespace,attr) in attrs.keys():
            if ':' in attr and not namespace:
                from logging import MissingNamespace
                self.log(MissingNamespace({"parent":self.name, "element":attr}))
            # Attribute values get the same bad-character screening as text.
            for c in attrs.get((namespace,attr)):
                if 0x80 <= ord(c) <= 0x9F or c == u'\ufffd':
                    from validators import BadCharacters
                    self.log(BadCharacters({"parent":name, "element":attr}))

        # eat children
        self.push(eater(), name, attrs)
from HTMLParser import HTMLParser, HTMLParseError
class HTMLValidator(HTMLParser):
htmltags = [
"a", "abbr", "acronym", "address", "applet", "area", "b", "base",
"basefont", "bdo", "big", "blockquote", "body", "br", "button", "caption",
"center", "cite", "code", "col", "colgroup", "dd", "del", "dir", "div",
"dfn", "dl", "dt", "em", "fieldset", "font", "form", "frame", "frameset",
"h1", "h2", "h3", "h4", "h5", "h6",
"head", "hr", "html", "i", "iframe", "img", "input", "ins",
"isindex", "kbd", "label", "legend", "li", "link", "map", "menu", "meta",
"noframes", "noscript", "object", "ol", "optgroup", "option", "p",
"param", "pre", "q", "s", "samp", "script", "select", "small", "span",
"strike", "strong", "style", "sub", "sup", "table", "tbody", "td",
"textarea", "tfoot", "th", "thead", "title", "tr", "tt", "u", "ul",
"var", "xmp", "plaintext", "embed", "comment", "listing"]
acceptable_elements = ['a', 'abbr', 'acronym', 'address', 'area', 'b', 'big',
'blockquote', 'br', 'button', 'caption', 'center', 'cite', 'code', 'col',
'colgroup', 'dd', 'del', 'dfn', 'dir', 'div', 'dl', 'dt', 'em', 'fieldset',
'font', 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img',
'input', 'ins', 'kbd', 'label', 'legend', 'li', 'map', 'menu', 'ol',
'optgroup', 'option', 'p', 'pre', 'q', 's', 'samp', 'select', 'small',
'span', 'strike', 'strong', 'sub', 'sup', 'table', 'tbody', 'td',
'textarea', 'tfoot', 'th', 'thead', 'tr', 'tt', 'u', 'ul', 'var',
'noscript']
acceptable_attributes = ['abbr', 'accept', 'accept-charset', 'accesskey',
'action', 'align', 'alt', 'axis', 'bgcolor', 'border', 'cellpadding',
'cellspacing', 'char', 'charoff', 'charset', 'checked', 'cite', 'class',
'clear', 'cols', 'colspan', 'color', 'compact', 'coords', 'datetime',
'dir', 'disabled', 'enctype', 'face', 'for', 'frame', 'headers', 'height',
'href', 'hreflang', 'hspace', 'id', 'ismap', 'label', 'lang', 'longdesc',
'maxlength', 'media', 'method', 'multiple', 'name', 'nohref', 'noshade',
'nowrap', 'prompt', 'readonly', 'rel', 'rev', 'rows', 'rowspan', 'rules',
'scope', 'selected', 'shape', 'size', 'span', 'src', 'start', 'summary',
'tabindex', 'target', 'title', 'type', 'usemap', 'valign', 'value',
'vspace', 'width', 'xml:lang', 'xmlns']
acceptable_css_properties = ['azimuth', 'background', 'background-color',
'border', 'border-bottom', 'border-bottom-color', 'border-bottom-style',
'border-bottom-width', 'border-collapse', 'border-color', 'border-left',
'border-left-color', 'border-left-style', 'border-left-width',
'border-right', 'border-right-color', 'border-right-style',
'border-right-width', 'border-spacing', 'border-style', 'border-top',
'border-top-color', 'border-top-style', 'border-top-width', 'border-width',
'clear', 'color', 'cursor', 'direction', 'display', 'elevation', 'float',
'font', 'font-family', 'font-size', 'font-style', 'font-variant',
'font-weight', 'height', 'letter-spacing', 'line-height', 'margin',
'margin-bottom', 'margin-left', 'margin-right', 'margin-top', 'overflow',
'padding', 'padding-bottom', 'padding-left', 'padding-right',
'padding-top', 'pause', 'pause-after', 'pause-before', 'pitch',
'pitch-range', 'richness', 'speak', 'speak-header', 'speak-numeral',
'speak-punctuation', 'speech-rate', 'stress', 'text-align',
'text-decoration', 'text-indent', 'unicode-bidi', 'vertical-align',
'voice-family', 'volume', 'white-space', 'width']
# survey of common keywords found in feeds
acceptable_css_keywords = ['aqua', 'auto', 'black', 'block', 'blue', 'bold',
'both', 'bottom', 'brown', 'center', 'collapse', 'dashed', 'dotted',
'fuchsia', 'gray', 'green', '!important', 'italic', 'left', 'lime',
'maroon', 'medium', 'none', 'navy', 'normal', 'nowrap', 'olive',
'pointer', 'purple', 'red', 'right', 'solid', 'silver', 'teal', 'top',
'transparent', 'underline', 'white', 'yellow']
valid_css_values = re.compile('^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|' +
'\d?\.?\d?\d(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$')
def log(self,msg):
offset = [self.element.line + self.getpos()[0] - 1 -
self.element.dispatcher.locator.getLineNumber(),
-self.element.dispatcher.locator.getColumnNumber()]
self.element.log(msg, offset)
def __init__(self,value,element):
self.element=element
self.valid = True
HTMLParser.__init__(self)
if value.lower().find('<?import ') >= 0:
self.log(SecurityRisk({"parent":self.element.parent.name, "element":self.element.name, "tag":"?import"}))
try:
self.feed(value)
self.close()
if self.valid:
self.log(ValidHtml({"parent":self.element.parent.name, "element":self.element.name}))
except HTMLParseError, msg:
element = self.element
offset = [element.line - element.dispatcher.locator.getLineNumber(),
- element.dispatcher.locator.getColumnNumber()]
match = re.search(', at line (\d+), column (\d+)',str(msg))
if match: offset[0] += int(match.group(1))-1
element.log(NotHtml({"parent":element.parent.name, "element":element.name, "value": str(msg)}),offset)
def handle_starttag(self, tag, attributes):
if tag.lower() not in self.htmltags:
self.log(NotHtml({"parent":self.element.parent.name, "element":self.element.name,"value":tag, "message": "Non-html tag"}))
self.valid = False
elif tag.lower() not in HTMLValidator.acceptable_elements:
self.log(SecurityRisk({"parent":self.element.parent.name, "element":self.element.name, "tag":tag}))
for (name,value) in attributes:
if name.lower() == 'style':
for evil in checkStyle(value):
self.log(DangerousStyleAttr({"parent":self.element.parent.name, "element":self.element.name, "attr":"style", "value":evil}))
elif name.lower() not in self.acceptable_attributes:
self.log(SecurityRiskAttr({"parent":self.element.parent.name, "element":self.element.name, "attr":name}))
def handle_charref(self, name):
if name.startswith('x'):
value = int(name[1:],16)
else:
value = int(name)
if 0x80 <= value <= 0x9F or value == 0xfffd:
self.log(BadCharacters({"parent":self.element.parent.name,
"element":self.element.name, "value":"&#" + name + ";"}))
#
# Scrub CSS properties for potentially evil intent
#
def checkStyle(style):
  """Return a list of suspicious tokens found in a CSS style attribute.

  An empty list means the style looks safe.  If the attribute does not
  even parse as simple declarations, the whole attribute is returned as
  the single offending token.
  """
  # gate 1: only a small, known-safe character repertoire is allowed
  if not re.match("""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style):
    return [style]
  # gate 2: must look like "prop: value; prop: value" declarations
  if not re.match("^(\s*[-\w]+\s*:\s*[^:;]*(;|$))*$", style):
    return [style]
  flagged = []
  for prop, propvalue in re.findall("([-\w]+)\s*:\s*([^:;]*)", style.lower()):
    if prop not in HTMLValidator.acceptable_css_properties:
      if prop not in flagged:
        flagged.append(prop)
    elif prop.split('-')[0] in ['background', 'border', 'margin', 'padding']:
      # shorthand properties: vet each keyword of the value individually
      for keyword in propvalue.split():
        if keyword not in HTMLValidator.acceptable_css_keywords and \
           not HTMLValidator.valid_css_values.match(keyword):
          if keyword not in flagged:
            flagged.append(keyword)
  return flagged
#
# This class simply eats html events. Identifies unsafe events
#
class htmlEater(validatorBase):
  """Consumes an embedded HTML subtree, flagging unsafe markup.

  Mirrors `eater`, but applies the HTMLValidator white-lists to element
  and attribute names instead of accepting everything.
  """
  def getExpectedAttrNames(self):
    if self.attrs and len(self.attrs):
      return self.attrs.getNames()
  def textOK(self):
    pass
  def startElementNS(self, name, qname, attrs):
    for ns, localname in attrs.getNames():
      if ns is not None:
        continue  # only un-namespaced attributes are screened here
      if localname.lower() == 'style':
        for evil in checkStyle(attrs.get((ns, localname))):
          self.log(DangerousStyleAttr({"parent":self.parent.name, "element":self.name, "attr":localname, "value":evil}))
      elif localname.lower() not in HTMLValidator.acceptable_attributes:
        self.log(SecurityRiskAttr({"parent":self.parent.name, "element":self.name, "attr":localname}))
    # children of embedded HTML are embedded HTML too
    self.push(htmlEater(), self.name, attrs)
    if name.lower() not in HTMLValidator.acceptable_elements:
      self.log(SecurityRisk({"parent":self.parent.name, "element":self.name, "tag":name}))
  def endElementNS(self, name, qname):
    pass
#
# text: i.e., no child elements allowed (except rdf:Description).
#
class text(validatorBase):
  """Element that may contain only character data.

  Child elements are normally flagged and eaten; the RSS 1.0 exception
  hands RDF children to rdfExtension, with a mixed-content complaint
  unless rdf:parseType="Literal" is in force.
  """
  def textOK(self): pass
  def getExpectedAttrNames(self):
    # RSS 1.0 permits the standard RDF attributes on text elements
    if self.getFeedType() == TYPE_RSS1:
      return [(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#', u'parseType'),
            (u'http://www.w3.org/1999/02/22-rdf-syntax-ns#', u'datatype'),
            (u'http://www.w3.org/1999/02/22-rdf-syntax-ns#', u'resource')]
    else:
      return []
  def startElementNS(self, name, qname, attrs):
    if self.getFeedType() == TYPE_RSS1:
      # text already present alongside an element child is mixed content,
      # which RDF only allows under parseType="Literal"
      if self.value.strip() or self.children:
        if self.attrs.get((u'http://www.w3.org/1999/02/22-rdf-syntax-ns#', u'parseType')) != 'Literal':
          self.log(InvalidRDF({"message":"mixed content"}))
      from rdf import rdfExtension
      self.push(rdfExtension(qname), name, attrs)
    else:
      from base import namespaces
      # NOTE(review): ns is computed but never used below
      ns = namespaces.get(qname, '')
      if name.find(':') != -1:
        from logging import MissingNamespace
        self.log(MissingNamespace({"parent":self.name, "element":name}))
      else:
        self.log(UndefinedElement({"parent":self.name, "element":name}))
      # swallow the unexpected child to avoid cascading errors
      self.push(eater(), name, attrs)
#
# noduplicates: no child elements, no duplicate siblings
#
class noduplicates(validatorBase):
  """Flags a repeated sibling element; ignores the element's content."""
  def __init__(self, message=DuplicateElement):
    # keep assignment order: message first, then base initialization
    self.message = message
    validatorBase.__init__(self)
  def startElementNS(self, name, qname, attrs):
    pass  # children are irrelevant to duplicate detection
  def characters(self, string):
    pass  # so is text content
  def prevalidate(self):
    # parent.children holds the names of previously seen siblings
    if self.name in self.parent.children:
      self.log(self.message({"parent":self.parent.name, "element":self.name}))
#
# valid e-mail addr-spec
#
class addr_spec(text):
  """Validates a bare RFC 2822 addr-spec (e-mail address without a
  display name)."""
  email_re = re.compile('''([a-zA-Z0-9_\-\+\.\']+)@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.)|(([a-zA-Z0-9\-]+\.)+))([a-zA-Z]{2,4}|[0-9]{1,3})(\]?)$''')
  message = InvalidAddrSpec
  def validate(self, value=None):
    # subclasses (email) may pass a pre-extracted addr-spec explicitly
    candidate = value or self.value
    if self.email_re.match(candidate):
      self.log(ValidContact({"parent":self.parent.name, "element":self.name, "value":self.value}))
    else:
      self.log(self.message({"parent":self.parent.name, "element":self.name, "value":self.value}))
#
# iso639 language code
#
def iso639_validate(log,value,element,parent):
  """Log ValidLanguage/InvalidLanguage for an RFC 3066 style tag.

  Only the primary subtag (before the first '-') must be a known
  ISO 639 code; any sub-language part is not checked.
  """
  import iso639codes
  primary = value.split('-', 1)[0]
  if unicode(primary).lower() in iso639codes.isoLang:
    log(ValidLanguage({"parent":parent, "element":element}))
  else:
    log(InvalidLanguage({"parent":parent, "element":element, "value":value}))
class iso639(text):
  """Text element whose content must be an ISO 639 language code."""
  def validate(self):
    iso639_validate(self.log, self.value, self.name, self.parent.name)
#
# Encoding charset
#
class Charset(text):
  """Text element naming a character encoding.

  The value must be an encoding name/alias known to the codecs module.
  """
  def validate(self):
    import codecs
    try:
      codecs.lookup(self.value)
    except LookupError:
      # narrowed from a bare `except:`, which also swallowed
      # KeyboardInterrupt/SystemExit and hid programming errors;
      # codecs.lookup signals an unknown encoding with LookupError
      self.log(InvalidEncoding({'value': self.value}))
#
# Mime type
#
class MimeType(text):
  """Text element that must hold a syntactically valid MIME type."""
  def validate(self):
    if mime_re.match(self.value):
      return  # type/subtype with optional parameters: fine
    self.log(InvalidMIMEType({'attr':'type'}))
#
# iso8601 dateTime
#
class iso8601(text):
  """Content must be an ISO 8601 (W3CDTF-style) date-time.

  validate() returns 1 (truthy) only when the value passed every check;
  subclasses (rfc3339, iso8601_date) rely on that return value.
  """
  iso8601_re = re.compile("^\d\d\d\d(-\d\d(-\d\d(T\d\d:\d\d(:\d\d(\.\d*)?)?" +
     "(Z|([+-]\d\d:\d\d))?)?)?)?$")
  message = InvalidISO8601DateTime
  def validate(self):
    if not self.iso8601_re.match(self.value):
      self.log(self.message({"parent":self.parent.name, "element":self.name, "value":self.value}))
      return
    work=self.value.split('T')
    date=work[0].split('-')
    year=int(date[0])
    if len(date)>1:
      month=int(date[1])
      try:
        # datetime.date performs the real month/day range checking
        if len(date)>2: datetime.date(year,month,int(date[2]))
      except ValueError, e:
        return self.log(self.message({"parent":self.parent.name, "element":self.name, "value":str(e)}))
    if len(work) > 1:
      # NOTE: this local deliberately shadows the `time` module inside
      # this method; timezone suffixes (Z/+hh:mm/-hh:mm) are stripped first
      time=work[1].split('Z')[0].split('+')[0].split('-')[0]
      time=time.split(':')
      if int(time[0])>23:
        self.log(self.message({"parent":self.parent.name, "element":self.name, "value":self.value}))
        return
      if len(time)>1 and int(time[1])>60:
        self.log(self.message({"parent":self.parent.name, "element":self.name, "value":self.value}))
        return
      if len(time)>2 and float(time[2])>60.0:
        self.log(self.message({"parent":self.parent.name, "element":self.name, "value":self.value}))
        return
    self.log(ValidW3CDTFDate({"parent":self.parent.name, "element":self.name, "value":self.value}))
    return 1
class w3cdtf(iso8601):
  """W3C Date and Time Format profile of ISO 8601."""
  # The same as in iso8601, except a timezone is not optional when
  # a time is present
  iso8601_re = re.compile("^\d\d\d\d(-\d\d(-\d\d(T\d\d:\d\d(:\d\d(\.\d*)?)?" +
     "(Z|([+-]\d\d:\d\d)))?)?)?$")
  message = InvalidW3CDTFDate
class rfc3339(iso8601):
  """RFC 3339 profile: full date, full time and timezone are mandatory."""
  # The same as in iso8601, except that the only thing that is optional
  # is the seconds
  iso8601_re = re.compile("^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d(\.\d*)?" +
    "(Z|([+-]\d\d:\d\d))$")
  message = InvalidRFC3339Date
  def validate(self):
    if iso8601.validate(self):
      # plausibility check: not beyond tomorrow, not before 1970.
      # NOTE(review): lexicographic string comparison — exact for 'Z'
      # times, approximate for other offsets
      tomorrow=time.strftime("%Y-%m-%dT%H:%M:%SZ",time.localtime(time.time()+86400))
      if self.value > tomorrow or self.value < "1970":
        self.log(ImplausibleDate({"parent":self.parent.name,
          "element":self.name, "value":self.value}))
        return 0
      return 1
    return 0
class iso8601_date(iso8601):
  """ISO 8601 value restricted to a complete date (YYYY-MM-DD only)."""
  date_re = re.compile("^\d\d\d\d-\d\d-\d\d$")
  def validate(self):
    if not iso8601.validate(self):
      return  # already logged by the base class
    if not self.date_re.search(self.value):
      self.log(InvalidISO8601Date({"parent":self.parent.name, "element":self.name, "value":self.value}))
# Snapshot of the IANA URI scheme registry; rfc2396.validate warns
# (SchemeNotIANARegistered) about alphabetic schemes missing from it.
iana_schemes = [ # http://www.iana.org/assignments/uri-schemes.html
  "ftp", "http", "gopher", "mailto", "news", "nntp", "telnet", "wais",
  "file", "prospero", "z39.50s", "z39.50r", "cid", "mid", "vemmi",
  "service", "imap", "nfs", "acap", "rtsp", "tip", "pop", "data", "dav",
  "opaquelocktoken", "sip", "sips", "tel", "fax", "modem", "ldap",
  "https", "soap.beep", "soap.beeps", "xmlrpc.beep", "xmlrpc.beeps",
  "urn", "go", "h323", "ipp", "tftp", "mupdate", "pres", "im", "mtqp",
  "iris.beep", "dict", "snmp", "crid", "tag", "dns", "info"
]
#
# rfc2396 fully qualified (non-relative) uri
#
class rfc2396(text):
  """Validates that element content is an RFC 2396 URI reference.

  tag: and urn: URIs get scheme-specific grammars; http/ftp must have a
  non-empty authority; unregistered alphabetic schemes draw a warning.
  validate() returns 1 on success, 0 otherwise.
  """
  rfc2396_re = re.compile("([a-zA-Z][0-9a-zA-Z+\\-\\.]*:)?/{0,2}" +
    "[0-9a-zA-Z;/?:@&=+$\\.\\-_!~*'()%,#]*$")
  urn_re = re.compile(r"^[Uu][Rr][Nn]:[a-zA-Z0-9][a-zA-Z0-9-]{1,31}:([a-zA-Z0-9()+,\.:=@;$_!*'\-]|%[0-9A-Fa-f]{2})+$")
  tag_re = re.compile(r"^tag:([a-z0-9\-\._]+?@)?[a-z0-9\.\-]+?,\d{4}(-\d{2}(-\d{2})?)?:[0-9a-zA-Z;/\?:@&=+$\.\-_!~*'\(\)%,]*(#[0-9a-zA-Z;/\?:@&=+$\.\-_!~*'\(\)%,]*)?$")
  def validate(self, errorClass=InvalidLink, successClass=ValidURI, extraParams={}):
    success = 0
    scheme = self.value.split(':')[0].lower()
    # every branch logs the same base parameters; build them once
    logparams = {"parent":self.parent.name, "element":self.name, "value":self.value}
    logparams.update(extraParams)
    if scheme == 'tag':
      if self.tag_re.match(self.value):
        success = 1
        self.log(ValidTAG(logparams))
      else:
        self.log(InvalidTAG(logparams))
    elif scheme == "urn":
      if self.urn_re.match(self.value):
        success = 1
        self.log(ValidURN(logparams))
      else:
        self.log(InvalidURN(logparams))
    elif not self.rfc2396_re.match(self.value):
      urichars_re = re.compile("[0-9a-zA-Z;/?:@&=+$\\.\\-_!~*'()%,#]")
      for c in self.value:
        if ord(c) < 128 and not urichars_re.match(c):
          # pinpoint the first plain-ASCII character that is not URI-legal
          logparams['value'] = repr(str(c))
          self.log(InvalidUriChar(logparams))
          break
      else:
        try:
          # an IRI that becomes a valid URI after IDNA encoding earns a
          # more specific diagnostic
          if self.rfc2396_re.match(self.value.encode('idna')):
            errorClass = UriNotIri
        except:
          pass
        self.log(errorClass(logparams))
    elif scheme in ['http', 'ftp']:
      # http/ftp additionally require a non-empty authority component
      if re.match('^\w+://[^/].*', self.value):
        success = 1
      else:
        self.log(errorClass(logparams))
    elif self.value.find(':') >= 0 and scheme.isalpha() and scheme not in iana_schemes:
      self.log(SchemeNotIANARegistered({"parent":self.parent.name, "element":self.name, "value":scheme}))
    else:
      success = 1
    if success:
      okparams = {"parent":self.parent.name, "element":self.name, "value":self.value}
      okparams.update(extraParams)
      self.log(successClass(okparams))
    return success
#
# rfc3987 iri
#
class rfc3987(rfc2396):
  """IRI: IDNA-encode to a plain URI, then validate as rfc2396."""
  def validate(self, errorClass=InvalidIRI, successClass=ValidURI, extraParams={}):
    try:
      if self.value: self.value = self.value.encode('idna')
    except:
      # NOTE(review): bare except kept; the known failure is a
      # UnicodeError from IDNA encoding
      pass # apparently '.' produces label too long
    return rfc2396.validate(self, errorClass, successClass, extraParams)
class rfc2396_full(rfc2396):
  """rfc2396 restricted to absolute URIs: the scheme is mandatory and
  the remainder must be non-empty."""
  rfc2396_re = re.compile("[a-zA-Z][0-9a-zA-Z+\\-\\.]*:(//)?" +
    "[0-9a-zA-Z;/?:@&=+$\\.\\-_!~*'()%,#]+$")
  def validate(self, errorClass=InvalidFullLink, successClass=ValidURI, extraParams={}):
    return rfc2396.validate(self, errorClass, successClass, extraParams)
#
# URI reference resolvable relative to xml:base
#
class xmlbase(rfc3987):
  """IRI additionally checked against the in-scope xml:base."""
  def validate(self, errorClass=InvalidIRI, successClass=ValidURI, extraParams={}):
    if rfc3987.validate(self, errorClass, successClass, extraParams):
      if self.dispatcher.xmlBase != self.xmlBase:
        # compare fragment-less canonical forms: a link that resolves to
        # its own element's base (but not the document base) is suspect
        docbase=canonicalForm(self.dispatcher.xmlBase).split('#')[0]
        elembase=canonicalForm(self.xmlBase).split('#')[0]
        value=canonicalForm(urljoin(elembase,self.value)).split('#')[0]
        if (value==elembase) and (elembase.encode('idna')!=docbase):
          self.log(SameDocumentReference({"parent":self.parent.name, "element":self.name, "value":self.value}))
#
# rfc822 dateTime (+Y2K extension)
#
class rfc822(text):
  """Content must be an RFC 822/2822 date (with the Y2K extension).

  A strict RFC 2822 match gets full checking (day-of-week agreement and
  plausibility); otherwise the laxer RFC 822 grammar — with comments,
  quoted pairs and 2-digit years — is accepted with a 'problematical'
  warning.
  """
  rfc822_re = re.compile("(((mon)|(tue)|(wed)|(thu)|(fri)|(sat)|(sun))\s*,\s*)?" +
    "\d\d?\s+((jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|(aug)|(sep)|(oct)|" +
    "(nov)|(dec))\s+\d\d(\d\d)?\s+\d\d:\d\d(:\d\d)?\s+(([+-]\d\d\d\d)|" +
    "(ut)|(gmt)|(est)|(edt)|(cst)|(cdt)|(mst)|(mdt)|(pst)|(pdt)|[a-ik-z])?$",
    re.UNICODE)
  rfc2822_re = re.compile("(((Mon)|(Tue)|(Wed)|(Thu)|(Fri)|(Sat)|(Sun)), )?" +
    "\d\d? ((Jan)|(Feb)|(Mar)|(Apr)|(May)|(Jun)|(Jul)|(Aug)|(Sep)|(Oct)|" +
    "(Nov)|(Dec)) \d\d\d\d \d\d:\d\d(:\d\d)? (([+-]?\d\d[03]0)|" +
    "(UT)|(GMT)|(EST)|(EDT)|(CST)|(CDT)|(MST)|(MDT)|(PST)|(PDT)|Z)$")
  def validate(self):
    if self.rfc2822_re.match(self.value):
      # NOTE(review): calendar is imported but unused
      import calendar
      value = parsedate(self.value)
      try:
        if value[0] > 1900:
          # verify the stated day-of-week against the actual date
          dow = datetime.date(*value[:3]).strftime("%a")
          if self.value.find(',')>0 and dow.lower() != self.value[:3].lower():
            self.log(IncorrectDOW({"parent":self.parent.name, "element":self.name, "value":self.value[:3]}))
            return
      except ValueError, e:
        self.log(InvalidRFC2822Date({"parent":self.parent.name, "element":self.name, "value":str(e)}))
        return
      tomorrow=time.localtime(time.time()+86400)
      # NOTE(review): 9-tuple vs struct_time comparison — relies on the
      # Python 2 sequence comparison rules this module targets
      if value > tomorrow or value[0] < 1970:
        self.log(ImplausibleDate({"parent":self.parent.name,
          "element":self.name, "value":self.value}))
      else:
        self.log(ValidRFC2822Date({"parent":self.parent.name, "element":self.name, "value":self.value}))
    else:
      # RFC 822 fallback: strip quoted pairs, then (possibly nested)
      # parenthesized comments, before matching case-insensitively
      value1,value2 = '', self.value
      value2 = re.sub(r'[\\](.)','',value2)
      while value1!=value2: value1,value2=value2,re.sub('\([^(]*?\)',' ',value2)
      if not self.rfc822_re.match(value2.strip().lower()):
        self.log(InvalidRFC2822Date({"parent":self.parent.name, "element":self.name, "value":self.value}))
      else:
        self.log(ProblematicalRFC822Date({"parent":self.parent.name, "element":self.name, "value":self.value}))
#
# Decode html entityrefs
#
from htmlentitydefs import name2codepoint
def decodehtml(data):
  """Expand HTML character and entity references in *data*.

  Decimal (&#NN;) and hex (&#xNN; / &#XNN;) character references and the
  named entities known to htmlentitydefs are decoded; anything
  unrecognized or malformed is passed through unchanged.

  Fixes: the previous split pattern discarded the '#', so hex references
  were restored without it (corrupting the text), `&#xi;` was wrongly
  decoded as the named entity 'xi', and bare digit runs like `&38;`
  (not a legal reference) were decoded as if they were.
  """
  chunks=re.split('&(#?\w+);',data)
  # odd indexes hold the reference bodies captured by the split
  for i in range(1,len(chunks),2):
    ref = chunks[i]
    if ref.startswith('#'):
      # numeric character reference; hex marker is case-insensitive
      try:
        if ref[1:2].lower() == 'x':
          chunks[i]=unichr(int(ref[2:],16))
        else:
          chunks[i]=unichr(int(ref[1:]))
      except ValueError:
        chunks[i]='&' + ref + ';'
    elif ref in name2codepoint:
      chunks[i]=unichr(name2codepoint[ref])
    else:
      chunks[i]='&' + ref + ';'
  return u"".join(map(unicode,chunks))
#
# Scan HTML for relative URLs
#
class absUrlMixin:
  """Mixin: scan (X)HTML content for relative URLs in <a href>/<img src>
  and log ContainsRelRef for each one found."""
  anchor_re = re.compile('<a\s+href=(?:"(.*?)"|\'(.*?)\'|([\w-]+))\s*>', re.IGNORECASE)
  img_re = re.compile('<img\s+[^>]*src=(?:"(.*?)"|\'(.*?)\'|([\w-]+))[\s>]', re.IGNORECASE)
  absref_re = re.compile("\w+:")
  def validateAbsUrl(self,value):
    # Fix: scan the supplied `value` — the parameter was previously
    # ignored in favor of self.value (all visible callers pass
    # self.value, so their behavior is unchanged).
    refs = self.img_re.findall(value) + self.anchor_re.findall(value)
    # each findall hit is a 3-tuple of alternation groups; exactly one
    # is non-empty, and `a or b` folding picks it out
    for ref in [reduce(lambda a,b: a or b, x) for x in refs]:
      if not self.absref_re.match(decodehtml(ref)):
        self.log(ContainsRelRef({"parent":self.parent.name, "element":self.name, "value": ref}))
#
# Scan HTML for 'devious' content
#
class safeHtmlMixin:
  """Mixin: run content through HTMLValidator (which logs its findings
  against this element as a side effect of construction)."""
  def validateSafe(self,value):
    HTMLValidator(value, self)
class safeHtml(text, safeHtmlMixin, absUrlMixin):
  """Element whose content may be HTML, provided it is safe HTML and
  contains only absolute URLs."""
  def prevalidate(self):
    self.children.append(True) # force warnings about "mixed" content
  def validate(self):
    self.validateSafe(self.value)
    self.validateAbsUrl(self.value)
#
# Elements for which email addresses are discouraged
#
class nonemail(text):
  """Element in which '<addr@host>' style e-mail addresses are
  discouraged."""
  # reuse addr_spec's pattern, dropping its trailing '$' and wrapping
  # the whole thing in angle brackets
  email_re = re.compile("<" + addr_spec.email_re.pattern[:-1] + ">")
  def validate(self):
    if self.email_re.search(self.value):
      self.log(ContainsEmail({"parent":self.parent.name, "element":self.name}))
#
# Elements for which html is discouraged, also checks for relative URLs
#
class nonhtml(text,safeHtmlMixin):#,absUrlMixin):
  """Element where HTML markup is discouraged.

  Detection is heuristic: a closing tag for a known HTML element, or an
  entity reference the document did not declare literally.
  """
  htmlEndTag_re = re.compile("</(\w+)>")
  htmlEntity_re = re.compile("&(#?\w+);")
  def prevalidate(self):
    self.children.append(True) # force warnings about "mixed" content
  def validate(self, message=ContainsHTML):
    # closing tags of real HTML elements are the strongest signal
    tags = [t for t in self.htmlEndTag_re.findall(self.value) if t.lower() in HTMLValidator.htmltags]
    if tags:
      self.log(message({"parent":self.parent.name, "element":self.name, "value":tags[0]}))
    elif self.htmlEntity_re.search(self.value):
      for value in self.htmlEntity_re.findall(self.value):
        from htmlentitydefs import name2codepoint
        # numeric references and known named entities count as HTML,
        # unless the entity was declared literally in the document
        if (value in name2codepoint or not value.isalpha()) and \
          value not in self.dispatcher.literal_entities:
          self.log(message({"parent":self.parent.name, "element":self.name, "value":'&'+value+';'}))
#
# valid e-mail addresses
#
class email(addr_spec,nonhtml):
  """E-mail element: no HTML allowed, and the addr-spec must be valid."""
  message = InvalidContact
  def validate(self):
    candidate = self.value
    addrs = AddressList(self.value)
    if len(addrs) == 1:
      # a single "Name <addr>" entry is reduced to the bare addr-spec
      candidate = addrs[0][1]
    nonhtml.validate(self)
    addr_spec.validate(self, candidate)
class nonNegativeInteger(text):
  """Content must parse as an integer >= 0."""
  def validate(self):
    try:
      if int(self.value) < 0:
        raise ValueError
    except ValueError:
      self.log(InvalidNonNegativeInteger({"parent":self.parent.name, "element":self.name, "value":self.value}))
    else:
      self.log(ValidInteger({"parent":self.parent.name, "element":self.name, "value":self.value}))
class positiveInteger(text):
  """Content must parse as an integer > 0; empty content is accepted."""
  def validate(self):
    if self.value == '':
      return  # value is optional
    try:
      if int(self.value) <= 0:
        raise ValueError
    except ValueError:
      self.log(InvalidPositiveInteger({"parent":self.parent.name, "element":self.name, "value":self.value}))
    else:
      self.log(ValidInteger({"parent":self.parent.name, "element":self.name, "value":self.value}))
class Integer(text):
  """Content must parse as a (possibly signed) integer; empty is allowed."""
  def validate(self):
    if self.value == '':
      return  # value is optional
    try:
      int(self.value)
    except ValueError:
      self.log(InvalidInteger({"parent":self.parent.name, "element":self.name, "value":self.value}))
    else:
      self.log(ValidInteger({"parent":self.parent.name, "element":self.name, "value":self.value}))
class Float(text):
  """Content must be an unsigned decimal number (digits, optional
  fractional part)."""
  def validate(self, name=None):
    if re.match('\d+\.?\d*$', self.value):
      return
    self.log(InvalidFloat({"attr": name or self.name, "value": self.value}))
class percentType(text):
  """Content must be a number between 0 and 100 inclusive."""
  def validate(self):
    try:
      pct = float(self.value)
      if pct < 0.0 or pct > 100.0:
        raise ValueError
    except ValueError:
      self.log(InvalidPercentage({"parent":self.parent.name, "element":self.name, "value":self.value}))
    else:
      self.log(ValidPercentage({"parent":self.parent.name, "element":self.name, "value":self.value}))
class latitude(text):
  """Content must be a latitude in decimal degrees (-90 .. 90)."""
  def validate(self):
    try:
      degrees = float(self.value)
      if degrees > 90 or degrees < -90:
        raise ValueError
    except ValueError:
      self.log(InvalidLatitude({"parent":self.parent.name, "element":self.name, "value":self.value}))
    else:
      self.log(ValidLatitude({"parent":self.parent.name, "element":self.name, "value":self.value}))
class longitude(text):
  """Content must be a longitude in decimal degrees (-180 .. 180)."""
  def validate(self):
    try:
      degrees = float(self.value)
      if degrees > 180 or degrees < -180:
        raise ValueError
    except ValueError:
      self.log(InvalidLongitude({"parent":self.parent.name, "element":self.name, "value":self.value}))
    else:
      self.log(ValidLongitude({"parent":self.parent.name, "element":self.name, "value":self.value}))
#
# mixin to validate URL in attribute
#
class httpURLMixin:
  """Mixin: validate that a given attribute holds a full http URL."""
  http_re = re.compile("http://", re.IGNORECASE)
  def validateHttpURL(self, ns, attr):
    value = self.attrs[(ns, attr)]
    # NOTE(review): search() means any value merely *containing*
    # 'http://' passes this first gate; the full-URI match below still
    # applies — confirm this leniency is intended
    if not self.http_re.search(value):
      self.log(InvalidURLAttribute({"parent":self.parent.name, "element":self.name, "attr":attr}))
    elif not rfc2396_full.rfc2396_re.match(value):
      self.log(InvalidURLAttribute({"parent":self.parent.name, "element":self.name, "attr":attr}))
    else:
      self.log(ValidURLAttribute({"parent":self.parent.name, "element":self.name, "attr":attr}))
class rdfResourceURI(rfc2396):
  """URI carried in an rdf:resource attribute rather than element text."""
  def getExpectedAttrNames(self):
    return [(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#', u'resource'),
            (u'http://purl.org/dc/elements/1.1/', u'title')]
  def validate(self):
    if (rdfNS, 'resource') in self.attrs.getNames():
      self.value=self.attrs.getValue((rdfNS, 'resource'))
      rfc2396.validate(self)
    elif self.getFeedType() == TYPE_RSS1:
      # rdf:resource is mandatory in RSS 1.0
      self.log(MissingAttribute({"parent":self.parent.name, "element":self.name, "attr":"rdf:resource"}))
class rdfAbout(validatorBase):
  """Element that must carry an rdf:about attribute holding a valid URI."""
  def getExpectedAttrNames(self):
    return [(u'http://www.w3.org/1999/02/22-rdf-syntax-ns#', u'about')]
  def startElementNS(self, name, qname, attrs):
    pass  # children are not this validator's concern
  def validate(self):
    if (rdfNS, 'about') not in self.attrs.getNames():
      self.log(MissingAttribute({"parent":self.parent.name, "element":self.name, "attr":"rdf:about"}))
    else:
      # delegate URI syntax checking to a throwaway rfc2396 instance
      test=rfc2396().setElement(self.name, self.attrs, self)
      test.value=self.attrs.getValue((rdfNS, 'about'))
      test.validate()
class nonblank(text):
  """Content must be non-empty."""
  def validate(self, errorClass=NotBlank, extraParams={}):
    if self.value:
      return
    logparams = {"parent":self.parent.name, "element":self.name}
    logparams.update(extraParams)
    self.log(errorClass(logparams))
class nows(text):
  """Flags leading/trailing whitespace in the content — at most once."""
  def __init__(self):
    self.ok = 1  # cleared after the first warning
    text.__init__(self)
  def characters(self, string):
    text.characters(self, string)
    if not self.ok:
      return
    if self.value != self.value.strip():
      self.log(UnexpectedWhitespace({"parent":self.parent.name, "element":self.name}))
      self.ok = 0
class unique(nonblank):
  """A nonblank value that must also be unique within a given scope.

  Seen values accumulate on the scope object in an attribute named
  '<name>s'.
  """
  def __init__(self, name, scope, message=DuplicateValue):
    self.name = name
    self.scope = scope
    self.message = message
    nonblank.__init__(self)
    self.scope.__dict__.setdefault(name + 's', [])
  def validate(self):
    nonblank.validate(self)
    seen = self.scope.__dict__[self.name + 's']
    if self.value in seen:
      self.log(self.message({"parent":self.parent.name, "element":self.name,"value":self.value}))
    elif self.value:
      seen.append(self.value)
class rfc3987_full(xmlbase):
  """Absolute IRI: the stricter absolute-URI pattern with xmlbase's
  attribute handling."""
  rfc2396_re = rfc2396_full.rfc2396_re
  def validate(self, errorClass=InvalidFullLink, successClass=ValidURI, extraParams={}):
    # deliberately calls rfc2396.validate directly, bypassing the
    # xmlbase/rfc3987 steps: only the syntax check is wanted here
    return rfc2396.validate(self, errorClass, successClass, extraParams)
class canonicaluri(rfc3987_full):
  """Absolute IRI that must already be in canonical form."""
  def validate(self):
    prestrip = self.value
    self.value = self.value.strip()
    if rfc3987_full.validate(self):
      c = canonicalForm(self.value)
      # compares against the *unstripped* original, so surrounding
      # whitespace alone triggers NonCanonicalURI
      if c is None or c != prestrip:
        self.log(NonCanonicalURI({"parent":self.parent.name,"element":self.name,"uri":prestrip, "curi":c or 'N/A'}))
class yesno(text):
  """Content must be 'yes', 'no' or 'clean' (case-insensitive)."""
  def normalizeWhitespace(self):
    # skip the base class's whitespace normalization: the value is
    # checked as-is (apart from lowercasing)
    pass
  def validate(self):
    if self.value.lower() in ('yes', 'no', 'clean'):
      return
    self.log(InvalidYesNo({"parent":self.parent.name, "element":self.name,"value":self.value}))
class truefalse(text):
  """Content must be 'true' or 'false' (case-insensitive)."""
  def normalizeWhitespace(self):
    # skip the base class's whitespace normalization: the value is
    # checked as-is (apart from lowercasing)
    pass
  def validate(self):
    if self.value.lower() in ('true', 'false'):
      return
    self.log(InvalidTrueFalse({"parent":self.parent.name, "element":self.name,"value":self.value}))
class duration(text):
  """Content must look like a [HH:]MM:SS duration."""
  duration_re = re.compile("([0-9]?[0-9]:)?[0-5]?[0-9]:[0-5][0-9]$")
  def validate(self):
    if self.duration_re.search(self.value):
      return
    self.log(InvalidDuration({"parent":self.parent.name, "element":self.name
      , "value":self.value}))
class lengthLimitedText(nonhtml):
  """nonhtml content additionally limited to a maximum length."""
  def __init__(self, max):
    # parameter name kept for callers, although it shadows the builtin
    self.max = max
    text.__init__(self)
  def validate(self):
    actual = len(self.value)
    if actual > self.max:
      self.log(TooLong({"parent":self.parent.name, "element":self.name,
        "len": actual, "max": self.max}))
    nonhtml.validate(self)
class keywords(text):
  """Heuristic: a keyword list containing spaces but no commas is
  probably space-separated, which is flagged."""
  def validate(self):
    has_space = self.value.find(' ') >= 0
    has_comma = self.value.find(',') >= 0
    if has_space and not has_comma:
      self.log(InvalidKeywords({"parent":self.parent.name, "element":self.name}))
class commaSeparatedIntegers(text):
  """Content must be one or more integers separated by commas."""
  csi_re = re.compile("^\d+(,\s*\d+)*$")  # hoisted: compiled once
  def validate(self):
    if not self.csi_re.match(self.value):
      self.log(InvalidCommaSeparatedIntegers({"parent":self.parent.name,
        "element":self.name}))
class formname(text):
  """Content must be a form component name: a letter followed by
  letters, digits, ':', '.' or '_'."""
  def validate(self):
    # Fixed character classes: the original used [a-zA-z], which (since
    # 'Z' < 'a' in ASCII) also accepted '[', '\', ']', '^', '_' and '`'.
    # NOTE(review): the pattern is still unanchored at the end, so
    # trailing junk passes — confirm whether a '$' was intended.
    if not re.match("^[a-zA-Z][a-zA-Z0-9:._]*", self.value):
      self.log(InvalidFormComponentName({"parent":self.parent.name,
        "element":self.name, "value":self.value}))
class enumeration(text):
  """Content must be one of the values in the subclass's `valuelist`.

  Subclasses supply `valuelist` and the log class `error`.
  """
  def validate(self):
    if self.value not in self.valuelist:
      # rebuild 'prefix:local' from the internal 'prefix_local' name
      self.log(self.error({"parent":self.parent.name, "element":self.name,
        "attr": ':'.join(self.name.split('_',1)), "value":self.value}))
class caseinsensitive_enumeration(enumeration):
  """enumeration matched after lowercasing the value."""
  def validate(self):
    self.value=self.value.lower()  # note: mutates self.value in place
    enumeration.validate(self)
class iso3166(enumeration):
  """Two-letter ISO 3166 country code (snapshot of the registry at the
  time this validator was written)."""
  error = InvalidCountryCode
  valuelist = [
    "AD", "AE", "AF", "AG", "AI", "AM", "AN", "AO", "AQ", "AR", "AS", "AT",
    "AU", "AW", "AZ", "BA", "BB", "BD", "BE", "BF", "BG", "BH", "BI", "BJ",
    "BM", "BN", "BO", "BR", "BS", "BT", "BV", "BW", "BY", "BZ", "CA", "CC",
    "CD", "CF", "CG", "CH", "CI", "CK", "CL", "CM", "CN", "CO", "CR", "CU",
    "CV", "CX", "CY", "CZ", "DE", "DJ", "DK", "DM", "DO", "DZ", "EC", "EE",
    "EG", "EH", "ER", "ES", "ET", "FI", "FJ", "FK", "FM", "FO", "FR", "GA",
    "GB", "GD", "GE", "GF", "GH", "GI", "GL", "GM", "GN", "GP", "GQ", "GR",
    "GS", "GT", "GU", "GW", "GY", "HK", "HM", "HN", "HR", "HT", "HU", "ID",
    "IE", "IL", "IN", "IO", "IQ", "IR", "IS", "IT", "JM", "JO", "JP", "KE",
    "KG", "KH", "KI", "KM", "KN", "KP", "KR", "KW", "KY", "KZ", "LA", "LB",
    "LC", "LI", "LK", "LR", "LS", "LT", "LU", "LV", "LY", "MA", "MC", "MD",
    "MG", "MH", "MK", "ML", "MM", "MN", "MO", "MP", "MQ", "MR", "MS", "MT",
    "MU", "MV", "MW", "MX", "MY", "MZ", "NA", "NC", "NE", "NF", "NG", "NI",
    "NL", "NO", "NP", "NR", "NU", "NZ", "OM", "PA", "PE", "PF", "PG", "PH",
    "PK", "PL", "PM", "PN", "PR", "PS", "PT", "PW", "PY", "QA", "RE", "RO",
    "RU", "RW", "SA", "SB", "SC", "SD", "SE", "SG", "SH", "SI", "SJ", "SK",
    "SL", "SM", "SN", "SO", "SR", "ST", "SV", "SY", "SZ", "TC", "TD", "TF",
    "TG", "TH", "TJ", "TK", "TM", "TN", "TO", "TR", "TT", "TV", "TW", "TZ",
    "UA", "UG", "UM", "US", "UY", "UZ", "VA", "VC", "VE", "VG", "VI", "VN",
    "VU", "WF", "WS", "YE", "YT", "ZA", "ZM", "ZW"]
class iso4217(enumeration):
  """Three-letter ISO 4217 currency code (snapshot of the registry at
  the time this validator was written)."""
  error = InvalidCurrencyUnit
  valuelist = [
    "AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZM",
    "BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BOV",
    "BRL", "BSD", "BTN", "BWP", "BYR", "BZD", "CAD", "CDF", "CHE", "CHF",
    "CHW", "CLF", "CLP", "CNY", "COP", "COU", "CRC", "CSD", "CUP", "CVE",
    "CYP", "CZK", "DJF", "DKK", "DOP", "DZD", "EEK", "EGP", "ERN", "ETB",
    "EUR", "FJD", "FKP", "GBP", "GEL", "GHC", "GIP", "GMD", "GNF", "GTQ",
    "GWP", "GYD", "HKD", "HNL", "HRK", "HTG", "HUF", "IDR", "ILS", "INR",
    "IQD", "IRR", "ISK", "JMD", "JOD", "JPY", "KES", "KGS", "KHR", "KMF",
    "KPW", "KRW", "KWD", "KYD", "KZT", "LAK", "LBP", "LKR", "LRD", "LSL",
    "LTL", "LVL", "LYD", "MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP",
    "MRO", "MTL", "MUR", "MWK", "MXN", "MXV", "MYR", "MZM", "NAD", "NGN",
    "NIO", "NOK", "NPR", "NZD", "OMR", "PAB", "PEN", "PGK", "PHP", "PKR",
    "PLN", "PYG", "QAR", "ROL", "RON", "RUB", "RWF", "SAR", "SBD", "SCR",
    "SDD", "SEK", "SGD", "SHP", "SIT", "SKK", "SLL", "SOS", "SRD", "STD",
    "SVC", "SYP", "SZL", "THB", "TJS", "TMM", "TND", "TOP", "TRL", "TRY",
    "TTD", "TWD", "TZS", "UAH", "UGX", "USD", "USN", "USS", "UYU", "UZS",
    "VEB", "VND", "VUV", "WST", "XAF", "XAG", "XAU", "XBA", "XBB", "XBC",
    "XBD", "XCD", "XDR", "XFO", "XFU", "XOF", "XPD", "XPF", "XPT", "XTS",
    "XXX", "YER", "ZAR", "ZMK", "ZWD"]
| mit |
movicha/dcos | setup.py | 1 | 2953 | from setuptools import setup
def get_advanced_templates():
    """Return the paths of the advanced AWS CloudFormation templates."""
    base = 'aws/templates/advanced/'
    names = ('advanced-master', 'advanced-priv-agent', 'advanced-pub-agent',
             'infra', 'zen')
    return ['{0}{1}.json'.format(base, name) for name in names]
# Top-level packaging invocation for the dcos_image distribution.
setup(
    name='dcos_image',
    version='0.1',
    description='DC/OS packaging, management, install utilities',
    url='https://dcos.io',
    author='Mesosphere, Inc.',
    author_email='help@dcos.io',
    license='apache2',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
    ],
    # Explicit package list (no find_packages()) so only these are shipped.
    packages=[
        'gen',
        'gen.aws',
        'gen.azure',
        'gen.installer',
        'pkgpanda',
        'pkgpanda.build',
        'pkgpanda.http',
        'release',
        'release.storage',
        'ssh',
        'test_util'],
    install_requires=[
        'Flask',
        # Pins taken from 'azure==2.0.0rc4'
        'msrest==0.4.0',
        'msrestazure==0.4.1',
        'azure-storage==0.32.0',
        'azure-mgmt-network==0.30.0rc4',
        'azure-mgmt-resource==0.30.0rc4',
        'boto3',
        'botocore',
        'coloredlogs',
        'docopt',
        'passlib',
        'pyyaml',
        'requests',
        'retrying',
        'keyring==9.1'],  # FIXME: pin keyring to prevent dbus dep
    # Command-line entry points installed into the environment's bin/.
    entry_points={
        'console_scripts': [
            'release=release:main',
            'ccm-deploy-test=test_util.test_installer_ccm:main',
            'pkgpanda=pkgpanda.cli:main',
            'mkpanda=pkgpanda.build.cli:main',
        ],
    },
    # Non-Python assets (templates, shell scripts, Dockerfiles) bundled
    # alongside their owning packages.
    package_data={
        'gen': [
            'ip-detect/aws.sh',
            'ip-detect/aws_public.sh',
            'ip-detect/azure.sh',
            'ip-detect/vagrant.sh',
            'cloud-config.yaml',
            'dcos-config.yaml',
            'dcos-metadata.yaml',
            'dcos-services.yaml',
            'aws/dcos-config.yaml',
            'aws/templates/aws.html',
            'aws/templates/cloudformation.json',
            'azure/cloud-config.yaml',
            'azure/azuredeploy-parameters.json',
            'azure/templates/acs.json',
            'azure/templates/azure.html',
            'azure/templates/azuredeploy.json',
            'installer/bash/dcos_generate_config.sh.in',
            'installer/bash/Dockerfile.in',
            'installer/bash/installer_internal_wrapper.in',
            'coreos-aws/cloud-config.yaml',
            'coreos/cloud-config.yaml'
        ] + get_advanced_templates(),
        'pkgpanda': [
            'docker/dcos-builder/Dockerfile'
        ],
        'test_util': [
            'docker/py.test/Dockerfile',
            'docker/test_server/Dockerfile',
            'docker/test_server/test_server.py',
            'integration_test.py'],
    },
    # Package data must live on the real filesystem (templates are read as
    # files at runtime), so the distribution cannot run from a zip.
    zip_safe=False
)
| apache-2.0 |
nickjacek/yum-cron | test/merge-history-transactions-tests.py | 11 | 35064 | import unittest
import yum.history as hist
_fake_count = 0
class FakeYumHistoryTransaction(hist.YumHistoryTransaction):
    """Stand-in for YumHistoryTransaction that fabricates transaction
    metadata (tid, timestamps, rpmdb versions) from a module-level counter
    instead of reading them from the history database, so merge tests can
    construct ordered transactions cheaply."""

    def __init__(self, pkgs, tid=None, beg_timestamp=None, end_timestamp=None,
                 beg_rpmdbversion=None, end_rpmdbversion=None,
                 loginuid=0, return_code=0, pkgs_with=None,
                 errors=None, output=None):
        # Auto-generate any metadata the caller did not supply.  Every value
        # comes from the shared, monotonically increasing _fake_count so that
        # successive fake transactions are strictly ordered.
        global _fake_count
        if tid is None:
            _fake_count += 1
            tid = _fake_count
        if beg_timestamp is None:
            _fake_count += 1
            beg_timestamp = _fake_count
        if end_timestamp is None:
            _fake_count += 1
            end_timestamp = _fake_count
        if beg_rpmdbversion is None:
            _fake_count += 1
            beg_rpmdbversion = '?:<n/a>,' + str(_fake_count)
        if end_rpmdbversion is None:
            _fake_count += 1
            end_rpmdbversion = '?:<n/a>,' + str(_fake_count)
        self.tid = tid
        self.beg_timestamp = beg_timestamp
        self.beg_rpmdbversion = beg_rpmdbversion
        self.end_timestamp = end_timestamp
        self.end_rpmdbversion = end_rpmdbversion
        self.loginuid = loginuid
        self.return_code = return_code
        # BUG FIX: the original used mutable default arguments ([]) for
        # pkgs_with/errors/output and stored them directly on the instance,
        # so every transaction created without explicit arguments shared
        # (and could corrupt) the very same list objects.  Use None
        # sentinels and allocate a fresh list per instance instead.
        self._loaded_TW = [] if pkgs_with is None else pkgs_with
        self._loaded_TD = pkgs
        self._loaded_ER = [] if errors is None else errors
        self._loaded_OT = [] if output is None else output
        self.altered_lt_rpmdb = None
        self.altered_gt_rpmdb = None
def _dump_trans_data(pkgs):
""" For debugging to see WTF is going on with .trans_data. """
return [(str(pkg), pkg.state) for pkg in pkgs]
class MergeHistTransTests(unittest.TestCase):
def __init__(self, methodName='runTest'):
    # Delegates straight to TestCase; kept only as an explicit hook point.
    unittest.TestCase.__init__(self, methodName)
def setUp(self):
    # No shared fixtures: each test builds its own fake transactions.
    pass
def tearDown(self):
    # Nothing to clean up; fakes hold no external resources.
    pass
def _merge_new(self, trans):
    """Fold a list of transactions into one YumMergedHistoryTransaction,
    seeded with the first transaction and merged in list order."""
    first, rest = trans[0], trans[1:]
    merged = hist.YumMergedHistoryTransaction(first)
    for extra in rest:
        merged.merge(extra)
    return merged
def _trans_new(self, *args, **kwargs):
    # Factory wrapper so individual tests don't hard-code the fake class.
    return FakeYumHistoryTransaction(*args, **kwargs)
def _pkg_new(self, name, version='1', release='2',
             arch='noarch', epoch='0', checksum=None, state='Install'):
    # Build a YumHistoryPackageState with sensible defaults; 'state' must
    # be one of the known history state strings (hist._sttxt2stcode keys).
    self.assertTrue(state in hist._sttxt2stcode)
    pkg = hist.YumHistoryPackageState(name,arch,epoch,version,release,
                                      state, checksum)
    return pkg
def assertMergedBeg(self, merged, beg):
    # 'beg' is the chronologically first source transaction: its tid must
    # appear in the merged tid set and its begin markers must become the
    # merged transaction's begin markers.
    self.assertTrue(beg.tid in merged.tid)
    self.assertEquals(beg.beg_timestamp, merged.beg_timestamp)
    self.assertEquals(beg.beg_rpmdbversion, merged.beg_rpmdbversion)
def assertMergedEnd(self, merged, end):
    # 'end' is the chronologically last source transaction: its tid must
    # appear in the merged tid set and its end markers must become the
    # merged transaction's end markers.
    self.assertTrue(end.tid in merged.tid)
    self.assertEquals(end.end_timestamp, merged.end_timestamp)
    self.assertEquals(end.end_rpmdbversion, merged.end_rpmdbversion)
def assertMergedCodes(self, merged, trans):
ret = set()
uid = set()
for trans in trans:
ret.add(trans.loginuid)
uid.add(trans.return_code)
if len(ret) == 1:
self.assertEquals(list(ret)[0], merged.return_code)
else:
for ret in ret:
self.assertTrue(ret in merged.return_code)
if len(uid) == 1:
self.assertEquals(list(uid)[0], merged.loginuid)
else:
for uid in uid:
self.assertTrue(uid in merged.loginuid)
def assertMergedMain(self, merged, trans):
    # Common merged-metadata checks: begin markers from the first source
    # transaction, end markers from the last, codes/uids from all of them.
    self.assertMergedBeg(merged, trans[0])
    self.assertMergedEnd(merged, trans[-1])
    self.assertMergedCodes(merged, trans)
def testSimpleInMerge1(self, xstate='Install'):
    # Two unrelated installs in separate transactions must survive the
    # merge untouched, ordered by package name ('foo' before 'xbar').
    pkg1 = self._pkg_new('foo', state=xstate)
    pkg2 = self._pkg_new('xbar', version='4')
    trans = []
    trans.append(self._trans_new([pkg1]))
    trans.append(self._trans_new([pkg2]))
    merged = self._merge_new(trans)
    self.assertMergedMain(merged, trans)
    pkgs = merged.trans_data
    self.assertEquals(len(pkgs), 2)
    self.assertEquals(pkgs[0], pkg1)
    self.assertEquals(pkgs[0].state, xstate)
    self.assertEquals(pkgs[1], pkg2)
    self.assertEquals(pkgs[1].state, pkg2.state)
def testSimpleInMerge2(self, xstate='Install'):
pkg1 = self._pkg_new('foo', state=xstate)
pkg2 = self._pkg_new('bar', version='4')
pkg3 = self._pkg_new('xbar', version='6')
pkg4 = self._pkg_new('xfoo', version='3')
trans = []
trans.append(self._trans_new([pkg1, pkg3]))
trans.append(self._trans_new([pkg2, pkg4]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 4)
self.assertEquals(pkgs[0], pkg2)
self.assertEquals(pkgs[0].state, pkg2.state)
self.assertEquals(pkgs[1], pkg1)
self.assertEquals(pkgs[1].state, xstate)
self.assertEquals(pkgs[2], pkg3)
self.assertEquals(pkgs[2].state, pkg3.state)
self.assertEquals(pkgs[3], pkg4)
self.assertEquals(pkgs[3].state, pkg4.state)
def testSimpleUpMerge1(self, xstate='Update'):
opkg1 = self._pkg_new('foo', state='Updated')
npkg1 = self._pkg_new('foo', version='3', state=xstate)
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
trans = []
trans.append(self._trans_new([opkg1, npkg1]))
trans.append(self._trans_new([opkg2, npkg2]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 4)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[0].state, opkg2.state)
self.assertEquals(pkgs[1], npkg2)
self.assertEquals(pkgs[1].state, npkg2.state)
self.assertEquals(pkgs[2], opkg1)
self.assertEquals(pkgs[2].state, opkg1.state)
self.assertEquals(pkgs[3], npkg1)
self.assertEquals(pkgs[3].state, xstate)
def testSimpleUpMerge2(self, xstate='Update'):
opkg1 = self._pkg_new('foo', state='Updated')
npkg1 = self._pkg_new('foo', version='3', state=xstate)
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
opkg3 = self._pkg_new('foo', version='3', state='Updated')
npkg3 = self._pkg_new('foo', version='5', state='Update')
trans = []
trans.append(self._trans_new([opkg2, npkg2, opkg1, npkg1]))
trans.append(self._trans_new([opkg3, npkg3]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 4)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[0].state, opkg2.state)
self.assertEquals(pkgs[1], npkg2)
self.assertEquals(pkgs[1].state, npkg2.state)
self.assertEquals(pkgs[2], opkg1)
self.assertEquals(pkgs[2].state, opkg1.state)
self.assertEquals(pkgs[3], npkg3)
self.assertEquals(pkgs[3].state, xstate)
def testSimpleUpMerge3(self, xstate='Install'):
opkg1 = self._pkg_new('foo', state=xstate)
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
opkg3 = self._pkg_new('foo', state='Updated')
npkg3 = self._pkg_new('foo', version='5', state='Update')
trans = []
trans.append(self._trans_new([opkg2, npkg2, opkg1]))
trans.append(self._trans_new([opkg3, npkg3]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 3)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[0].state, opkg2.state)
self.assertEquals(pkgs[1], npkg2)
self.assertEquals(pkgs[1].state, npkg2.state)
self.assertEquals(pkgs[2], npkg3)
self.assertEquals(pkgs[2].state, xstate)
def testSimpleUpMultiMerge1(self, xstate='Install'):
opkg1 = self._pkg_new('foo', arch='i586', state=xstate)
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
opkg3 = self._pkg_new('foo', arch='i586', state='Updated')
npkg3 = self._pkg_new('foo', arch='i686', version='5', state='Update')
trans = []
trans.append(self._trans_new([opkg2, npkg2, opkg1]))
trans.append(self._trans_new([opkg3, npkg3]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 3)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[0].state, opkg2.state)
self.assertEquals(pkgs[1], npkg2)
self.assertEquals(pkgs[1].state, npkg2.state)
self.assertEquals(pkgs[2], npkg3)
self.assertEquals(pkgs[2].state, xstate)
def testUpDownMerge1(self, xstate='Update'):
opkg1 = self._pkg_new('foo', version='0', state='Updated')
npkg1 = self._pkg_new('foo', state=xstate)
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
opkg3 = self._pkg_new('foo', state='Updated')
npkg3 = self._pkg_new('foo', version='7', state='Update')
opkg4 = self._pkg_new('foo', version='7', state='Downgraded')
npkg4 = self._pkg_new('foo', version='5', state='Downgrade')
trans = []
trans.append(self._trans_new([opkg2, npkg2, opkg1, npkg1]))
trans.append(self._trans_new([opkg3, npkg3]))
trans.append(self._trans_new([opkg4, npkg4]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 4)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[1], npkg2)
self.assertEquals(pkgs[2], opkg1)
self.assertNotEquals(pkgs[3], opkg3)
self.assertNotEquals(pkgs[3], npkg3)
self.assertNotEquals(pkgs[3], opkg4)
self.assertNotEquals(pkgs[3].state, npkg4.state)
self.assertEquals(pkgs[3].pkgtup, npkg4.pkgtup)
self.assertEquals(pkgs[3].state, xstate)
def testUpDownMerge2(self, xstate='Install'):
opkg1 = self._pkg_new('foo')
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
opkg3 = self._pkg_new('foo', state='Updated')
npkg3 = self._pkg_new('foo', version='7', state=xstate)
opkg4 = self._pkg_new('foo', version='7', state='Downgraded')
npkg4 = self._pkg_new('foo', version='5', state='Downgrade')
trans = []
trans.append(self._trans_new([opkg2, npkg2, opkg1]))
trans.append(self._trans_new([opkg3, npkg3]))
trans.append(self._trans_new([opkg4, npkg4]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 3)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[1], npkg2)
self.assertNotEquals(pkgs[2], opkg1)
self.assertNotEquals(pkgs[2], opkg3)
self.assertNotEquals(pkgs[2], npkg3)
self.assertNotEquals(pkgs[2], opkg4)
self.assertNotEquals(pkgs[2].state, npkg4.state)
self.assertEquals(pkgs[2].pkgtup, npkg4.pkgtup)
self.assertEquals(pkgs[2].state, xstate)
def testUpDownMerge3(self):
opkg1 = self._pkg_new('foo')
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
opkg3 = self._pkg_new('foo', version='3', state='Updated') # rpmdbv
npkg3 = self._pkg_new('foo', version='7', state='Update')
opkg4 = self._pkg_new('foo', version='7', state='Downgraded')
npkg4 = self._pkg_new('foo', version='3', state='Downgrade')
trans = []
trans.append(self._trans_new([opkg2, npkg2, opkg1]))
trans.append(self._trans_new([opkg3, npkg3]))
trans.append(self._trans_new([opkg4, npkg4]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 4)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[1], npkg2)
self.assertEquals(pkgs[2], opkg1)
self.assertEquals(pkgs[2].state, opkg1.state)
self.assertNotEquals(pkgs[3], opkg1)
self.assertNotEquals(pkgs[3].state, opkg3.state)
self.assertNotEquals(pkgs[3], npkg3)
self.assertNotEquals(pkgs[3], opkg4)
self.assertNotEquals(pkgs[3].state, npkg4.state)
self.assertEquals(pkgs[3].pkgtup, npkg4.pkgtup)
self.assertEquals(pkgs[3].state, 'Reinstall')
def testUpDownMerge4(self, xstate='Update'):
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
opkg3 = self._pkg_new('foo', version='3', state='Updated')
npkg3 = self._pkg_new('foo', version='7', state=xstate)
opkg4 = self._pkg_new('foo', version='7', state='Downgraded')
npkg4 = self._pkg_new('foo', version='3', state='Downgrade')
trans = []
trans.append(self._trans_new([opkg2, npkg2]))
trans.append(self._trans_new([opkg3, npkg3]))
trans.append(self._trans_new([opkg4, npkg4]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 3)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[1], npkg2)
self.assertNotEquals(pkgs[2].state, opkg3.state)
self.assertNotEquals(pkgs[2], npkg3)
self.assertNotEquals(pkgs[2], opkg4)
self.assertNotEquals(pkgs[2].state, npkg4.state)
self.assertEquals(pkgs[2].pkgtup, opkg3.pkgtup)
if xstate == 'Obsoleting':
self.assertEquals(pkgs[2].state, 'Obsoleting')
else:
self.assertEquals(pkgs[2].state, 'Reinstall')
def testUpDownMerge5(self, xstate='Update'):
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
opkg3 = self._pkg_new('foo', version='3', state='Updated')
npkg3 = self._pkg_new('foo', version='21', state=xstate)
opkg4 = self._pkg_new('foo', version='21', state='Downgraded')
npkg4 = self._pkg_new('foo', version='19', state='Downgrade')
opkg5 = self._pkg_new('foo', version='19', state='Downgraded')
npkg5 = self._pkg_new('foo', version='13', state='Downgrade')
trans = []
trans.append(self._trans_new([opkg2, npkg2]))
trans.append(self._trans_new([opkg3, npkg3]))
trans.append(self._trans_new([opkg4, npkg4]))
trans.append(self._trans_new([opkg5, npkg5]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 4)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[0].state, opkg2.state)
self.assertEquals(pkgs[1], npkg2)
self.assertEquals(pkgs[1].state, npkg2.state)
self.assertEquals(pkgs[2], opkg3)
self.assertEquals(pkgs[2].state, opkg3.state)
self.assertEquals(pkgs[3], npkg5)
self.assertEquals(pkgs[3].state, xstate)
def testDownUpMerge1(self, xstate='Downgrade'):
opkg1 = self._pkg_new('foo', version='10', state='Downgraded')
npkg1 = self._pkg_new('foo', version='9', state=xstate)
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
opkg3 = self._pkg_new('foo', version='7', state='Updated')
npkg3 = self._pkg_new('foo', version='8', state='Update')
opkg4 = self._pkg_new('foo', version='9', state='Downgraded')
npkg4 = self._pkg_new('foo', version='7', state='Downgrade')
trans = []
trans.append(self._trans_new([opkg2, npkg2, opkg1, npkg1]))
trans.append(self._trans_new([opkg4, npkg4]))
trans.append(self._trans_new([opkg3, npkg3]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 4)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[1], npkg2)
self.assertNotEquals(pkgs[2], opkg3)
self.assertNotEquals(pkgs[2].state, npkg3.state)
self.assertNotEquals(pkgs[2], opkg4)
self.assertNotEquals(pkgs[2], npkg4)
self.assertEquals(pkgs[2].pkgtup, npkg3.pkgtup)
self.assertEquals(pkgs[2].state, xstate)
self.assertEquals(pkgs[3], opkg1)
self.assertEquals(pkgs[3].state, opkg1.state)
def testDownUpMerge2(self, xstate='Install'):
opkg1 = self._pkg_new('foo', version='7', state=xstate)
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
opkg3 = self._pkg_new('foo', version='5', state='Updated')
npkg3 = self._pkg_new('foo', version='6', state='Update')
opkg4 = self._pkg_new('foo', version='7', state='Downgraded')
npkg4 = self._pkg_new('foo', version='5', state='Downgrade')
trans = []
trans.append(self._trans_new([opkg2, npkg2, opkg1]))
trans.append(self._trans_new([opkg4, npkg4]))
trans.append(self._trans_new([opkg3, npkg3]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 3)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[1], npkg2)
self.assertNotEquals(pkgs[2], opkg1)
self.assertNotEquals(pkgs[2], opkg3)
self.assertNotEquals(pkgs[2], opkg4)
self.assertNotEquals(pkgs[2], npkg4)
self.assertNotEquals(pkgs[2].state, npkg3.state)
self.assertEquals(pkgs[2].pkgtup, npkg3.pkgtup)
self.assertEquals(pkgs[2].state, xstate)
def testDownUpMerge3(self):
opkg1 = self._pkg_new('foo')
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
opkg3 = self._pkg_new('foo', version='3', state='Updated')
npkg3 = self._pkg_new('foo', version='7', state='Update')
opkg4 = self._pkg_new('foo', version='7', state='Downgraded') # rpmdbv
npkg4 = self._pkg_new('foo', version='3', state='Downgrade')
trans = []
trans.append(self._trans_new([opkg2, npkg2, opkg1]))
trans.append(self._trans_new([opkg4, npkg4]))
trans.append(self._trans_new([opkg3, npkg3]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 4)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[1], npkg2)
self.assertEquals(pkgs[2], opkg1)
self.assertEquals(pkgs[2].state, opkg1.state)
self.assertNotEquals(pkgs[3], opkg1)
self.assertNotEquals(pkgs[3], opkg3)
self.assertNotEquals(pkgs[3].state, npkg3.state)
self.assertNotEquals(pkgs[3].state, opkg4.state)
self.assertNotEquals(pkgs[3], npkg4)
self.assertEquals(pkgs[3].pkgtup, npkg3.pkgtup)
self.assertEquals(pkgs[3].state, 'Reinstall')
def testDownUpMerge4(self, xstate='Update'):
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
opkg3 = self._pkg_new('foo', version='3', state='Updated')
npkg3 = self._pkg_new('foo', version='7', state=xstate)
opkg4 = self._pkg_new('foo', version='7', state='Downgraded')
npkg4 = self._pkg_new('foo', version='3', state='Downgrade')
trans = []
trans.append(self._trans_new([opkg2, npkg2]))
trans.append(self._trans_new([opkg4, npkg4]))
trans.append(self._trans_new([opkg3, npkg3]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 3)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[1], npkg2)
self.assertNotEquals(pkgs[2], opkg3)
self.assertNotEquals(pkgs[2].state, 'Update')
self.assertNotEquals(pkgs[2].state, opkg4.state)
self.assertNotEquals(pkgs[2], npkg4)
self.assertEquals(pkgs[2].pkgtup, npkg3.pkgtup)
if xstate == 'Obsoleting':
self.assertEquals(pkgs[2].state, 'Obsoleting')
else:
self.assertEquals(pkgs[2].state, 'Reinstall')
def testDownUpMerge5(self, xstate='Downgrade'):
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
opkg3 = self._pkg_new('foo', version='21', state='Downgraded')
npkg3 = self._pkg_new('foo', version='3', state=xstate)
opkg4 = self._pkg_new('foo', version='3', state='Updated')
npkg4 = self._pkg_new('foo', version='7', state='Update')
opkg5 = self._pkg_new('foo', version='7', state='Updated')
npkg5 = self._pkg_new('foo', version='13', state='Update')
trans = []
trans.append(self._trans_new([opkg2, npkg2]))
trans.append(self._trans_new([opkg3, npkg3]))
trans.append(self._trans_new([opkg4, npkg4]))
trans.append(self._trans_new([opkg5, npkg5]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 4)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[0].state, opkg2.state)
self.assertEquals(pkgs[1], npkg2)
self.assertEquals(pkgs[1].state, npkg2.state)
self.assertEquals(pkgs[2], npkg5)
self.assertEquals(pkgs[2].state, xstate)
self.assertEquals(pkgs[3], opkg3)
self.assertEquals(pkgs[3].state, opkg3.state)
def testInRmMerge1(self, xstate='Install', estate='Erase'):
npkg1 = self._pkg_new('foo', state=xstate)
npkg2 = self._pkg_new('foo', state=estate)
npkg3 = self._pkg_new('bar', version='6', state='True-Install')
trans = []
trans.append(self._trans_new([npkg1]))
trans.append(self._trans_new([npkg2]))
trans.append(self._trans_new([npkg3]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 1)
self.assertEquals(pkgs[0], npkg3)
self.assertEquals(pkgs[0].state, npkg3.state)
def testInRmMerge2(self, xstate='Install'):
self.testInRmMerge1(xstate, 'Obsoleted')
def testInRmInonlyMerge1(self, xstate='True-Install', estate='Erase'):
npkg1 = self._pkg_new('foo', state=xstate)
npkg2 = self._pkg_new('foo', version='2', state=xstate)
npkg3 = self._pkg_new('foo', version='3', state=xstate)
npkg4 = self._pkg_new('foo', state=estate)
npkg5 = self._pkg_new('foo', version='2', state=estate)
npkg6 = self._pkg_new('foo', version='3', state=estate)
npkg9 = self._pkg_new('bar', version='6', state=xstate)
trans = []
trans.append(self._trans_new([npkg1]))
trans.append(self._trans_new([npkg2]))
trans.append(self._trans_new([npkg3]))
trans.append(self._trans_new([npkg4]))
trans.append(self._trans_new([npkg5]))
trans.append(self._trans_new([npkg6]))
trans.append(self._trans_new([npkg9]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 1)
self.assertEquals(pkgs[0], npkg9)
self.assertEquals(pkgs[0].state, npkg9.state)
def testInRmInonlyMerge2(self, xstate='True-Install'):
self.testInRmInonlyMerge1(xstate, 'Obsoleted')
def testUpRmMerge1(self, xstate='Update'):
npkg1 = self._pkg_new('foo')
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state=xstate)
npkg3 = self._pkg_new('bar', version='6', state='Erase')
trans = []
trans.append(self._trans_new([npkg1]))
trans.append(self._trans_new([opkg2, npkg2]))
trans.append(self._trans_new([npkg3]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 2)
self.assertEquals(pkgs[0], opkg2)
self.assertEquals(pkgs[0].state, npkg3.state)
self.assertEquals(pkgs[1], npkg1)
self.assertEquals(pkgs[1].state, npkg1.state)
def testUpRmMerge2(self, xstate='True-Install'):
npkg1 = self._pkg_new('foo')
npkg4 = self._pkg_new('bar', version='4', state=xstate)
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state='Update')
npkg3 = self._pkg_new('bar', version='6', state='Erase')
trans = []
trans.append(self._trans_new([npkg1, npkg4]))
trans.append(self._trans_new([opkg2, npkg2]))
trans.append(self._trans_new([npkg3]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 1)
self.assertEquals(pkgs[0], npkg1)
self.assertEquals(pkgs[0].state, npkg1.state)
def testUpRmMerge3(self, xstate='Update'):
npkg1 = self._pkg_new('foo')
npkg4 = self._pkg_new('bar', version='4', state='Dep-Install')
opkg2 = self._pkg_new('bar', version='4', state='Updated')
npkg2 = self._pkg_new('bar', version='6', state=xstate)
npkg3 = self._pkg_new('bar', version='6', state='Erase')
trans = []
trans.append(self._trans_new([npkg1, npkg4]))
trans.append(self._trans_new([opkg2, npkg2]))
trans.append(self._trans_new([npkg3]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 1)
self.assertEquals(pkgs[0], npkg1)
self.assertEquals(pkgs[0].state, npkg1.state)
def testRmInMerge1(self, xstate='Install', estate='Erase'):
npkg1 = self._pkg_new('foo', state=xstate)
npkg2 = self._pkg_new('foo', state=estate)
npkg3 = self._pkg_new('bar', version='6', state='True-Install')
trans = []
trans.append(self._trans_new([npkg2]))
trans.append(self._trans_new([npkg1]))
trans.append(self._trans_new([npkg3]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 2)
self.assertEquals(pkgs[0], npkg3)
self.assertEquals(pkgs[0].state, npkg3.state)
self.assertEquals(pkgs[1], npkg1)
if xstate == 'Obsoleting':
self.assertEquals(pkgs[1].state, 'Obsoleting')
else:
self.assertEquals(pkgs[1].state, 'Reinstall')
def testRmInMerge2(self, xstate='Install'):
self.testRmInMerge1(xstate, 'Obsoleted')
def testUpRmInlMerge1(self, xstate='Update', ystate='Install',
estate='Erase'):
npkg1 = self._pkg_new('bar', version='6', state='True-Install')
opkg2 = self._pkg_new('foo', version='3', state='Updated')
npkg2 = self._pkg_new('foo', version='7', state=xstate)
npkg3 = self._pkg_new('foo', version='7', state=estate)
npkg4 = self._pkg_new('foo', state=ystate)
trans = []
trans.append(self._trans_new([npkg1]))
trans.append(self._trans_new([opkg2, npkg2]))
trans.append(self._trans_new([npkg3]))
trans.append(self._trans_new([npkg4]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 3)
self.assertEquals(pkgs[0], npkg1)
self.assertEquals(pkgs[0].state, npkg1.state)
self.assertEquals(pkgs[1].pkgtup, npkg4.pkgtup)
if ystate == 'Obsoleting':
self.assertEquals(pkgs[1].state, "Obsoleting")
else:
self.assertEquals(pkgs[1].state, "Downgrade")
self.assertEquals(pkgs[2].pkgtup, opkg2.pkgtup)
self.assertEquals(pkgs[2].state, "Downgraded")
def testUpRmInlMerge2(self, xstate='Update', ystate='Install'):
self.testUpRmInlMerge1(xstate, ystate, 'Obsoleted')
def testUpRmInuMerge1(self, xstate='Update', ystate='Install',
estate='Erase'):
npkg1 = self._pkg_new('bar', version='6', state='True-Install')
opkg2 = self._pkg_new('foo', version='3', state='Updated')
npkg2 = self._pkg_new('foo', version='7', state=xstate)
npkg3 = self._pkg_new('foo', version='7', state=estate)
npkg4 = self._pkg_new('foo', version='4', state=ystate)
trans = []
trans.append(self._trans_new([npkg1]))
trans.append(self._trans_new([opkg2, npkg2]))
trans.append(self._trans_new([npkg3]))
trans.append(self._trans_new([npkg4]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 3)
self.assertEquals(pkgs[0], npkg1)
self.assertEquals(pkgs[0].state, npkg1.state)
self.assertEquals(pkgs[1].pkgtup, opkg2.pkgtup)
self.assertEquals(pkgs[1].state, "Updated")
self.assertEquals(pkgs[2].pkgtup, npkg4.pkgtup)
if ystate == 'Obsoleting':
self.assertEquals(pkgs[2].state, "Obsoleting")
else:
self.assertEquals(pkgs[2].state, "Update")
def testUpRmInuMerge2(self, xstate='Update', ystate='Install'):
self.testUpRmInuMerge1(xstate, ystate, 'Obsoleted')
def testBrokenUpMerge1(self, xstate='Update', estate='Erase'):
# This is "broken", so as long as we don't die it's all good.
# The below test basically documents what we do.
opkg1 = self._pkg_new('foo', version='1', state='Updated')
npkg1 = self._pkg_new('foo', version='2', state=xstate)
opkg2 = self._pkg_new('foo', version='11', state='Updated')
npkg2 = self._pkg_new('foo', version='21', state=xstate)
opkg3 = self._pkg_new('foo', version='110', state='Updated')
npkg3 = self._pkg_new('foo', version='210', state=xstate)
npkg4 = self._pkg_new('foo', version='2', state=estate)
npkg5 = self._pkg_new('foo', version='21', state=estate)
npkg6 = self._pkg_new('foo', version='210', state=estate)
trans = []
trans.append(self._trans_new([opkg1, npkg1]))
trans.append(self._trans_new([opkg2, npkg2]))
trans.append(self._trans_new([opkg3, npkg3]))
trans.append(self._trans_new([npkg4]))
trans.append(self._trans_new([npkg5]))
trans.append(self._trans_new([npkg6]))
merged = self._merge_new(trans)
self.assertMergedMain(merged, trans)
pkgs = merged.trans_data
self.assertEquals(len(pkgs), 3)
self.assertEquals(pkgs[0], opkg1)
self.assertEquals(pkgs[0].state, 'Updated')
self.assertEquals(pkgs[1], opkg2)
self.assertEquals(pkgs[1].state, 'Updated')
self.assertEquals(pkgs[2], opkg3)
self.assertEquals(pkgs[2].state, estate)
# Obsoleting is the _painful_ one because it really should be a state, but
# an attribute. So "Obsoleting" can be any of:
# Install*, Reinstall, Update, Downgrade
def testObsSIM1(self):
self.testSimpleInMerge1(xstate='Obsoleting')
def testObsSIM2(self):
self.testSimpleInMerge2(xstate='Obsoleting')
def testObsSUM1(self):
self.testSimpleUpMerge1(xstate='Obsoleting')
def testObsSUM2(self):
self.testSimpleUpMerge2(xstate='Obsoleting')
def testObsSUM3(self):
self.testSimpleUpMerge3(xstate='Obsoleting')
def testObsSUMM1(self):
self.testSimpleUpMultiMerge1(xstate='Obsoleting')
def testObsUDM1(self):
self.testUpDownMerge1(xstate='Obsoleting')
def testObsUDM2(self):
self.testUpDownMerge2(xstate='Obsoleting')
def testObsUDM4(self):
self.testUpDownMerge4(xstate='Obsoleting')
def testObsUDM5(self):
self.testUpDownMerge5(xstate='Obsoleting')
def testObsDUM1(self):
self.testDownUpMerge1(xstate='Obsoleting')
def testObsDUM2(self):
self.testDownUpMerge2(xstate='Obsoleting')
def testObsDUM4(self):
self.testDownUpMerge4(xstate='Obsoleting')
def testObsDUM5(self):
self.testDownUpMerge5(xstate='Obsoleting')
def testObsIRM1(self):
self.testInRmMerge1(xstate='Obsoleting')
def testObsIRM2(self):
self.testInRmMerge2(xstate='Obsoleting')
def testObsIRMM1(self):
self.testInRmInonlyMerge1(xstate='Obsoleting')
def testObsIRMM2(self):
    # BUG FIX: this called testInRmInonlyMerge1 and was therefore an exact
    # duplicate of testObsIRMM1.  Following the testObsIRM1/testObsIRM2
    # pattern (which call testInRmMerge1/testInRmMerge2 respectively), it
    # must exercise the 'Obsoleted' erase variant via testInRmInonlyMerge2.
    self.testInRmInonlyMerge2(xstate='Obsoleting')
def testObsURM1(self):
self.testUpRmMerge1(xstate='Obsoleting')
def testObsURM2(self):
self.testUpRmMerge2(xstate='Obsoleting')
def testObsURM3(self):
self.testUpRmMerge3(xstate='Obsoleting')
def testObsRIM1(self):
self.testRmInMerge1(xstate='Obsoleting')
def testObsRIM2(self):
self.testRmInMerge2(xstate='Obsoleting')
def testObsURIlM1(self):
self.testUpRmInlMerge1(xstate='Obsoleting')
self.testUpRmInlMerge1(ystate='Obsoleting')
self.testUpRmInlMerge1(xstate='Obsoleting', ystate='Obsoleting')
def testObsURIlM2(self):
self.testUpRmInlMerge2(xstate='Obsoleting')
self.testUpRmInlMerge2(ystate='Obsoleting')
self.testUpRmInlMerge2(xstate='Obsoleting', ystate='Obsoleting')
def testObsURIuM1(self):
self.testUpRmInuMerge1(xstate='Obsoleting')
self.testUpRmInuMerge1(ystate='Obsoleting')
self.testUpRmInuMerge1(xstate='Obsoleting', ystate='Obsoleting')
def testObsURIuM2(self):
self.testUpRmInuMerge2(xstate='Obsoleting')
self.testUpRmInuMerge2(ystate='Obsoleting')
self.testUpRmInuMerge2(xstate='Obsoleting', ystate='Obsoleting')
| gpl-2.0 |
angelapper/odoo | addons/hr_payroll/wizard/hr_payroll_contribution_register_report.py | 47 | 1128 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import time
from datetime import datetime
from dateutil import relativedelta
from openerp.osv import fields, osv
class payslip_lines_contribution_register(osv.osv_memory):
    # Transient wizard model: collects a date range from the user and
    # launches the "payslip lines by contribution register" QWeb report.
    _name = 'payslip.lines.contribution.register'
    _description = 'PaySlip Lines by Contribution Registers'
    _columns = {
        'date_from': fields.date('Date From', required=True),
        'date_to': fields.date('Date To', required=True),
    }
    _defaults = {
        # Default period: first day of the current month ...
        'date_from': lambda *a: time.strftime('%Y-%m-01'),
        # ... through its last day (first day of next month minus one day,
        # truncated to the YYYY-MM-DD portion of the datetime string).
        'date_to': lambda *a: str(datetime.now() + relativedelta.relativedelta(months=+1, day=1, days=-1))[:10],
    }
    def print_report(self, cr, uid, ids, context=None):
        # 'ids'/'model' describe the contribution registers the report runs
        # against (taken from the active selection); 'form' carries the
        # wizard's date-range values read back from this transient record.
        datas = {
            'ids': context.get('active_ids', []),
            'model': 'hr.contribution.register',
            'form': self.read(cr, uid, ids, context=context)[0]
        }
        return self.pool['report'].get_action(
            cr, uid, [], 'hr_payroll.report_contributionregister', data=datas, context=context
        )
| agpl-3.0 |
jmchilton/pulsar | pulsar/client/path_mapper.py | 2 | 4246 | import os.path
from .action_mapper import FileActionMapper
from .action_mapper import path_type
from .util import PathHelper
from galaxy.util import in_directory
class PathMapper(object):
    """ Ties together a FileActionMapper and remote job configuration returned
    by the Pulsar setup method to pre-determine the location of files for staging
    on the remote Pulsar server.

    This is not useful when rewrite_paths (as has traditionally been done with
    the Pulsar) because when doing that the Pulsar determines the paths as files are
    uploaded. When rewrite_paths is disabled however, the destination of files
    needs to be determined prior to transfer so an object of this class can be
    used.
    """

    def __init__(
        self,
        client,
        remote_job_config,
        local_working_directory,
        action_mapper=None,
    ):
        # client: Pulsar client; only used to build the default FileActionMapper
        #     when none is supplied.
        # remote_job_config: dict from the Pulsar setup call; must contain the
        #     remote directory layout and the remote path separator.
        # local_working_directory: local job directory, used by
        #     remote_output_path_rewrite to tell workdir outputs apart.
        # action_mapper: optional pre-configured FileActionMapper.
        self.local_working_directory = local_working_directory
        if not action_mapper:
            action_mapper = FileActionMapper(client)
        self.action_mapper = action_mapper
        self.input_directory = remote_job_config["inputs_directory"]
        self.output_directory = remote_job_config["outputs_directory"]
        self.working_directory = remote_job_config["working_directory"]
        self.unstructured_files_directory = remote_job_config["unstructured_files_directory"]
        self.config_directory = remote_job_config["configs_directory"]
        # Join remote paths with the remote server's separator, not os.sep --
        # the remote OS may differ from the local one.
        separator = remote_job_config["system_properties"]["separator"]
        self.path_helper = PathHelper(separator)

    def remote_output_path_rewrite(self, local_path):
        """Remote path for an output; paths under the local working
        directory are treated as workdir outputs."""
        output_type = path_type.OUTPUT
        if in_directory(local_path, self.local_working_directory):
            output_type = path_type.OUTPUT_WORKDIR
        remote_path = self.__remote_path_rewrite(local_path, output_type)
        return remote_path

    def remote_input_path_rewrite(self, local_path):
        """Remote path for an input file (None if no staging is needed)."""
        remote_path = self.__remote_path_rewrite(local_path, path_type.INPUT)
        return remote_path

    def remote_version_path_rewrite(self, local_path):
        """Remote path for the tool-version file; always staged under the
        fixed name COMMAND_VERSION."""
        remote_path = self.__remote_path_rewrite(local_path, path_type.OUTPUT, name="COMMAND_VERSION")
        return remote_path

    def check_for_arbitrary_rewrite(self, local_path):
        """Return (remote_path, unstructured_name_map) for an arbitrary path,
        or (path_rewrite, []) when the action needs no staging."""
        path = str(local_path)  # Use false_path if needed.
        action = self.action_mapper.action(path, path_type.UNSTRUCTURED)
        if not action.staging_needed:
            return action.path_rewrite(self.path_helper), []
        unique_names = action.unstructured_map()
        name = unique_names[path]
        remote_path = self.path_helper.remote_join(self.unstructured_files_directory, name)
        return remote_path, unique_names

    def __remote_path_rewrite(self, dataset_path, dataset_path_type, name=None):
        """ Return remote path of this file (if staging is required) else None.
        """
        path = str(dataset_path)  # Use false_path if needed.
        action = self.action_mapper.action(path, dataset_path_type)
        if action.staging_needed:
            if name is None:
                name = os.path.basename(path)
            remote_directory = self.__remote_directory(dataset_path_type)
            remote_path_rewrite = self.path_helper.remote_join(remote_directory, name)
        else:
            # Actions which don't require staging MUST define a path_rewrite
            # method.
            remote_path_rewrite = action.path_rewrite(self.path_helper)

        return remote_path_rewrite

    def __action(self, dataset_path, dataset_path_type):
        # NOTE(review): not referenced anywhere in this class; appears to be a
        # leftover helper duplicating the lookup in __remote_path_rewrite.
        path = str(dataset_path)  # Use false_path if needed.
        action = self.action_mapper.action(path, dataset_path_type)
        return action

    def __remote_directory(self, dataset_path_type):
        # Map a path type onto the matching remote directory from the job config.
        if dataset_path_type in [path_type.OUTPUT]:
            return self.output_directory
        elif dataset_path_type in [path_type.WORKDIR, path_type.OUTPUT_WORKDIR]:
            return self.working_directory
        elif dataset_path_type in [path_type.INPUT]:
            return self.input_directory
        else:
            message = "PathMapper cannot handle path type %s" % dataset_path_type
            raise Exception(message)
__all__ = ['PathMapper']
| apache-2.0 |
rlutes/volttron-applications | nrel/agents/SunspecInverter/sunspecinverter/agent.py | 2 | 4737 | """
NREL
This module controls a Sunspec Inverter.
"""
# !/usr/local/bin/python
import sys
import logging
import time
from datetime import datetime
import xmltodict
import requests
from OpenSSL import crypto
from helper import *
from DER import DERProgramList,DERControlBase
from TLS import Certificate_Mgmt
from Device import Inverter
from utilities import *
from volttron.platform.agent.utils import jsonapi
from volttron.platform.agent import utils
from volttron.platform.messaging import headers as headers_mod
from volttron.platform.vip.agent import Agent, Core, RPC
from volttron.platform.vip.agent import *
utils.setup_logging()
_log = logging.getLogger(__name__)
__version__ = '4.0'
poll_interval = 600
pub_interval = 600
class SunspecAgent(Agent):
    """VOLTTRON agent exposing a Sunspec inverter through the platform's
    RPC interface, polling DER program lists from a 2030.5-style server."""

    def __init__(self, config_path, **kwargs):
        # TODO: create guid, lfid and sfid
        super(SunspecAgent, self).__init__(**kwargs)
        print("Init function called")
        config = utils.load_config(config_path)
        # All connection parameters come from the agent's config file.
        self.inverter = Inverter(config['device_type'], config['slave_id'], config['name'], config['pathlist'], config['baudrate'], config['parity'], config['ip'], config['port'], config['timeout'])
        self.pin = config['pin']
        self.inverter_name = config['Inverter Name']
        self.server_ip = config['server_IP']
        self.server_port = config['server_port']
        self.device_capability_link = config['device_capability_link']
        # NOTE(review): never read by the @Core.periodic schedules below, which
        # captured the module-level ``poll_interval`` (600) at class-definition
        # time -- confirm which interval is actually intended.
        self.poll_interval = 300
        self.DERPgm_list = None
        self.DER_list = None
        self.EndDev = None
        self.poll_list = []

    def create_cert(self):
        """Create a self-signed CA key/certificate pair under certificates/."""
        Cert = Certificate_Mgmt()
        Cert.cakey = Cert.createKeyPair(crypto.TYPE_RSA, 2048)
        Cert.careq = Cert.createCertRequest(Cert.cakey, CN='Certificate Authority')
        # CA certificate is valid for five years.
        Cert.cacert = Cert.createCertificate(Cert.careq, (Cert.careq, Cert.cakey), 0, (0, 60*60*24*365*5))
        with open('certificates/CA.pkey', 'w') as capkey:
            capkey.write(
                crypto.dump_privatekey(crypto.FILETYPE_ASN1, Cert.cakey).decode('utf-8')
            )
        # BUG FIX: the message previously said "simple/CA.cert" although the
        # certificate is written to certificates/CA.cert below.
        print('Creating Certificate Authority certificate in "certificates/CA.cert"')
        with open('certificates/CA.cert', 'w') as ca:
            ca.write(
                crypto.dump_certificate(crypto.FILETYPE_ASN1, Cert.cacert).decode('utf-8')
            )

    def initialize_comm(self):
        """Fetch the device capability tree and build the DER helper objects."""
        self.DevCap = DeviceCapability(self.device_capability_link)
        self.EndDev = EndDevice(self.DevCap.EndDeviceLink, self.inverter)
        for fsa in self.EndDev.FSAList.FSAs:
            # Adopt the server-advertised poll rate of the first FSA only.
            global poll_interval
            poll_interval = fsa.DERProgramList.pollRate
            break
        self.DERControlBase = DERControlBase(self.inverter, {})
        self.DER_list = self.EndDev.DERList
        print("all classes init")

    @Core.receiver('onstart')
    def onstart(self, sender, **kwargs):
        self.initialize_comm()

    @Core.receiver('onstop')
    def close_con(self, sender, **kwargs):
        # Release the inverter connection when the agent shuts down.
        if self.inverter is not None:
            self.inverter.close()

    @Core.periodic(60*10)
    def push_updates(self, **kwargs):
        if self.DER_list is not None:
            self.DER_list.push_updates()

    # NOTE(review): the decorator argument is evaluated once when the class is
    # created, so updating the global in initialize_comm() does not change
    # this schedule.
    @Core.periodic(poll_interval)
    def poll_controls(self, **kwargs):
        if self.EndDev is not None:
            for fsa in self.EndDev.FSAList.FSAs:
                fsa.DERProgramList.poll()

    @RPC.export
    def get_device_values(self, map):
        """Read one value from the inverter data model.

        ``map`` must support both ``map[0]`` (attribute name) and
        ``map.get(attr)`` (package type) -- TODO confirm the exact structure
        expected by callers.
        """
        attr = map[0]
        package_type = map.get(attr)
        control_val = {
            'DER_Control': self.DERControlBase[attr],
            'DER_Availability': self.DER_list.DERAvailability[attr],
            'DER_Settings': self.DER_list.DERSettings[attr],
            'DER_Status': self.DER_list.DERStatus[attr],
            'DER_Capability': self.DER_list.DERCapability[attr]
        }
        result = {attr: control_val[package_type]()}
        if result[attr] is None:
            _log.warning("Set value before reading")
        return result

    @RPC.export
    def set_device_values(self, map):
        """Write one control value to the inverter.

        BUG FIX: ``attr`` and ``package_type`` were referenced without ever
        being assigned, so every call raised NameError; extract them from
        ``map`` the same way get_device_values does.
        """
        attr = map[0]
        package_type = map.get(attr)
        if package_type == 'DER_Control':
            self.DERControlBase.set_controls(attr)
        else:
            _log.info("Not writable")

    def dict_control_values(self, map):
        """Read every (attr, package_type) pair in ``map``.

        NOTE(review): ``{k, v}`` builds a *set*, which get_device_values
        cannot index with ``[0]`` -- this looks broken; confirm the intended
        argument shape before relying on this method.
        """
        control_stat = {}
        for k, v in map.items():
            control_stat[k] = self.get_device_values({k, v})
        return control_stat
def main(argv=sys.argv):
    '''Main method called by the eggsecutable.'''
    # Launch the agent through VOLTTRON's standard entry helper.
    utils.vip_main(SunspecAgent)


if __name__ == '__main__':
    # Entry point for script
    try:
        sys.exit(main())
    except KeyboardInterrupt:
        pass
| bsd-3-clause |
sneaker-rohit/PI2-ns-3 | src/wave/bindings/callbacks_list.py | 40 | 2502 | callback_classes = [
['void', 'ns3::Ptr<ns3::Packet>', 'ns3::WifiMacHeader const*', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::Packet const>', 'ns3::Address const&', 'unsigned int', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::WifiMacHeader const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Packet>', 'ns3::Mac48Address', 'ns3::Mac48Address', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Mac48Address', 'unsigned char', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Mac48Address', 'unsigned char', 'bool', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'unsigned int', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::Socket>', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['bool', 'ns3::Ptr<ns3::Socket>', 'ns3::Address const&', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<ns3::Packet const>', 'unsigned short', 'ns3::Address const&', 'ns3::Address const&', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
['unsigned char', 'ns3::Ptr<ns3::QueueItem>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'],
]
| gpl-2.0 |
softtyphoon/tz | tools/zl/11/query_proc.py | 21 | 2217 |
import urllib
import urllib2
import cookielib
import time
import StringIO
import gzip
import sys
import re
import time
import os
import copy
import zlib
import random
import urlparse
from patent_query import patent_query
class quey_proc():
def __init__(self, input_file='setting.csv', output_file=u'结果.csv'):
self.ifnp = input_file
self.ifn = ''
self.ofnp = output_file
self.sfnpath = 'ea2c86'
self.sfn= ''
def del_sf(self):
if os.path.exists(self.sfnpath):
os.remove(self.sfnpath)
def clr_sf(self):
with open(self.sfnpath, 'w+') as self.sfn:
self.sfn.write('')
def get_corp(self):
bp = []
if os.path.exists(self.sfnpath):
self.sfn = open(self.sfnpath, 'r')
setting = self.sfn.readline().decode('gbk').split(',')
if len(setting) == 3:
print u'发现上次爬取断点:', setting[2]
print u'是否从上次断点处继续爬取?Y是,N从头开始爬取'
an = raw_input().lower()
if an == 'y':
bp = setting
self.sfn.close()
# with open(self.ifnp, 'r') as self.ifn:
# corps = self.ifn.readline()
self.ifn = open(self.ifnp, 'r')
if bp == []:
st = True
else:
st = False
while True:
i = self.ifn.readline().strip().decode('gbk').split(',')
if len(i) < 3:
break
if i == bp:
st = True
if st:
self.sfn = open(self.sfnpath, 'w+')
with open(self.sfnpath, 'w+') as self.sfn:
self.sfn.write((u','.join(i)).encode('gbk'))
yield i
# def test():
# for i in range(1, 10):
# time.sleep(5)
# yield i
if __name__ == "__main__":
    # Wire the breakpoint-aware reader to the patent crawler: for each
    # record run a query, save the result, and sleep a random 1.5-2.5 s
    # between requests.  On normal completion the breakpoint file is
    # cleared and removed.
    a = quey_proc()
    b = patent_query()
    for i in a.get_corp():
        # print i
        # print u'开始处理:', i[0], i[1], i[2]
        result = b.run(i[2], i[0], i[1])
        b.save2file(result)
        time.sleep(random.uniform(1.5, 2.5))
    a.clr_sf()
    a.del_sf()
| gpl-2.0 |
microelly2/cadquery-freecad-module | CadQuery/Libs/future/backports/urllib/request.py | 78 | 96184 | """
Ported using Python-Future from the Python 3.3 standard library.
An extensible library for opening URLs using a variety of protocols
The simplest way to use this module is to call the urlopen function,
which accepts a string containing a URL or a Request object (described
below). It opens the URL and returns the results as file-like
object; the returned object has some extra methods described below.
The OpenerDirector manages a collection of Handler objects that do
all the actual work. Each Handler implements a particular protocol or
option. The OpenerDirector is a composite object that invokes the
Handlers needed to open the requested URL. For example, the
HTTPHandler performs HTTP GET and POST requests and deals with
non-error returns. The HTTPRedirectHandler automatically deals with
HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
deals with digest authentication.
urlopen(url, data=None) -- Basic usage is the same as original
urllib. pass the url and optionally data to post to an HTTP URL, and
get a file-like object back. One difference is that you can also pass
a Request instance instead of URL. Raises a URLError (subclass of
IOError); for HTTP errors, raises an HTTPError, which can also be
treated as a valid response.
build_opener -- Function that creates a new OpenerDirector instance.
Will install the default handlers. Accepts one or more Handlers as
arguments, either instances or Handler classes that it will
instantiate. If one of the argument is a subclass of the default
handler, the argument will be installed instead of the default.
install_opener -- Installs a new opener as the default opener.
objects of interest:
OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages
the Handler classes, while dealing with requests and responses.
Request -- An object that encapsulates the state of a request. The
state can be as simple as the URL. It can also include extra HTTP
headers, e.g. a User-Agent.
BaseHandler --
internals:
BaseHandler and parent
_call_chain conventions
Example usage:
import urllib.request
# set up authentication info
authinfo = urllib.request.HTTPBasicAuthHandler()
authinfo.add_password(realm='PDQ Application',
uri='https://mahler:8092/site-updates.py',
user='klem',
passwd='geheim$parole')
proxy_support = urllib.request.ProxyHandler({"http" : "http://ahad-haam:3128"})
# build a new opener that adds authentication and caching FTP handlers
opener = urllib.request.build_opener(proxy_support, authinfo,
urllib.request.CacheFTPHandler)
# install it
urllib.request.install_opener(opener)
f = urllib.request.urlopen('http://www.python.org/')
"""
# XXX issues:
# If an authentication error handler that tries to perform
# authentication for some reason but fails, how should the error be
# signalled? The client needs to know the HTTP error code. But if
# the handler knows that the problem was, e.g., that it didn't know
# that hash algo that requested in the challenge, it would be good to
# pass that information along to the client, too.
# ftp errors aren't handled cleanly
# check digest against correct (i.e. non-apache) implementation
# Possible extensions:
# complex proxies XXX not sure what exactly was meant by this
# abstract factory for opener
from __future__ import absolute_import, division, print_function, unicode_literals
from future.builtins import bytes, dict, filter, input, int, map, open, str
from future.utils import PY2, PY3, raise_with_traceback
import base64
import bisect
import hashlib
import array
from future.backports import email
from future.backports.http import client as http_client
from .error import URLError, HTTPError, ContentTooShortError
from .parse import (
urlparse, urlsplit, urljoin, unwrap, quote, unquote,
splittype, splithost, splitport, splituser, splitpasswd,
splitattr, splitquery, splitvalue, splittag, to_bytes, urlunparse)
from .response import addinfourl, addclosehook
import io
import os
import posixpath
import re
import socket
import sys
import time
import collections
import tempfile
import contextlib
import warnings
# check for SSL; _have_ssl gates the certificate-verifying branch in urlopen()
# and whether HTTPSHandler gets registered.
try:
    import ssl
    # Not available in the SSL module in Py2:
    from ssl import SSLContext
except ImportError:
    _have_ssl = False
else:
    _have_ssl = True
# Public API of this module.
__all__ = [
    # Classes
    'Request', 'OpenerDirector', 'BaseHandler', 'HTTPDefaultErrorHandler',
    'HTTPRedirectHandler', 'HTTPCookieProcessor', 'ProxyHandler',
    'HTTPPasswordMgr', 'HTTPPasswordMgrWithDefaultRealm',
    'AbstractBasicAuthHandler', 'HTTPBasicAuthHandler', 'ProxyBasicAuthHandler',
    'AbstractDigestAuthHandler', 'HTTPDigestAuthHandler', 'ProxyDigestAuthHandler',
    'HTTPHandler', 'FileHandler', 'FTPHandler', 'CacheFTPHandler',
    'UnknownHandler', 'HTTPErrorProcessor',
    # Functions
    'urlopen', 'install_opener', 'build_opener',
    'pathname2url', 'url2pathname', 'getproxies',
    # Legacy interface
    'urlretrieve', 'urlcleanup', 'URLopener', 'FancyURLopener',
]

# used in User-Agent header sent
__version__ = sys.version[:3]

# Module-wide default opener: created lazily by urlopen() or installed
# explicitly via install_opener().
_opener = None
def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **_3to2kwargs):
    """Open *url* (a string or Request object) and return a response object.

    cafile/capath/cadefault behave as keyword-only arguments (emulated for
    Python 2 through **_3to2kwargs); passing any of them builds a one-off,
    certificate-verifying HTTPS opener.  Otherwise the cached module-level
    default opener is used (and created on first call).
    """
    # Keyword-only emulation: pop the recognised names, default the rest.
    # NOTE: unrecognised keyword arguments are silently ignored here.
    if 'cadefault' in _3to2kwargs: cadefault = _3to2kwargs['cadefault']; del _3to2kwargs['cadefault']
    else: cadefault = False
    if 'capath' in _3to2kwargs: capath = _3to2kwargs['capath']; del _3to2kwargs['capath']
    else: capath = None
    if 'cafile' in _3to2kwargs: cafile = _3to2kwargs['cafile']; del _3to2kwargs['cafile']
    else: cafile = None
    global _opener
    if cafile or capath or cadefault:
        if not _have_ssl:
            raise ValueError('SSL support not available')
        # Build a dedicated verifying HTTPS opener; it is NOT cached.
        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        context.options |= ssl.OP_NO_SSLv2
        context.verify_mode = ssl.CERT_REQUIRED
        if cafile or capath:
            context.load_verify_locations(cafile, capath)
        else:
            context.set_default_verify_paths()
        https_handler = HTTPSHandler(context=context, check_hostname=True)
        opener = build_opener(https_handler)
    elif _opener is None:
        # Lazily create and cache the default opener.
        _opener = opener = build_opener()
    else:
        opener = _opener
    return opener.open(url, data, timeout)
def install_opener(opener):
    """Install *opener* as the module-wide default used by urlopen()."""
    global _opener
    _opener = opener
# Temporary files created by urlretrieve(); removed again by urlcleanup().
_url_tempfiles = []


def urlretrieve(url, filename=None, reporthook=None, data=None):
    """
    Retrieve a URL into a temporary location on disk.

    Requires a URL argument. If a filename is passed, it is used as
    the temporary file location. The reporthook argument should be
    a callable that accepts a block number, a read size, and the
    total file size of the URL target. The data argument should be
    valid URL encoded data.

    If a filename is passed and the URL points to a local resource,
    the result is a copy from local file to new file.

    Returns a tuple containing the path to the newly created
    data file as well as the resulting HTTPMessage object.
    """
    url_type, path = splittype(url)

    with contextlib.closing(urlopen(url, data)) as fp:
        headers = fp.info()

        # Just return the local path and the "headers" for file://
        # URLs. No sense in performing a copy unless requested.
        if url_type == "file" and not filename:
            return os.path.normpath(path), headers

        # Handle temporary file setup.
        if filename:
            tfp = open(filename, 'wb')
        else:
            # Auto-named temp file; remember it for urlcleanup().
            tfp = tempfile.NamedTemporaryFile(delete=False)
            filename = tfp.name
            _url_tempfiles.append(filename)

        with tfp:
            result = filename, headers
            bs = 1024*8     # copy in 8 KiB blocks
            size = -1       # -1 means total size unknown
            read = 0
            blocknum = 0
            if "content-length" in headers:
                size = int(headers["Content-Length"])

            if reporthook:
                reporthook(blocknum, bs, size)

            while True:
                block = fp.read(bs)
                if not block:
                    break
                read += len(block)
                tfp.write(block)
                blocknum += 1
                if reporthook:
                    reporthook(blocknum, bs, size)

    # Complain if the server promised more bytes than it delivered.
    if size >= 0 and read < size:
        raise ContentTooShortError(
            "retrieval incomplete: got only %i out of %i bytes"
            % (read, size), result)

    return result
def urlcleanup():
    """Remove temp files created by urlretrieve() and reset the default opener."""
    for temp_file in _url_tempfiles:
        try:
            os.unlink(temp_file)
        except EnvironmentError:
            # Best-effort cleanup: ignore files already gone or locked.
            pass

    del _url_tempfiles[:]
    global _opener
    if _opener:
        _opener = None
# Strip a trailing ":port" from a host.  re.ASCII (Python 3 only) keeps \d
# from matching non-ASCII digits; feature-detect it instead of branching on
# PY3 -- on Python 2 the flag falls back to 0, exactly matching the old
# version-specific branches.
_cut_port_re = re.compile(r":\d+$", getattr(re, "ASCII", 0))


def request_host(request):
    """Return request-host, as defined by RFC 2965.

    Variation from RFC: returned value is lowercased, for convenient
    comparison.
    """
    url = request.full_url
    host = urlparse(url)[1]
    if host == "":
        # Relative URL: fall back to the explicit Host header, if any.
        host = request.get_header("Host", "")

    # remove port, if present
    host = _cut_port_re.sub("", host, 1)
    return host.lower()
class Request(object):
    """Encapsulates one request: URL, optional body, headers, plus the
    origin-host/unverifiable state defined by RFC 2965 for cookie handling."""

    def __init__(self, url, data=None, headers={},
                 origin_req_host=None, unverifiable=False,
                 method=None):
        # unwrap('<URL:type://host/path>') --> 'type://host/path'
        self.full_url = unwrap(url)
        # The fragment is kept separately and re-attached by get_full_url().
        self.full_url, self.fragment = splittag(self.full_url)
        self.data = data
        self.headers = {}
        self._tunnel_host = None
        # Normalise caller-supplied header names via add_header().
        for key, value in headers.items():
            self.add_header(key, value)
        self.unredirected_hdrs = {}
        if origin_req_host is None:
            origin_req_host = request_host(self)
        self.origin_req_host = origin_req_host
        self.unverifiable = unverifiable
        self.method = method
        self._parse()

    def _parse(self):
        # Split full_url into scheme (self.type), host and selector.
        self.type, rest = splittype(self.full_url)
        if self.type is None:
            raise ValueError("unknown url type: %r" % self.full_url)
        self.host, self.selector = splithost(rest)
        if self.host:
            self.host = unquote(self.host)

    def get_method(self):
        """Return a string indicating the HTTP request method."""
        if self.method is not None:
            # An explicitly-set method always wins.
            return self.method
        elif self.data is not None:
            return "POST"
        else:
            return "GET"

    def get_full_url(self):
        # Re-attach the fragment stripped off in __init__.
        if self.fragment:
            return '%s#%s' % (self.full_url, self.fragment)
        else:
            return self.full_url

    # Begin deprecated methods

    def add_data(self, data):
        msg = "Request.add_data method is deprecated."
        warnings.warn(msg, DeprecationWarning, stacklevel=1)
        self.data = data

    def has_data(self):
        msg = "Request.has_data method is deprecated."
        warnings.warn(msg, DeprecationWarning, stacklevel=1)
        return self.data is not None

    def get_data(self):
        msg = "Request.get_data method is deprecated."
        warnings.warn(msg, DeprecationWarning, stacklevel=1)
        return self.data

    def get_type(self):
        msg = "Request.get_type method is deprecated."
        warnings.warn(msg, DeprecationWarning, stacklevel=1)
        return self.type

    def get_host(self):
        msg = "Request.get_host method is deprecated."
        warnings.warn(msg, DeprecationWarning, stacklevel=1)
        return self.host

    def get_selector(self):
        msg = "Request.get_selector method is deprecated."
        warnings.warn(msg, DeprecationWarning, stacklevel=1)
        return self.selector

    def is_unverifiable(self):
        msg = "Request.is_unverifiable method is deprecated."
        warnings.warn(msg, DeprecationWarning, stacklevel=1)
        return self.unverifiable

    def get_origin_req_host(self):
        msg = "Request.get_origin_req_host method is deprecated."
        warnings.warn(msg, DeprecationWarning, stacklevel=1)
        return self.origin_req_host

    # End deprecated methods

    def set_proxy(self, host, type):
        # For https, remember the real host so the connection can be
        # tunnelled (CONNECT); otherwise point the request at the proxy.
        if self.type == 'https' and not self._tunnel_host:
            self._tunnel_host = self.host
        else:
            self.type = type
            self.selector = self.full_url
        self.host = host

    def has_proxy(self):
        # True after set_proxy() made the selector the absolute URL.
        return self.selector == self.full_url

    def add_header(self, key, val):
        # useful for something like authentication
        self.headers[key.capitalize()] = val

    def add_unredirected_header(self, key, val):
        # will not be added to a redirected request
        self.unredirected_hdrs[key.capitalize()] = val

    def has_header(self, header_name):
        return (header_name in self.headers or
                header_name in self.unredirected_hdrs)

    def get_header(self, header_name, default=None):
        return self.headers.get(
            header_name,
            self.unredirected_hdrs.get(header_name, default))

    def header_items(self):
        # On key collision, self.headers values override unredirected_hdrs.
        hdrs = self.unredirected_hdrs.copy()
        hdrs.update(self.headers)
        return list(hdrs.items())
class OpenerDirector(object):
    """Manages a collection of handlers and dispatches requests through
    them: pre-processors, protocol openers, post-processors and error
    handlers are discovered from handler method names at add_handler() time."""

    def __init__(self):
        client_version = "Python-urllib/%s" % __version__
        self.addheaders = [('User-agent', client_version)]
        # self.handlers is retained only for backward compatibility
        self.handlers = []
        # manage the individual handlers
        self.handle_open = {}
        self.handle_error = {}
        self.process_response = {}
        self.process_request = {}

    def add_handler(self, handler):
        """Register *handler*, routing each of its <proto>_open /
        <proto>_request / <proto>_response / <proto>_error_* methods into
        the matching dispatch table (sorted by handler_order)."""
        if not hasattr(handler, "add_parent"):
            raise TypeError("expected BaseHandler instance, got %r" %
                            type(handler))

        added = False
        for meth in dir(handler):
            if meth in ["redirect_request", "do_open", "proxy_open"]:
                # oops, coincidental match
                continue

            # Split "proto_condition" on the first underscore.
            i = meth.find("_")
            protocol = meth[:i]
            condition = meth[i+1:]

            if condition.startswith("error"):
                # e.g. http_error_404 -> protocol 'http', kind 404.
                j = condition.find("_") + i + 1
                kind = meth[j+1:]
                try:
                    kind = int(kind)
                except ValueError:
                    pass
                lookup = self.handle_error.get(protocol, {})
                self.handle_error[protocol] = lookup
            elif condition == "open":
                kind = protocol
                lookup = self.handle_open
            elif condition == "response":
                kind = protocol
                lookup = self.process_response
            elif condition == "request":
                kind = protocol
                lookup = self.process_request
            else:
                continue

            # Keep each chain ordered by handler_order (BaseHandler.__lt__).
            handlers = lookup.setdefault(kind, [])
            if handlers:
                bisect.insort(handlers, handler)
            else:
                handlers.append(handler)
            added = True

        if added:
            bisect.insort(self.handlers, handler)
            handler.add_parent(self)

    def close(self):
        # Only exists for backwards compatibility.
        pass

    def _call_chain(self, chain, kind, meth_name, *args):
        # Handlers raise an exception if no one else should try to handle
        # the request, or return None if they can't but another handler
        # could. Otherwise, they return the response.
        handlers = chain.get(kind, ())
        for handler in handlers:
            func = getattr(handler, meth_name)
            result = func(*args)
            if result is not None:
                return result

    def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
        """
        Accept a URL or a Request object

        Python-Future: if the URL is passed as a byte-string, decode it first.
        """
        if isinstance(fullurl, bytes):
            fullurl = fullurl.decode()
        if isinstance(fullurl, str):
            req = Request(fullurl, data)
        else:
            req = fullurl
            if data is not None:
                req.data = data

        req.timeout = timeout
        protocol = req.type

        # pre-process request
        meth_name = protocol+"_request"
        for processor in self.process_request.get(protocol, []):
            meth = getattr(processor, meth_name)
            req = meth(req)

        response = self._open(req, data)

        # post-process response
        meth_name = protocol+"_response"
        for processor in self.process_response.get(protocol, []):
            meth = getattr(processor, meth_name)
            response = meth(req, response)

        return response

    def _open(self, req, data=None):
        # Try 'default_open', then the protocol-specific opener, then
        # 'unknown_open' as a last resort.
        result = self._call_chain(self.handle_open, 'default',
                                  'default_open', req)
        if result:
            return result

        protocol = req.type
        result = self._call_chain(self.handle_open, protocol, protocol +
                                  '_open', req)
        if result:
            return result

        return self._call_chain(self.handle_open, 'unknown',
                                'unknown_open', req)

    def error(self, proto, *args):
        if proto in ('http', 'https'):
            # XXX http[s] protocols are special-cased
            dict = self.handle_error['http'] # https is not different than http
            proto = args[2]  # YUCK!
            meth_name = 'http_error_%s' % proto
            http_err = 1
            orig_args = args
        else:
            dict = self.handle_error
            meth_name = proto + '_error'
            http_err = 0
        args = (dict, proto, meth_name) + args
        result = self._call_chain(*args)
        if result:
            return result

        # No code-specific handler claimed it; fall back to the default
        # HTTP error handler (which raises HTTPError).
        if http_err:
            args = (dict, 'default', 'http_error_default') + orig_args
            return self._call_chain(*args)
# XXX probably also want an abstract factory that knows when it makes
# sense to skip a superclass in favor of a subclass and when it might
# make sense to include both
def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP, FTP and when applicable HTTPS.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    def _is_class(obj):
        # Works for both new-style types and old-style classes.
        return isinstance(obj, type) or hasattr(obj, "__bases__")

    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(http_client, "HTTPSConnection"):
        default_classes.append(HTTPSHandler)

    def _overridden(klass):
        # A default is suppressed when a user handler is (an instance of)
        # one of its subclasses.
        for check in handlers:
            if _is_class(check):
                if issubclass(check, klass):
                    return True
            elif isinstance(check, klass):
                return True
        return False

    opener = OpenerDirector()
    # Register the surviving defaults first, then the user handlers
    # (instantiating any that were passed as classes).
    for klass in default_classes:
        if not _overridden(klass):
            opener.add_handler(klass())
    for handler in handlers:
        if _is_class(handler):
            handler = handler()
        opener.add_handler(handler)
    return opener
class BaseHandler(object):
    """Common base class for handlers registered with an OpenerDirector.

    ``handler_order`` determines a handler's position within each
    dispatch chain; lower values sort (and therefore run) earlier.
    """

    # Default priority; subclasses override to move earlier or later.
    handler_order = 500

    def add_parent(self, parent):
        """Remember the OpenerDirector this handler was added to."""
        self.parent = parent

    def close(self):
        """No-op; only exists for backwards compatibility."""

    def __lt__(self, other):
        """Order handlers by handler_order for bisect-based insertion."""
        try:
            other_order = other.handler_order
        except AttributeError:
            # Preserve the old behavior of having custom classes unaware
            # of handler_order inserted after the default ones.
            return True
        return self.handler_order < other_order
class HTTPErrorProcessor(BaseHandler):
    """Process HTTP error responses: route every non-2xx response into the
    parent director's error chain."""

    handler_order = 1000  # after all other processing

    def http_response(self, request, response):
        """Return 2xx responses untouched; escalate everything else."""
        code, msg, hdrs = response.code, response.msg, response.info()
        # RFC 2616: any "2xx" code means the request was successfully
        # received, understood, and accepted.
        if 200 <= code < 300:
            return response
        return self.parent.error(
            'http', request, response, code, msg, hdrs)

    https_response = http_response
class HTTPDefaultErrorHandler(BaseHandler):
    """Last-resort handler: turn any otherwise-unhandled HTTP error
    response into an HTTPError exception."""
    def http_error_default(self, req, fp, code, msg, hdrs):
        raise HTTPError(req.full_url, code, msg, hdrs, fp)
class HTTPRedirectHandler(BaseHandler):
    """Follow 301/302/303/307 responses by re-issuing the request at the
    new location, with per-URL and total redirection limits."""

    # maximum number of redirections to any single URL
    # this is needed because of the state that cookies introduce
    max_repeats = 4
    # maximum total number of redirections (regardless of URL) before
    # assuming we're in a loop
    max_redirections = 10

    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.

        This is called by the http_error_30x methods when a
        redirection response is received.  If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect.  Otherwise, raise HTTPError if no-one
        else should try to handle this url.  Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        if (not (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
            or code in (301, 302, 303) and m == "POST")):
            raise HTTPError(req.full_url, code, msg, headers, fp)

        # Strictly (according to RFC 2616), 301 or 302 in response to
        # a POST MUST NOT cause a redirection without confirmation
        # from the user (of urllib.request, in this case).  In practice,
        # essentially all clients do redirect in this case, so we do
        # the same.
        # be conciliant with URIs containing a space
        newurl = newurl.replace(' ', '%20')
        # The redirected request gets no body, so drop entity headers.
        CONTENT_HEADERS = ("content-length", "content-type")
        newheaders = dict((k, v) for k, v in req.headers.items()
                          if k.lower() not in CONTENT_HEADERS)
        return Request(newurl,
                       headers=newheaders,
                       origin_req_host=req.origin_req_host,
                       unverifiable=True)

    # Implementation note: To avoid the server sending us into an
    # infinite loop, the request object needs to track what URLs we
    # have already seen.  Do this by adding a handler-specific
    # attribute to the Request object.
    def http_error_302(self, req, fp, code, msg, headers):
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI).  Use first header.
        if "location" in headers:
            newurl = headers["location"]
        elif "uri" in headers:
            newurl = headers["uri"]
        else:
            return

        # fix a possible malformed URL
        urlparts = urlparse(newurl)

        # For security reasons we don't allow redirection to anything other
        # than http, https or ftp.
        if urlparts.scheme not in ('http', 'https', 'ftp', ''):
            raise HTTPError(
                newurl, code,
                "%s - Redirection to url '%s' is not allowed" % (msg, newurl),
                headers, fp)

        if not urlparts.path:
            urlparts = list(urlparts)
            urlparts[2] = "/"
        newurl = urlunparse(urlparts)

        # Resolve relative redirect targets against the original URL.
        newurl = urljoin(req.full_url, newurl)

        # XXX Probably want to forget about the state of the current
        # request, although that might interact poorly with other
        # handlers that also use handler-specific request attributes
        new = self.redirect_request(req, fp, code, msg, headers, newurl)
        if new is None:
            return

        # loop detection
        # .redirect_dict has a key url if url was previously visited.
        if hasattr(req, 'redirect_dict'):
            visited = new.redirect_dict = req.redirect_dict
            if (visited.get(newurl, 0) >= self.max_repeats or
                len(visited) >= self.max_redirections):
                raise HTTPError(req.full_url, code,
                                self.inf_msg + msg, headers, fp)
        else:
            visited = new.redirect_dict = req.redirect_dict = {}
        visited[newurl] = visited.get(newurl, 0) + 1

        # Don't close the fp until we are sure that we won't use it
        # with HTTPError.
        fp.read()
        fp.close()

        return self.parent.open(new, timeout=req.timeout)

    # All four 30x codes share the same implementation.
    http_error_301 = http_error_303 = http_error_307 = http_error_302

    inf_msg = "The HTTP server returned a redirect error that would " \
              "lead to an infinite loop.\n" \
              "The last 30x error message was:\n"
def _parse_proxy(proxy):
    """Return (scheme, user, password, host/port) given a URL or an authority.

    If a URL is supplied, it must have an authority (host:port) component.
    According to RFC 3986, having an authority component means the URL must
    have two slashes after the scheme.  The first three items of the
    returned tuple may be None.

    >>> _parse_proxy('proxy.example.com:3128')
    (None, None, None, 'proxy.example.com:3128')
    >>> _parse_proxy('joe:password@proxy.example.com')
    (None, 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('http://joe:password@proxy.example.com:3128/')
    ('http', 'joe', 'password', 'proxy.example.com:3128')
    >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
    ('ftp', 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('file:/ftp.example.com/')
    Traceback (most recent call last):
    ValueError: proxy URL with no authority: 'file:/ftp.example.com/'
    """
    scheme, remainder = splittype(proxy)
    if not remainder.startswith("/"):
        # Bare authority such as "host:port" -- there is no scheme at all.
        scheme = None
        authority = proxy
    else:
        # URL form: RFC 3986 ss 3.2 and 3.3 require "//" before the
        # authority, and the path (if any) starts at the next '/'.
        if not remainder.startswith("//"):
            raise ValueError("proxy URL with no authority: %r" % proxy)
        slash = remainder.find("/", 2)
        authority = remainder[2:slash] if slash != -1 else remainder[2:]
    userinfo, hostport = splituser(authority)
    if userinfo is None:
        user = password = None
    else:
        user, password = splitpasswd(userinfo)
    return scheme, user, password, hostport
class ProxyHandler(BaseHandler):
    """Rewrite requests so they are sent through the configured proxies."""

    # Proxies must be in front
    handler_order = 100

    def __init__(self, proxies=None):
        # proxies maps a scheme ('http', 'ftp', ...) to a proxy URL.
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'keys'), "proxies must be a mapping"
        self.proxies = proxies
        # Dynamically grow one <scheme>_open method per configured proxy.
        # The lambda's default arguments bind the per-iteration values of
        # url/type at definition time (avoiding the late-binding pitfall).
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open:
                        meth(r, proxy, type))

    def proxy_open(self, req, proxy, type):
        """Point *req* at *proxy*; attach Basic proxy credentials when the
        proxy URL embeds a user:password pair."""
        orig_type = req.type
        proxy_type, user, password, hostport = _parse_proxy(proxy)
        if proxy_type is None:
            proxy_type = orig_type

        if req.host and proxy_bypass(req.host):
            # Host is on the platform's bypass list; let a direct handler
            # take the request instead.
            return None

        if user and password:
            user_pass = '%s:%s' % (unquote(user),
                                   unquote(password))
            creds = base64.b64encode(user_pass.encode()).decode("ascii")
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        hostport = unquote(hostport)
        req.set_proxy(hostport, proxy_type)
        if orig_type == proxy_type or orig_type == 'https':
            # let other handlers take care of it
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            # e.g. if we have a constructor arg proxies like so:
            # {'http': 'ftp://proxy.example.com'}, we may end up turning
            # a request for http://acme.example.com/a into one for
            # ftp://proxy.example.com/a
            return self.parent.open(req, timeout=req.timeout)
class HTTPPasswordMgr(object):
    """Map (realm, URI-prefix) pairs to (user, password) credentials."""

    def __init__(self):
        # realm -> {tuple-of-reduced-uris: (user, password)}
        self.passwd = {}

    def add_password(self, realm, uri, user, passwd):
        """Register credentials for *realm* at *uri* (one URI or a sequence)."""
        # uri could be a single URI or a sequence
        if isinstance(uri, str):
            uri = [uri]
        if realm not in self.passwd:
            self.passwd[realm] = {}
        for default_port in True, False:
            # Store variants both with and without the scheme's default
            # port folded in, so lookups succeed whether or not the caller
            # spells the port out explicitly.
            reduced_uri = tuple(
                [self.reduce_uri(u, default_port) for u in uri])
            self.passwd[realm][reduced_uri] = (user, passwd)

    def find_user_password(self, realm, authuri):
        """Return (user, password) for *authuri* within *realm*,
        or (None, None) when nothing registered matches."""
        domains = self.passwd.get(realm, {})
        for default_port in True, False:
            reduced_authuri = self.reduce_uri(authuri, default_port)
            for uris, authinfo in domains.items():
                for uri in uris:
                    if self.is_suburi(uri, reduced_authuri):
                        return authinfo
        return None, None

    def reduce_uri(self, uri, default_port=True):
        """Accept authority or URI and extract only the authority and path."""
        # note HTTP URLs do not have a userinfo component
        parts = urlsplit(uri)
        if parts[1]:
            # URI
            scheme = parts[0]
            authority = parts[1]
            path = parts[2] or '/'
        else:
            # host or host:port
            scheme = None
            authority = uri
            path = '/'
        host, port = splitport(authority)
        if default_port and port is None and scheme is not None:
            # Normalize by appending the scheme's well-known port.
            dport = {"http": 80,
                     "https": 443,
                     }.get(scheme)
            if dport is not None:
                authority = "%s:%d" % (host, dport)
        return authority, path

    def is_suburi(self, base, test):
        """Check if test is below base in a URI tree

        Both args must be URIs in reduced form.
        """
        if base == test:
            return True
        if base[0] != test[0]:
            return False
        # Same authority: test matches when its path extends base's path.
        common = posixpath.commonprefix((base[1], test[1]))
        if len(common) == len(base[1]):
            return True
        return False
class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):
    """Password manager that falls back to a wildcard realm of ``None``."""

    def find_user_password(self, realm, authuri):
        # Try the specific realm first; if nothing is registered there,
        # retry with the default realm ``None`` as a catch-all.
        user, password = HTTPPasswordMgr.find_user_password(
            self, realm, authuri)
        if user is None:
            return HTTPPasswordMgr.find_user_password(self, None, authuri)
        return user, password
class AbstractBasicAuthHandler(object):
    """Shared machinery for Basic auth against servers (401) and proxies (407)."""

    # XXX this allows for multiple auth-schemes, but will stupidly pick
    # the last one with a realm specified.

    # allow for double- and single-quoted realm values
    # (single quotes are a violation of the RFC, but appear in the wild)
    rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
                    'realm=(["\']?)([^"\']*)\\2', re.I)

    # XXX could pre-emptively send auth info already accepted (RFC 2617,
    # end of section 2, and section 1.2 immediately after "credentials"
    # production).

    def __init__(self, password_mgr=None):
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        self.add_password = self.passwd.add_password
        # Counts consecutive failed challenges; reset by the subclass
        # after each completed error-handling round.
        self.retried = 0

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, authreq, host, req, headers):
        """Answer the challenge carried in response header *authreq*;
        returns the retried response, or None when we cannot answer."""
        # host may be an authority (without userinfo) or a URL with an
        # authority
        # XXX could be multiple headers
        authreq = headers.get(authreq, None)

        if self.retried > 5:
            # retry sending the username:password 5 times before failing.
            raise HTTPError(req.get_full_url(), 401, "basic auth failed",
                            headers, None)
        else:
            self.retried += 1

        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() != 'basic':
                raise ValueError("AbstractBasicAuthHandler does not"
                                 " support the following scheme: '%s'" %
                                 scheme)
            else:
                mo = AbstractBasicAuthHandler.rx.search(authreq)
                if mo:
                    scheme, quote, realm = mo.groups()
                    if quote not in ['"',"'"]:
                        warnings.warn("Basic Auth Realm was unquoted",
                                      UserWarning, 2)
                    if scheme.lower() == 'basic':
                        response = self.retry_http_basic_auth(host, req, realm)
                        # A non-401 answer means the credentials worked;
                        # clear the failure counter.
                        if response and response.code != 401:
                            self.retried = 0
                        return response

    def retry_http_basic_auth(self, host, req, realm):
        """Re-send *req* with an Authorization header for *realm*,
        or return None when no credentials are known / already failed."""
        user, pw = self.passwd.find_user_password(realm, host)
        if pw is not None:
            raw = "%s:%s" % (user, pw)
            auth = "Basic " + base64.b64encode(raw.encode()).decode("ascii")
            if req.headers.get(self.auth_header, None) == auth:
                # Identical credentials were already sent and rejected.
                return None
            req.add_unredirected_header(self.auth_header, auth)
            return self.parent.open(req, timeout=req.timeout)
        else:
            return None
class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Answer 401 challenges from the origin server using Basic auth."""

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        # The WWW-Authenticate challenge is scoped to the request's URL.
        response = self.http_error_auth_reqed(
            'www-authenticate', req.full_url, req, headers)
        self.reset_retry_count()
        return response
class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Answer 407 challenges from an intermediate proxy using Basic auth."""

    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # http_error_auth_reqed requires that there is no userinfo component
        # in the authority.  urllib.request does not (and per RFC 3986
        # s. 3.2.1 should not) support URLs carrying userinfo, so the bare
        # request host is a valid authority here.
        response = self.http_error_auth_reqed(
            'proxy-authenticate', req.host, req, headers)
        self.reset_retry_count()
        return response
# Return n random bytes.  Used for the client nonce in digest auth
# (AbstractDigestAuthHandler.get_cnonce); os.urandom is a CSPRNG, which
# is suitable for that purpose.
_randombytes = os.urandom
class AbstractDigestAuthHandler(object):
    """Shared machinery for Digest authentication (RFC 2617).

    Subclasses provide ``auth_header`` and wire http_error_401/407 to
    http_error_auth_reqed().
    """
    # XXX The client does not inspect the Authentication-Info header
    # in a successful response.
    # XXX It should be possible to test this implementation against
    # a mock server that just generates a static set of challenges.
    # XXX qop="auth-int" supports is shaky

    def __init__(self, passwd=None):
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        self.add_password = self.passwd.add_password
        self.retried = 0
        # nonce_count / last_nonce implement the RFC 2617 "nc" counter:
        # it increments while the server keeps sending the same nonce.
        self.nonce_count = 0
        self.last_nonce = None

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        """React to a 401/407 whose challenge arrives in *auth_header*."""
        authreq = headers.get(auth_header, None)
        if self.retried > 5:
            # Don't fail endlessly - if we failed once, we'll probably
            # fail a second time. Hm. Unless the Password Manager is
            # prompting for the information. Crap. This isn't great
            # but it's better than the current 'repeat until recursion
            # depth exceeded' approach <wink>
            raise HTTPError(req.full_url, 401, "digest auth failed",
                            headers, None)
        else:
            self.retried += 1
        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() == 'digest':
                return self.retry_http_digest_auth(req, authreq)
            elif scheme.lower() != 'basic':
                raise ValueError("AbstractDigestAuthHandler does not support"
                                 " the following scheme: '%s'" % scheme)

    def retry_http_digest_auth(self, req, auth):
        """Re-issue *req* carrying a Digest answer to challenge *auth*."""
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(filter(None, parse_http_list(challenge)))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            if req.headers.get(self.auth_header, None) == auth_val:
                # Identical credentials were already sent and rejected.
                return None
            req.add_unredirected_header(self.auth_header, auth_val)
            resp = self.parent.open(req, timeout=req.timeout)
            return resp

    def get_cnonce(self, nonce):
        # The cnonce-value is an opaque
        # quoted string value provided by the client and used by both client
        # and server to avoid chosen plaintext attacks, to provide mutual
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        s = "%s:%s:%s:" % (self.nonce_count, nonce, time.ctime())
        b = s.encode("ascii") + _randombytes(8)
        dig = hashlib.sha1(b).hexdigest()
        return dig[:16]

    def get_authorization(self, req, chal):
        """Build the Digest response string for challenge *chal*,
        or return None when the challenge cannot be answered."""
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None

        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            return None

        user, pw = self.passwd.find_user_password(realm, req.full_url)
        if user is None:
            return None

        # XXX not implemented yet
        if req.data is not None:
            entdig = self.get_entity_digest(req.data, chal)
        else:
            entdig = None

        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.get_method(),
                        # XXX selector: what about proxies and full urls
                        req.selector)
        if qop == 'auth':
            if nonce == self.last_nonce:
                self.nonce_count += 1
            else:
                self.nonce_count = 1
                self.last_nonce = nonce
            ncvalue = '%08x' % self.nonce_count
            cnonce = self.get_cnonce(nonce)
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
            respdig = KD(H(A1), noncebit)
        elif qop is None:
            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        else:
            # XXX handle auth-int.
            raise URLError("qop '%s' is not supported." % qop)

        # XXX should the partial digests be encoded too?
        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.selector,
                                  respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if entdig:
            base += ', digest="%s"' % entdig
        base += ', algorithm="%s"' % algorithm
        if qop:
            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return base

    def get_algorithm_impls(self, algorithm):
        """Return (hash, keyed-digest) callables for *algorithm*.

        Returns (None, None) for an unsupported algorithm; the caller
        (get_authorization) treats that as "cannot answer this challenge".

        BUGFIX: previously any algorithm other than MD5/SHA fell through
        with H unbound and raised UnboundLocalError on the return.
        """
        # lambdas assume digest modules are imported at the top level
        if algorithm == 'MD5':
            H = lambda x: hashlib.md5(x.encode("ascii")).hexdigest()
        elif algorithm == 'SHA':
            H = lambda x: hashlib.sha1(x.encode("ascii")).hexdigest()
        else:
            # XXX MD5-sess
            return None, None
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD

    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069

    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """

    auth_header = 'Authorization'
    handler_order = 490  # before Basic auth

    def http_error_401(self, req, fp, code, msg, headers):
        # The protection space is identified by the authority (host:port)
        # component of the request's URL.
        authority = urlparse(req.full_url)[1]
        retry_response = self.http_error_auth_reqed(
            'www-authenticate', authority, req, headers)
        self.reset_retry_count()
        return retry_response
class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """Digest authentication against an intermediate proxy (407 responses)."""

    auth_header = 'Proxy-Authorization'
    handler_order = 490  # before Basic auth

    def http_error_407(self, req, fp, code, msg, headers):
        # For proxies the protection space is simply the request host.
        retry_response = self.http_error_auth_reqed(
            'proxy-authenticate', req.host, req, headers)
        self.reset_retry_count()
        return retry_response
class AbstractHTTPHandler(BaseHandler):
    """Common request preparation and opening logic for HTTP(S) handlers."""

    def __init__(self, debuglevel=0):
        # Forwarded to the underlying HTTPConnection instances.
        self._debuglevel = debuglevel

    def set_http_debuglevel(self, level):
        self._debuglevel = level

    def do_request_(self, request):
        """Fill in required headers (Content-type/length, Host) on *request*.

        Returns the mutated request.  Raises URLError when no host is set
        and TypeError for str POST bodies (bytes are required).
        """
        host = request.host
        if not host:
            raise URLError('no host given')

        if request.data is not None:  # POST
            data = request.data
            if isinstance(data, str):
                msg = "POST data should be bytes or an iterable of bytes. " \
                      "It cannot be of type str."
                raise TypeError(msg)
            if not request.has_header('Content-type'):
                request.add_unredirected_header(
                    'Content-type',
                    'application/x-www-form-urlencoded')
            if not request.has_header('Content-length'):
                size = None
                try:
                    ### For Python-Future:
                    if PY2 and isinstance(data, array.array):
                        # memoryviews of arrays aren't supported
                        # in Py2.7. (e.g. memoryview(array.array('I',
                        # [1, 2, 3, 4])) raises a TypeError.)
                        # So we calculate the size manually instead:
                        size = len(data) * data.itemsize
                    ###
                    else:
                        mv = memoryview(data)
                        size = len(mv) * mv.itemsize
                except TypeError:
                    # NOTE(review): collections.Iterable moved to
                    # collections.abc in Py3.3 and was removed from the
                    # collections top level in 3.10 -- confirm the file's
                    # `collections` import still provides this name.
                    if isinstance(data, collections.Iterable):
                        raise ValueError("Content-Length should be specified "
                                "for iterable data of type %r %r" % (type(data),
                                data))
                else:
                    request.add_unredirected_header(
                            'Content-length', '%d' % size)

        sel_host = host
        if request.has_proxy():
            # When proxied, the selector is an absolute URL; the Host header
            # must name the origin server, not the proxy.
            scheme, sel = splittype(request.selector)
            sel_host, sel_path = splithost(sel)
        if not request.has_header('Host'):
            request.add_unredirected_header('Host', sel_host)
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if not request.has_header(name):
                request.add_unredirected_header(name, value)

        return request

    def do_open(self, http_class, req, **http_conn_args):
        """Return an HTTPResponse object for the request, using http_class.

        http_class must implement the HTTPConnection API from http.client.
        """
        host = req.host
        if not host:
            raise URLError('no host given')

        # will parse host:port
        h = http_class(host, timeout=req.timeout, **http_conn_args)

        # Unredirected headers win over regular ones of the same name.
        headers = dict(req.unredirected_hdrs)
        headers.update(dict((k, v) for k, v in req.headers.items()
                            if k not in headers))

        # TODO(jhylton): Should this be redesigned to handle
        # persistent connections?

        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = dict((name.title(), val) for name, val in headers.items())

        if req._tunnel_host:
            # CONNECT tunnelling (https through a proxy).
            tunnel_headers = {}
            proxy_auth_hdr = "Proxy-Authorization"
            if proxy_auth_hdr in headers:
                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                # Proxy-Authorization should not be sent to origin
                # server.
                del headers[proxy_auth_hdr]
            h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

        try:
            h.request(req.get_method(), req.selector, req.data, headers)
        except socket.error as err:  # timeout error
            h.close()
            raise URLError(err)
        else:
            r = h.getresponse()

        # If the server does not send us a 'Connection: close' header,
        # HTTPConnection assumes the socket should be left open. Manually
        # mark the socket to be closed when this response object goes away.
        if h.sock:
            h.sock.close()
            h.sock = None

        r.url = req.get_full_url()
        # This line replaces the .msg attribute of the HTTPResponse
        # with .headers, because urllib clients expect the response to
        # have the reason in .msg.  It would be good to mark this
        # attribute is deprecated and get then to use info() or
        # .headers.
        r.msg = r.reason
        return r
class HTTPHandler(AbstractHTTPHandler):
    """Open plain http:// URLs via AbstractHTTPHandler.do_open()."""

    def http_open(self, req):
        # All of the real work (headers, tunnelling, response wrapping)
        # lives in do_open(); this handler only selects the connection class.
        connection_class = http_client.HTTPConnection
        return self.do_open(connection_class, req)

    http_request = AbstractHTTPHandler.do_request_
# HTTPSHandler only exists when the http client library was built with
# SSL support (mirrors the conditional export in CPython's urllib.request).
if hasattr(http_client, 'HTTPSConnection'):

    class HTTPSHandler(AbstractHTTPHandler):
        """Open https:// URLs, optionally with a custom SSL context."""

        def __init__(self, debuglevel=0, context=None, check_hostname=None):
            AbstractHTTPHandler.__init__(self, debuglevel)
            # context/check_hostname are forwarded to HTTPSConnection.
            self._context = context
            self._check_hostname = check_hostname

        def https_open(self, req):
            return self.do_open(http_client.HTTPSConnection, req,
                context=self._context, check_hostname=self._check_hostname)

        https_request = AbstractHTTPHandler.do_request_

    __all__.append('HTTPSHandler')
class HTTPCookieProcessor(BaseHandler):
    """Attach stored cookies to requests and harvest Set-Cookie responses."""

    def __init__(self, cookiejar=None):
        import future.backports.http.cookiejar as http_cookiejar
        # Fall back to a fresh in-memory jar when none was supplied.
        self.cookiejar = (http_cookiejar.CookieJar()
                          if cookiejar is None else cookiejar)

    def http_request(self, request):
        # Add a Cookie header for any stored cookies matching this request.
        self.cookiejar.add_cookie_header(request)
        return request

    def http_response(self, request, response):
        # Record any cookies the server set on this response.
        self.cookiejar.extract_cookies(response, request)
        return response

    https_request = http_request
    https_response = http_response
class UnknownHandler(BaseHandler):
    """Last-resort handler for URL schemes nobody else claims."""

    def unknown_open(self, req):
        # Reached only when no scheme-specific handler matched the request.
        scheme = req.type
        raise URLError('unknown url type: %s' % scheme)
def parse_keqv_list(l):
    """Parse list of key=value strings where keys are not duplicated.

    A value wrapped in double quotes has the quotes stripped.
    """
    result = {}
    for item in l:
        key, value = item.split('=', 1)
        # Strip one layer of surrounding double quotes, if present.
        if value[0] == '"' == value[-1]:
            value = value[1:-1]
        result[key] = value
    return result
def parse_http_list(s):
"""Parse lists as described by RFC 2068 Section 2.
In particular, parse comma-separated lists where the elements of
the list may include quoted-strings. A quoted-string could
contain a comma. A non-quoted string could have quotes in the
middle. Neither commas nor quotes count if they are escaped.
Only double-quotes count, not single-quotes.
"""
res = []
part = ''
escape = quote = False
for cur in s:
if escape:
part += cur
escape = False
continue
if quote:
if cur == '\\':
escape = True
continue
elif cur == '"':
quote = False
part += cur
continue
if cur == ',':
res.append(part)
part = ''
continue
if cur == '"':
quote = True
part += cur
# append last part
if part:
res.append(part)
return [part.strip() for part in res]
class FileHandler(BaseHandler):
    """Open file:// URLs, but only for files on the local host."""

    def file_open(self, req):
        """Serve a file:// request.

        file:// URLs naming a non-local host are rejected with URLError
        so this handler cannot be used to read arbitrary remote paths.
        Returns None (unhandled) for a local-named //host/ form, letting
        other handlers have a go.
        """
        url = req.selector
        if url[:2] == '//' and url[2:3] != '/' and (req.host and
                req.host != 'localhost'):
            # BUGFIX: this read ``not req.host is self.get_names()`` -- an
            # identity comparison between a hostname string and the tuple
            # of local addresses, which is never true, so even genuinely
            # local hostnames were rejected.  Membership is what is meant.
            if req.host not in self.get_names():
                raise URLError("file:// scheme is supported only on localhost")
        else:
            return self.open_local_file(req)

    # Cached tuple of IP addresses that count as "the localhost";
    # filled in lazily by get_names().
    names = None

    def get_names(self):
        """Return the tuple of local IP addresses, computing it once."""
        if FileHandler.names is None:
            try:
                FileHandler.names = tuple(
                    socket.gethostbyname_ex('localhost')[2] +
                    socket.gethostbyname_ex(socket.gethostname())[2])
            except socket.gaierror:
                # Name resolution unavailable: fall back to localhost only.
                FileHandler.names = (socket.gethostbyname('localhost'),)
        return FileHandler.names

    # not entirely sure what the rules are here
    def open_local_file(self, req):
        """Open the local file named by *req* and wrap it in addinfourl
        with synthesized Content-type/length/Last-modified headers."""
        import future.backports.email.utils as email_utils
        import mimetypes
        host = req.host
        filename = req.selector
        localfile = url2pathname(filename)
        try:
            stats = os.stat(localfile)
            size = stats.st_size
            modified = email_utils.formatdate(stats.st_mtime, usegmt=True)
            mtype = mimetypes.guess_type(filename)[0]
            headers = email.message_from_string(
                'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
                (mtype or 'text/plain', size, modified))
            if host:
                host, port = splitport(host)
            if not host or \
                (not port and _safe_gethostbyname(host) in self.get_names()):
                if host:
                    origurl = 'file://' + host + filename
                else:
                    origurl = 'file://' + filename
                return addinfourl(open(localfile, 'rb'), headers, origurl)
        except OSError as exp:
            # users shouldn't expect OSErrors coming from urlopen()
            raise URLError(exp)
        raise URLError('file not on local host')
def _safe_gethostbyname(host):
    """Resolve *host* to an IP address, returning None on resolution failure
    instead of letting socket.gaierror escape to the caller."""
    try:
        addr = socket.gethostbyname(host)
    except socket.gaierror:
        return None
    return addr
class FTPHandler(BaseHandler):
    """Retrieve ftp:// URLs via ftplib, wrapped as addinfourl responses."""

    def ftp_open(self, req):
        """Fetch the file (or directory listing) named by *req*."""
        import ftplib
        import mimetypes
        host = req.host
        if not host:
            raise URLError('ftp error: no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT
        else:
            port = int(port)

        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = user or ''
        passwd = passwd or ''

        try:
            host = socket.gethostbyname(host)
        except socket.error as msg:
            raise URLError(msg)
        # Split ";attr=value" parts off the selector, then break the path
        # into directories plus the final file component.
        path, attrs = splitattr(req.selector)
        dirs = path.split('/')
        dirs = list(map(unquote, dirs))
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
            # 'I'mage (binary) transfer for files, 'D'irectory listing for
            # bare directories; a ";type=..." URL attribute may override.
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitvalue(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()
            fp, retrlen = fw.retrfile(file, type)
            # Synthesize response headers from what the transfer told us.
            headers = ""
            mtype = mimetypes.guess_type(req.full_url)[0]
            if mtype:
                headers += "Content-type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-length: %d\n" % retrlen
            headers = email.message_from_string(headers)
            return addinfourl(fp, headers, req.full_url)
        except ftplib.all_errors as exp:
            exc = URLError('ftp error: %r' % exp)
            raise_with_traceback(exc)

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        # One-shot (non-persistent) connection; CacheFTPHandler overrides
        # this to reuse connections.
        return ftpwrapper(user, passwd, host, port, dirs, timeout,
                          persistent=False)
class CacheFTPHandler(FTPHandler):
    """FTPHandler that keeps live connections in a small timed cache."""

    # XXX would be nice to have pluggable cache strategies
    # XXX this stuff is definitely not thread safe

    def __init__(self):
        self.cache = {}       # key -> live ftpwrapper connection
        self.timeout = {}     # key -> absolute expiry time (epoch seconds)
        self.soonest = 0      # earliest expiry among cached connections
        self.delay = 60       # seconds a connection stays cached
        self.max_conns = 16   # hard cap on simultaneously cached connections

    def setTimeout(self, t):
        self.delay = t

    def setMaxConns(self, m):
        self.max_conns = m

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        key = user, host, port, '/'.join(dirs), timeout
        if key in self.cache:
            # Reuse the existing connection and refresh its expiry.
            self.timeout[key] = time.time() + self.delay
        else:
            self.cache[key] = ftpwrapper(user, passwd, host, port,
                                         dirs, timeout)
            self.timeout[key] = time.time() + self.delay
        self.check_cache()
        return self.cache[key]

    def check_cache(self):
        # first check for old ones
        t = time.time()
        if self.soonest <= t:
            for k, v in list(self.timeout.items()):
                if v < t:
                    self.cache[k].close()
                    del self.cache[k]
                    del self.timeout[k]
            # NOTE(review): min() raises ValueError if the cache emptied
            # here -- confirm callers never reach this with no entries.
            self.soonest = min(list(self.timeout.values()))

        # then check the size
        if len(self.cache) == self.max_conns:
            # Evict the connection that would expire first.
            for k, v in list(self.timeout.items()):
                if v == self.soonest:
                    del self.cache[k]
                    del self.timeout[k]
                    break
            self.soonest = min(list(self.timeout.values()))

    def clear_cache(self):
        # Close and drop every cached connection.
        for conn in self.cache.values():
            conn.close()
        self.cache.clear()
        self.timeout.clear()
# Code move from the old urllib module

MAXFTPCACHE = 10  # Trim the ftp cache beyond this size

# Helper for non-unix systems
if os.name == 'nt':
    from nturl2path import url2pathname, pathname2url
else:
    def url2pathname(pathname):
        """OS-specific conversion from a relative URL of the 'file' scheme
        to a file system path; not recommended for general use."""
        return unquote(pathname)

    def pathname2url(pathname):
        """OS-specific conversion from a file system path to a relative URL
        of the 'file' scheme; not recommended for general use."""
        return quote(pathname)

# This really consists of two pieces:
# (1) a class which handles opening of all sorts of URLs
#     (plus assorted utilities etc.)
# (2) a set of functions for parsing URLs
# XXX Should these be separated out into different modules?

# Shared FTP connection cache used by URLopener instances (see
# URLopener.__init__, which aliases this as self.ftpcache).
ftpcache = {}
class URLopener(object):
"""Class to open URLs.
This is a class rather than just a subroutine because we may need
more than one set of global protocol-specific options.
Note -- this is a base class for those who don't want the
automatic handling of errors type 302 (relocated) and 401
(authorization needed)."""
__tempfiles = None
version = "Python-urllib/%s" % __version__
# Constructor
def __init__(self, proxies=None, **x509):
msg = "%(class)s style of invoking requests is deprecated. " \
"Use newer urlopen functions/methods" % {'class': self.__class__.__name__}
warnings.warn(msg, DeprecationWarning, stacklevel=3)
if proxies is None:
proxies = getproxies()
assert hasattr(proxies, 'keys'), "proxies must be a mapping"
self.proxies = proxies
self.key_file = x509.get('key_file')
self.cert_file = x509.get('cert_file')
self.addheaders = [('User-Agent', self.version)]
self.__tempfiles = []
self.__unlink = os.unlink # See cleanup()
self.tempcache = None
# Undocumented feature: if you assign {} to tempcache,
# it is used to cache files retrieved with
# self.retrieve(). This is not enabled by default
# since it does not work for changing documents (and I
# haven't got the logic to check expiration headers
# yet).
self.ftpcache = ftpcache
# Undocumented feature: you can use a different
# ftp cache by assigning to the .ftpcache member;
# in case you want logically independent URL openers
# XXX This is not threadsafe. Bah.
def __del__(self):
self.close()
def close(self):
self.cleanup()
def cleanup(self):
# This code sometimes runs when the rest of this module
# has already been deleted, so it can't use any globals
# or import anything.
if self.__tempfiles:
for file in self.__tempfiles:
try:
self.__unlink(file)
except OSError:
pass
del self.__tempfiles[:]
if self.tempcache:
self.tempcache.clear()
def addheader(self, *args):
"""Add a header to be used by the HTTP interface only
e.g. u.addheader('Accept', 'sound/basic')"""
self.addheaders.append(args)
# External interface
def open(self, fullurl, data=None):
"""Use URLopener().open(file) instead of open(file, 'r')."""
fullurl = unwrap(to_bytes(fullurl))
fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
if self.tempcache and fullurl in self.tempcache:
filename, headers = self.tempcache[fullurl]
fp = open(filename, 'rb')
return addinfourl(fp, headers, fullurl)
urltype, url = splittype(fullurl)
if not urltype:
urltype = 'file'
if urltype in self.proxies:
proxy = self.proxies[urltype]
urltype, proxyhost = splittype(proxy)
host, selector = splithost(proxyhost)
url = (host, fullurl) # Signal special case to open_*()
else:
proxy = None
name = 'open_' + urltype
self.type = urltype
name = name.replace('-', '_')
if not hasattr(self, name):
if proxy:
return self.open_unknown_proxy(proxy, fullurl, data)
else:
return self.open_unknown(fullurl, data)
try:
if data is None:
return getattr(self, name)(url)
else:
return getattr(self, name)(url, data)
except HTTPError:
raise
except socket.error as msg:
raise_with_traceback(IOError('socket error', msg))
def open_unknown(self, fullurl, data=None):
"""Overridable interface to open unknown URL type."""
type, url = splittype(fullurl)
raise IOError('url error', 'unknown url type', type)
def open_unknown_proxy(self, proxy, fullurl, data=None):
"""Overridable interface to open unknown URL type."""
type, url = splittype(fullurl)
raise IOError('url error', 'invalid proxy for %s' % type, proxy)
# External interface
def retrieve(self, url, filename=None, reporthook=None, data=None):
"""retrieve(url) returns (filename, headers) for a local object
or (tempfilename, headers) for a remote object."""
url = unwrap(to_bytes(url))
if self.tempcache and url in self.tempcache:
return self.tempcache[url]
type, url1 = splittype(url)
if filename is None and (not type or type == 'file'):
try:
fp = self.open_local_file(url1)
hdrs = fp.info()
fp.close()
return url2pathname(splithost(url1)[1]), hdrs
except IOError as msg:
pass
fp = self.open(url, data)
try:
headers = fp.info()
if filename:
tfp = open(filename, 'wb')
else:
import tempfile
garbage, path = splittype(url)
garbage, path = splithost(path or "")
path, garbage = splitquery(path or "")
path, garbage = splitattr(path or "")
suffix = os.path.splitext(path)[1]
(fd, filename) = tempfile.mkstemp(suffix)
self.__tempfiles.append(filename)
tfp = os.fdopen(fd, 'wb')
try:
result = filename, headers
if self.tempcache is not None:
self.tempcache[url] = result
bs = 1024*8
size = -1
read = 0
blocknum = 0
if "content-length" in headers:
size = int(headers["Content-Length"])
if reporthook:
reporthook(blocknum, bs, size)
while 1:
block = fp.read(bs)
if not block:
break
read += len(block)
tfp.write(block)
blocknum += 1
if reporthook:
reporthook(blocknum, bs, size)
finally:
tfp.close()
finally:
fp.close()
# raise exception if actual size does not match content-length header
if size >= 0 and read < size:
raise ContentTooShortError(
"retrieval incomplete: got only %i out of %i bytes"
% (read, size), result)
return result
# Each method named open_<type> knows how to open that type of URL

def _open_generic_http(self, connection_factory, url, data):
    """Make an HTTP connection using connection_class.

    This is an internal method that should be called from
    open_http() or open_https().

    Arguments:
    - connection_factory should take a host name and return an
      HTTPConnection instance.
    - url is the url to retrieval or a host, relative-path pair.
    - data is payload for a POST request or None.
    """
    user_passwd = None
    proxy_passwd= None
    if isinstance(url, str):
        # Direct request: split off host and optional user:password@.
        host, selector = splithost(url)
        if host:
            user_passwd, host = splituser(host)
            host = unquote(host)
        realhost = host

    else:
        # (host, selector) pair means we were handed a proxy host plus
        # the full target URL as the selector.
        host, selector = url
        # check whether the proxy contains authorization information
        proxy_passwd, host = splituser(host)
        # now we proceed with the url we want to obtain
        urltype, rest = splittype(selector)
        url = rest
        user_passwd = None
        if urltype.lower() != 'http':
            realhost = None
        else:
            realhost, rest = splithost(rest)
            if realhost:
                user_passwd, realhost = splituser(realhost)
            if user_passwd:
                selector = "%s://%s%s" % (urltype, realhost, rest)
            if proxy_bypass(realhost):
                # Target host is exempt from proxying: talk to it directly.
                host = realhost

    if not host: raise IOError('http error', 'no host given')

    if proxy_passwd:
        proxy_passwd = unquote(proxy_passwd)
        proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii')
    else:
        proxy_auth = None

    if user_passwd:
        user_passwd = unquote(user_passwd)
        auth = base64.b64encode(user_passwd.encode()).decode('ascii')
    else:
        auth = None
    http_conn = connection_factory(host)
    headers = {}
    if proxy_auth:
        headers["Proxy-Authorization"] = "Basic %s" % proxy_auth
    if auth:
        headers["Authorization"] = "Basic %s" % auth
    if realhost:
        headers["Host"] = realhost

    # Add Connection:close as we don't support persistent connections yet.
    # This helps in closing the socket and avoiding ResourceWarning

    headers["Connection"] = "close"

    for header, value in self.addheaders:
        headers[header] = value

    if data is not None:
        headers["Content-Type"] = "application/x-www-form-urlencoded"
        http_conn.request("POST", selector, data, headers)
    else:
        http_conn.request("GET", selector, headers=headers)

    try:
        response = http_conn.getresponse()
    except http_client.BadStatusLine:
        # something went wrong with the HTTP status line
        raise URLError("http protocol error: bad status line")

    # According to RFC 2616, "2xx" code indicates that the client's
    # request was successfully received, understood, and accepted.
    if 200 <= response.status < 300:
        return addinfourl(response, response.msg, "http:" + url,
                          response.status)
    else:
        # Delegate non-2xx statuses to the error-handler dispatch.
        return self.http_error(
            url, response.fp,
            response.status, response.reason, response.msg, data)
def open_http(self, url, data=None):
    """Use HTTP protocol."""
    factory = http_client.HTTPConnection
    return self._open_generic_http(factory, url, data)
def http_error(self, url, fp, errcode, errmsg, headers, data=None):
    """Handle http errors.

    Derived class can override this, or provide specific handlers
    named http_error_DDD where DDD is the 3-digit error code."""
    # Look up a code-specific handler; getattr with a default replaces
    # the hasattr/getattr pair without changing the dispatch behavior.
    handler = getattr(self, 'http_error_%d' % errcode, None)
    if handler is not None:
        if data is None:
            outcome = handler(url, fp, errcode, errmsg, headers)
        else:
            outcome = handler(url, fp, errcode, errmsg, headers, data)
        if outcome:
            return outcome
    return self.http_error_default(url, fp, errcode, errmsg, headers)
def http_error_default(self, url, fp, errcode, errmsg, headers):
    """Default error handler: close the connection and raise IOError."""
    fp.close()
    err = HTTPError(url, errcode, errmsg, headers, None)
    raise err
# HTTPS support is only defined when the ssl module imported successfully
# at module load time (_have_ssl flag).
if _have_ssl:
    def _https_connection(self, host):
        # Builds the TLS connection with the opener's client-cert settings;
        # passed as the connection_factory to _open_generic_http().
        return http_client.HTTPSConnection(host,
                                           key_file=self.key_file,
                                           cert_file=self.cert_file)

    def open_https(self, url, data=None):
        """Use HTTPS protocol."""
        return self._open_generic_http(self._https_connection, url, data)
def open_file(self, url):
    """Use local file or FTP depending on form of URL.

    Rejects non-string URLs and file:// URLs whose authority is a
    remote host; everything else is delegated to open_local_file().
    """
    if not isinstance(url, str):
        raise URLError('file error: proxy support for file protocol currently not implemented')
    names_remote_host = (url[:2] == '//'
                         and url[2:3] != '/'
                         and url[2:12].lower() != 'localhost/')
    if names_remote_host:
        raise ValueError("file:// scheme is supported only on localhost")
    return self.open_local_file(url)
def open_local_file(self, url):
    """Use local file.

    Returns an addinfourl wrapping the opened binary file with
    synthesized Content-Type/Content-Length/Last-modified headers.
    Raises URLError if the file does not exist or the host portion
    does not resolve to this machine.
    """
    import future.backports.email.utils as email_utils
    import mimetypes
    host, file = splithost(url)
    localname = url2pathname(file)
    try:
        stats = os.stat(localname)
    except OSError as e:
        raise URLError(e.strerror, e.filename)
    size = stats.st_size
    modified = email_utils.formatdate(stats.st_mtime, usegmt=True)
    mtype = mimetypes.guess_type(url)[0]
    headers = email.message_from_string(
        'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
        (mtype or 'text/plain', size, modified))
    if not host:
        # Host-less URL ("file:///path" or bare path): serve directly.
        urlfile = file
        if file[:1] == '/':
            urlfile = 'file://' + file
        return addinfourl(open(localname, 'rb'), headers, urlfile)
    host, port = splitport(host)
    # A host is only acceptable when it resolves to this machine
    # (localhost or one of our own addresses) and carries no port.
    if (not port
            and socket.gethostbyname(host) in ((localhost(),) + thishost())):
        urlfile = file
        if file[:1] == '/':
            urlfile = 'file://' + file
        elif file[:2] == './':
            raise ValueError("local file url may start with / or file:. Unknown url of type: %s" % url)
        return addinfourl(open(localname, 'rb'), headers, urlfile)
    raise URLError('local file error: not on local host')
def open_ftp(self, url):
    """Use FTP protocol.

    Parses user/password/host/port/path out of the URL, reuses a cached
    ftpwrapper connection when one exists for the same (user, host,
    port, dirs) key, and returns an addinfourl over the transfer socket.
    """
    if not isinstance(url, str):
        raise URLError('ftp error: proxy support for ftp protocol currently not implemented')
    import mimetypes
    host, path = splithost(url)
    if not host: raise URLError('ftp error: no host given')
    host, port = splitport(host)
    user, host = splituser(host)
    if user: user, passwd = splitpasswd(user)
    else: passwd = None
    host = unquote(host)
    user = unquote(user or '')
    passwd = unquote(passwd or '')
    host = socket.gethostbyname(host)
    if not port:
        import ftplib
        port = ftplib.FTP_PORT
    else:
        port = int(port)
    path, attrs = splitattr(path)
    path = unquote(path)
    dirs = path.split('/')
    dirs, file = dirs[:-1], dirs[-1]
    # Leading empty component means an absolute path on the server.
    if dirs and not dirs[0]: dirs = dirs[1:]
    if dirs and not dirs[0]: dirs[0] = '/'
    key = user, host, port, '/'.join(dirs)
    # XXX thread unsafe!
    if len(self.ftpcache) > MAXFTPCACHE:
        # Prune the cache, rather arbitrarily
        for k in self.ftpcache.keys():
            if k != key:
                v = self.ftpcache[k]
                del self.ftpcache[k]
                v.close()
    try:
        if key not in self.ftpcache:
            self.ftpcache[key] = \
                ftpwrapper(user, passwd, host, port, dirs)
        # Default transfer type: directory listing when no file named,
        # binary (image) otherwise; ";type=x" attributes can override.
        if not file: type = 'D'
        else: type = 'I'
        for attr in attrs:
            attr, value = splitvalue(attr)
            if attr.lower() == 'type' and \
               value in ('a', 'A', 'i', 'I', 'd', 'D'):
                type = value.upper()
        (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
        mtype = mimetypes.guess_type("ftp:" + url)[0]
        headers = ""
        if mtype:
            headers += "Content-Type: %s\n" % mtype
        if retrlen is not None and retrlen >= 0:
            headers += "Content-Length: %d\n" % retrlen
        headers = email.message_from_string(headers)
        return addinfourl(fp, headers, "ftp:" + url)
    except ftperrors() as exp:
        raise_with_traceback(URLError('ftp error %r' % exp))
def open_data(self, url, data=None):
    """Use "data" URL.

    Decodes an RFC 2397 data: URL (plain or base64) and returns an
    addinfourl over an in-memory StringIO with synthesized headers.
    """
    if not isinstance(url, str):
        raise URLError('data error: proxy support for data protocol currently not implemented')
    # ignore POSTed data
    #
    # syntax of data URLs:
    # dataurl   := "data:" [ mediatype ] [ ";base64" ] "," data
    # mediatype := [ type "/" subtype ] *( ";" parameter )
    # data      := *urlchar
    # parameter := attribute "=" value
    try:
        [type, data] = url.split(',', 1)
    except ValueError:
        raise IOError('data error', 'bad data URL')
    if not type:
        type = 'text/plain;charset=US-ASCII'
    # A trailing ";token" with no '=' is the encoding (e.g. ";base64").
    semi = type.rfind(';')
    if semi >= 0 and '=' not in type[semi:]:
        encoding = type[semi+1:]
        type = type[:semi]
    else:
        encoding = ''
    msg = []
    msg.append('Date: %s'%time.strftime('%a, %d %b %Y %H:%M:%S GMT',
                                        time.gmtime(time.time())))
    msg.append('Content-type: %s' % type)
    if encoding == 'base64':
        # XXX is this encoding/decoding ok?
        data = base64.decodebytes(data.encode('ascii')).decode('latin-1')
    else:
        data = unquote(data)
    msg.append('Content-Length: %d' % len(data))
    msg.append('')
    msg.append(data)
    msg = '\n'.join(msg)
    headers = email.message_from_string(msg)
    f = io.StringIO(msg)
    #f.fileno = None     # needed for addinfourl
    return addinfourl(f, headers, url)
class FancyURLopener(URLopener):
    """Derived class with handlers for errors we can handle (perhaps).

    Adds: redirect following (301/302/303/307) with a recursion guard,
    Basic authentication retries for 401/407, and a cached realm@host
    credential store fed by prompt_user_passwd().
    """

    def __init__(self, *args, **kwargs):
        URLopener.__init__(self, *args, **kwargs)
        # (user, passwd) cache keyed by "realm@host" -- see get_user_passwd().
        self.auth_cache = {}
        # Consecutive-redirect counter and its cap (anti-loop guard).
        self.tries = 0
        self.maxtries = 10

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        """Default error handling -- don't raise an exception."""
        # Unlike URLopener, errors come back as a normal response object.
        return addinfourl(fp, headers, "http:" + url, errcode)

    def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 302 -- relocated (temporarily)."""
        self.tries += 1
        if self.maxtries and self.tries >= self.maxtries:
            # Too many consecutive redirects: report a synthetic 500.
            if hasattr(self, "http_error_500"):
                meth = self.http_error_500
            else:
                meth = self.http_error_default
            self.tries = 0
            return meth(url, fp, 500,
                        "Internal Server Error: Redirect Recursion", headers)
        result = self.redirect_internal(url, fp, errcode, errmsg, headers,
                                        data)
        self.tries = 0
        return result

    def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
        # Prefer Location:, fall back to the older URI: header.
        if 'location' in headers:
            newurl = headers['location']
        elif 'uri' in headers:
            newurl = headers['uri']
        else:
            return
        fp.close()

        # In case the server sent a relative URL, join with original:
        newurl = urljoin(self.type + ":" + url, newurl)

        urlparts = urlparse(newurl)

        # For security reasons, we don't allow redirection to anything other
        # than http, https and ftp.

        # We are using newer HTTPError with older redirect_internal method
        # This older method will get deprecated in 3.3
        if urlparts.scheme not in ('http', 'https', 'ftp', ''):
            raise HTTPError(newurl, errcode,
                            errmsg +
                            " Redirection to url '%s' is not allowed." % newurl,
                            headers, fp)

        return self.open(newurl)

    def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 301 -- also relocated (permanently)."""
        return self.http_error_302(url, fp, errcode, errmsg, headers, data)

    def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 303 -- also relocated (essentially identical to 302)."""
        return self.http_error_302(url, fp, errcode, errmsg, headers, data)

    def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 307 -- relocated, but turn POST into error."""
        # 307 must not silently replay a POST as a GET, so POSTs fall
        # through to the default (non-redirecting) handler.
        if data is None:
            return self.http_error_302(url, fp, errcode, errmsg, headers, data)
        else:
            return self.http_error_default(url, fp, errcode, errmsg, headers)

    def http_error_401(self, url, fp, errcode, errmsg, headers, data=None,
            retry=False):
        """Error 401 -- authentication required.
        This function supports Basic authentication only."""
        # Each guard below delegates to URLopener.http_error_default,
        # which raises, so no explicit return is needed after it.
        if 'www-authenticate' not in headers:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        stuff = headers['www-authenticate']
        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
        if not match:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        scheme, realm = match.groups()
        if scheme.lower() != 'basic':
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        if not retry:
            URLopener.http_error_default(self, url, fp, errcode, errmsg,
                                         headers)
        # Dispatch to retry_http_basic_auth / retry_https_basic_auth
        # depending on the scheme of the current request.
        name = 'retry_' + self.type + '_basic_auth'
        if data is None:
            return getattr(self,name)(url, realm)
        else:
            return getattr(self,name)(url, realm, data)

    def http_error_407(self, url, fp, errcode, errmsg, headers, data=None,
            retry=False):
        """Error 407 -- proxy authentication required.
        This function supports Basic authentication only."""
        if 'proxy-authenticate' not in headers:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        stuff = headers['proxy-authenticate']
        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
        if not match:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        scheme, realm = match.groups()
        if scheme.lower() != 'basic':
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        if not retry:
            URLopener.http_error_default(self, url, fp, errcode, errmsg,
                                         headers)
        name = 'retry_proxy_' + self.type + '_basic_auth'
        if data is None:
            return getattr(self,name)(url, realm)
        else:
            return getattr(self,name)(url, realm, data)

    def retry_proxy_http_basic_auth(self, url, realm, data=None):
        # Re-issue the request with user:passwd embedded in the proxy URL.
        host, selector = splithost(url)
        newurl = 'http://' + host + selector
        proxy = self.proxies['http']
        urltype, proxyhost = splittype(proxy)
        proxyhost, proxyselector = splithost(proxyhost)
        # Strip any credentials already present in the proxy host.
        i = proxyhost.find('@') + 1
        proxyhost = proxyhost[i:]
        user, passwd = self.get_user_passwd(proxyhost, realm, i)
        if not (user or passwd): return None
        proxyhost = "%s:%s@%s" % (quote(user, safe=''),
                                  quote(passwd, safe=''), proxyhost)
        self.proxies['http'] = 'http://' + proxyhost + proxyselector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_proxy_https_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        newurl = 'https://' + host + selector
        proxy = self.proxies['https']
        urltype, proxyhost = splittype(proxy)
        proxyhost, proxyselector = splithost(proxyhost)
        i = proxyhost.find('@') + 1
        proxyhost = proxyhost[i:]
        user, passwd = self.get_user_passwd(proxyhost, realm, i)
        if not (user or passwd): return None
        proxyhost = "%s:%s@%s" % (quote(user, safe=''),
                                  quote(passwd, safe=''), proxyhost)
        self.proxies['https'] = 'https://' + proxyhost + proxyselector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_http_basic_auth(self, url, realm, data=None):
        # Re-issue the request with user:passwd embedded in the URL itself.
        host, selector = splithost(url)
        i = host.find('@') + 1
        host = host[i:]
        user, passwd = self.get_user_passwd(host, realm, i)
        if not (user or passwd): return None
        host = "%s:%s@%s" % (quote(user, safe=''),
                             quote(passwd, safe=''), host)
        newurl = 'http://' + host + selector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_https_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        i = host.find('@') + 1
        host = host[i:]
        user, passwd = self.get_user_passwd(host, realm, i)
        if not (user or passwd): return None
        host = "%s:%s@%s" % (quote(user, safe=''),
                             quote(passwd, safe=''), host)
        newurl = 'https://' + host + selector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def get_user_passwd(self, host, realm, clear_cache=0):
        # clear_cache is truthy when the previous (cached) credentials
        # already failed -- drop them and prompt again.
        key = realm + '@' + host.lower()
        if key in self.auth_cache:
            if clear_cache:
                del self.auth_cache[key]
            else:
                return self.auth_cache[key]
        user, passwd = self.prompt_user_passwd(host, realm)
        if user or passwd: self.auth_cache[key] = (user, passwd)
        return user, passwd

    def prompt_user_passwd(self, host, realm):
        """Override this in a GUI environment!"""
        import getpass
        try:
            user = input("Enter username for %s at %s: " % (realm, host))
            passwd = getpass.getpass("Enter password for %s in %s at %s: " %
                (user, realm, host))
            return user, passwd
        except KeyboardInterrupt:
            print()
            return None, None
# Utility functions

_localhost = None
def localhost():
    """Return the IP address of the magic hostname 'localhost'."""
    global _localhost
    cached = _localhost
    if cached is None:
        # Resolve once and memoize for the life of the process.
        cached = socket.gethostbyname('localhost')
        _localhost = cached
    return cached
_thishost = None
def thishost():
    """Return the IP addresses of the current host."""
    global _thishost
    if _thishost is not None:
        return _thishost
    try:
        addrs = socket.gethostbyname_ex(socket.gethostname())[2]
    except socket.gaierror:
        # Hostname that does not resolve: fall back to loopback addresses.
        addrs = socket.gethostbyname_ex('localhost')[2]
    _thishost = tuple(addrs)
    return _thishost
_ftperrors = None
def ftperrors():
    """Return the set of errors raised by the FTP class."""
    global _ftperrors
    if _ftperrors is not None:
        return _ftperrors
    # Import lazily so modules that never touch FTP avoid loading ftplib.
    import ftplib
    _ftperrors = ftplib.all_errors
    return _ftperrors
_noheaders = None
def noheaders():
    """Return an empty email Message object."""
    global _noheaders
    empty = _noheaders
    if empty is None:
        empty = email.message_from_string("")
        _noheaders = empty
    return empty
# Utility classes

class ftpwrapper(object):
    """Class used by open_ftp() for cache of open FTP connections.

    Wraps one ftplib.FTP connection; refcount tracks outstanding data
    files so the control connection is only torn down once nothing is
    reading from it (unless keepalive keeps it cached).
    """

    def __init__(self, user, passwd, host, port, dirs, timeout=None,
                 persistent=True):
        self.user = user
        self.passwd = passwd
        self.host = host
        self.port = port
        self.dirs = dirs
        self.timeout = timeout
        # Number of open data-file objects handed out by retrfile().
        self.refcount = 0
        # When True the control connection outlives its data files.
        self.keepalive = persistent
        self.init()

    def init(self):
        # (Re)establish the control connection and cwd into self.dirs.
        import ftplib
        self.busy = 0
        self.ftp = ftplib.FTP()
        self.ftp.connect(self.host, self.port, self.timeout)
        self.ftp.login(self.user, self.passwd)
        _target = '/'.join(self.dirs)
        self.ftp.cwd(_target)

    def retrfile(self, file, type):
        """Start a RETR (or LIST) transfer; return (file-like, length)."""
        import ftplib
        self.endtransfer()
        # 'd'/'D' requests a directory listing in ASCII mode.
        if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
        else: cmd = 'TYPE ' + type; isdir = 0
        try:
            self.ftp.voidcmd(cmd)
        except ftplib.all_errors:
            # Stale cached connection: reconnect once and retry.
            self.init()
            self.ftp.voidcmd(cmd)
        conn = None
        if file and not isdir:
            # Try to retrieve as a file
            try:
                cmd = 'RETR ' + file
                conn, retrlen = self.ftp.ntransfercmd(cmd)
            except ftplib.error_perm as reason:
                # 550 means "not a plain file" -- fall through to LIST.
                if str(reason)[:3] != '550':
                    raise_with_traceback(URLError('ftp error: %r' % reason))
        if not conn:
            # Set transfer mode to ASCII!
            self.ftp.voidcmd('TYPE A')
            # Try a directory listing. Verify that directory exists.
            if file:
                pwd = self.ftp.pwd()
                try:
                    try:
                        self.ftp.cwd(file)
                    except ftplib.error_perm as reason:
                        ### Was:
                        # raise URLError('ftp error: %r' % reason) from reason
                        exc = URLError('ftp error: %r' % reason)
                        exc.__cause__ = reason
                        raise exc
                finally:
                    self.ftp.cwd(pwd)
                cmd = 'LIST ' + file
            else:
                cmd = 'LIST'
            conn, retrlen = self.ftp.ntransfercmd(cmd)
        self.busy = 1

        # file_close() runs when the caller closes the returned object,
        # decrementing refcount and possibly closing the connection.
        ftpobj = addclosehook(conn.makefile('rb'), self.file_close)
        self.refcount += 1
        conn.close()
        # Pass back both a suitably decorated object and a retrieval length
        return (ftpobj, retrlen)

    def endtransfer(self):
        self.busy = 0

    def close(self):
        # Give up the keepalive; actually closes only when no readers left.
        self.keepalive = False
        if self.refcount <= 0:
            self.real_close()

    def file_close(self):
        self.endtransfer()
        self.refcount -= 1
        if self.refcount <= 0 and not self.keepalive:
            self.real_close()

    def real_close(self):
        self.endtransfer()
        try:
            self.ftp.close()
        except ftperrors():
            pass
# Proxy handling
def getproxies_environment():
    """Return a dictionary of scheme -> proxy server URL mappings.

    Scan the environment for variables named <scheme>_proxy;
    this seems to be the standard convention.  If you need a
    different way, you can pass a proxies dictionary to the
    [Fancy]URLopener constructor.
    """
    found = {}
    for key, value in os.environ.items():
        lowered = key.lower()
        # Only non-empty values for names like http_proxy / HTTPS_PROXY.
        if value and lowered.endswith('_proxy'):
            found[lowered[:-6]] = value
    return found
def proxy_bypass_environment(host):
    """Test if proxies should not be used for a particular host.

    Checks the environment for a variable named no_proxy, which should
    be a list of DNS suffixes separated by commas, or '*' for all hosts.
    Returns 1 to bypass the proxy, 0 otherwise.
    """
    no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '')
    # '*' is special case for always bypass
    if no_proxy == '*':
        return 1
    # strip port off host
    hostonly, port = splitport(host)
    # check if the host ends with any of the DNS suffixes
    suffixes = (entry.strip() for entry in no_proxy.split(','))
    for suffix in suffixes:
        if suffix and (hostonly.endswith(suffix) or host.endswith(suffix)):
            return 1
    # otherwise, don't bypass
    return 0
# This code tests an OSX specific data structure but is testable on all
# platforms
def _proxy_bypass_macosx_sysconf(host, proxy_settings):
    """
    Return True iff this host shouldn't be accessed using a proxy

    This function uses the MacOSX framework SystemConfiguration
    to fetch the proxy information.

    proxy_settings come from _scproxy._get_proxy_settings or get mocked ie:
    { 'exclude_simple': bool,
      'exceptions': ['foo.bar', '*.bar.com', '127.0.0.1', '10.1', '10.0/16']
    }
    """
    from fnmatch import fnmatch

    hostonly, port = splitport(host)

    def ip2num(ipAddr):
        # Pack a dotted quad (possibly truncated, e.g. "10.1") into a
        # 32-bit int; missing octets are zero-padded on the right.
        parts = ipAddr.split('.')
        parts = list(map(int, parts))
        if len(parts) != 4:
            parts = (parts + [0, 0, 0, 0])[:4]
        return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]

    # Check for simple host names:
    if '.' not in host:
        if proxy_settings['exclude_simple']:
            return True

    hostIP = None

    for value in proxy_settings.get('exceptions', ()):
        # Items in the list are strings like these: *.local, 169.254/16
        if not value: continue

        m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
        if m is not None:
            # Numeric exception: compare host's IP against the network.
            if hostIP is None:
                try:
                    hostIP = socket.gethostbyname(hostonly)
                    hostIP = ip2num(hostIP)
                except socket.error:
                    continue

            base = ip2num(m.group(1))
            mask = m.group(2)
            if mask is None:
                # No explicit prefix: assume 8 bits per dotted component.
                mask = 8 * (m.group(1).count('.') + 1)
            else:
                mask = int(mask[1:])
            # Convert prefix length to a right-shift amount.
            mask = 32 - mask

            if (hostIP >> mask) == (base >> mask):
                return True

        elif fnmatch(host, value):
            # Non-numeric exception: treated as a shell-style glob.
            return True

    return False
# Platform-specific definitions of getproxies()/proxy_bypass():
# macOS consults SystemConfiguration, Windows the registry, and
# everything else falls back to environment variables.  Environment
# settings always win when present.
if sys.platform == 'darwin':
    from _scproxy import _get_proxy_settings, _get_proxies

    def proxy_bypass_macosx_sysconf(host):
        proxy_settings = _get_proxy_settings()
        return _proxy_bypass_macosx_sysconf(host, proxy_settings)

    def getproxies_macosx_sysconf():
        """Return a dictionary of scheme -> proxy server URL mappings.

        This function uses the MacOSX framework SystemConfiguration
        to fetch the proxy information.
        """
        return _get_proxies()

    def proxy_bypass(host):
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_macosx_sysconf(host)

    def getproxies():
        return getproxies_environment() or getproxies_macosx_sysconf()

elif os.name == 'nt':
    def getproxies_registry():
        """Return a dictionary of scheme -> proxy server URL mappings.

        Win32 uses the registry to store proxies.
        """
        proxies = {}
        try:
            import winreg
        except ImportError:
            # Std module, so should be around - but you never know!
            return proxies
        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = winreg.QueryValueEx(internetSettings,
                                              'ProxyEnable')[0]
            if proxyEnable:
                # Returned as Unicode but problems if not converted to ASCII
                proxyServer = str(winreg.QueryValueEx(internetSettings,
                                                      'ProxyServer')[0])
                if '=' in proxyServer:
                    # Per-protocol settings
                    for p in proxyServer.split(';'):
                        protocol, address = p.split('=', 1)
                        # See if address has a type:// prefix
                        if not re.match('^([^/:]+)://', address):
                            address = '%s://%s' % (protocol, address)
                        proxies[protocol] = address
                else:
                    # Use one setting for all protocols
                    if proxyServer[:5] == 'http:':
                        proxies['http'] = proxyServer
                    else:
                        proxies['http'] = 'http://%s' % proxyServer
                        proxies['https'] = 'https://%s' % proxyServer
                        proxies['ftp'] = 'ftp://%s' % proxyServer
            internetSettings.Close()
        except (WindowsError, ValueError, TypeError):
            # Either registry key not found etc, or the value in an
            # unexpected format.
            # proxies already set up to be empty so nothing to do
            pass
        return proxies

    def getproxies():
        """Return a dictionary of scheme -> proxy server URL mappings.

        Returns settings gathered from the environment, if specified,
        or the registry.
        """
        return getproxies_environment() or getproxies_registry()

    def proxy_bypass_registry(host):
        try:
            import winreg
        except ImportError:
            # Std modules, so should be around - but you never know!
            return 0
        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = winreg.QueryValueEx(internetSettings,
                                              'ProxyEnable')[0]
            proxyOverride = str(winreg.QueryValueEx(internetSettings,
                                                    'ProxyOverride')[0])
            # ^^^^ Returned as Unicode but problems if not converted to ASCII
        except WindowsError:
            return 0
        if not proxyEnable or not proxyOverride:
            return 0
        # try to make a host list from name and IP address.
        rawHost, port = splitport(host)
        host = [rawHost]
        try:
            addr = socket.gethostbyname(rawHost)
            if addr != rawHost:
                host.append(addr)
        except socket.error:
            pass
        try:
            fqdn = socket.getfqdn(rawHost)
            if fqdn != rawHost:
                host.append(fqdn)
        except socket.error:
            pass
        # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        proxyOverride = proxyOverride.split(';')
        # now check if we match one of the registry values.
        for test in proxyOverride:
            if test == '<local>':
                if '.' not in rawHost:
                    return 1
            # Translate the registry glob into a regular expression.
            test = test.replace(".", r"\.")     # mask dots
            test = test.replace("*", r".*")     # change glob sequence
            test = test.replace("?", r".")      # change glob char
            for val in host:
                if re.match(test, val, re.I):
                    return 1
        return 0

    def proxy_bypass(host):
        """Return a dictionary of scheme -> proxy server URL mappings.

        Returns settings gathered from the environment, if specified,
        or the registry.
        """
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_registry(host)

else:
    # By default use environment variables
    getproxies = getproxies_environment
    proxy_bypass = proxy_bypass_environment
| lgpl-3.0 |
brkrishna/freelance | bolly_reviews/process_MZ.py | 1 | 3684 | #-------------------------------------------------------------------------------
# Name: process_MZ
# Purpose:
#
# Author: Ramakrishna
#
# Created: 17/04/2014
# Copyright: (c) Ramakrishna 2014
# Licence: <your licence>
#-------------------------------------------------------------------------------
from bs4 import BeautifulSoup
from datetime import date
import re
import my_caching
def process(source_cd, base_url, data):
    """Extract movie-review rows from a parsed MouthShut-style listing page.

    Arguments:
    - source_cd: source-site code copied into each row and passed to
      my_caching.get_content() when fetching individual review pages.
    - base_url: unused here; kept for interface parity with the other
      process_* modules.
    - data: BeautifulSoup document of the listing page.

    Returns a list of row dicts (keys: name, source_cd, rvw_link,
    rvw_smy, and optionally critic, year, rating, max_rtng), or None if
    an unexpected exception occurs (the error is printed, preserving the
    original best-effort behaviour).
    """
    try:
        record = []

        # Collect the distinct review links from the listing container.
        unique_anchors = set()
        page_content = data.find('div', {'class': 'hollywood_middle_container border_radius'})
        if page_content is not None:
            unique_anchors = set(a['href'] for a in
                                 page_content.find_all('a', {'rel': 'nofollow'}))

        for url in unique_anchors:
            row = {}
            if url is not None:
                response = my_caching.get_content(source_cd, url)
                if response is not None:
                    soup = BeautifulSoup(response)
                    page = soup.find('div', {'class': 'posts-single'})
                    if page is not None:
                        title = page.find('h2')
                        if title is not None:
                            row['name'] = title.text.strip()
                            row['source_cd'] = source_cd
                            row['rvw_link'] = url
                            row['rvw_smy'] = ''
                            metadata = page.find('div', {'class': 'categories-single'})
                            # NOTE(review): the original checked
                            # `metadata != None` twice in a row; the
                            # duplicate has been removed.
                            if metadata is not None:
                                critic = metadata.find('a')
                                if critic is not None:
                                    row['critic'] = critic.text.strip()
                                # First 4-digit group in the metadata is the year.
                                years = re.findall(r'\d{4}', metadata.text.strip())
                                if len(years) > 0:
                                    row['year'] = years[0]
                            review = page.find('div', {'class': 'postdescription-single'})
                            if review is not None:
                                # Last non-empty paragraph wins as the summary.
                                pColl = review.find_all('p')
                                if pColl is not None:
                                    for p in pColl:
                                        rvw_smy = p.text.strip()
                                        if rvw_smy != '':
                                            row['rvw_smy'] = rvw_smy
                                # Rating lives in an <li> like "Ratings: 3/5".
                                pColl = review.find_all('li')
                                if pColl is not None:
                                    for p in pColl:
                                        text = p.text.strip()
                                        if 'Ratings' in text:
                                            rating = text[text.find(':') + 1:text.find('/')]
                                            if rating != '':
                                                row['rating'] = rating
                                                row['max_rtng'] = 5
                                            break
            record.append(row)
        return record
    except Exception as e:
        # Best-effort scraper: report and return None rather than abort.
        print(e.__doc__)
        print(e.args)
| gpl-2.0 |
jawatech/emacs-24.5 | js/node_modules/node-gyp/gyp/tools/pretty_vcproj.py | 2637 | 9586 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Make the format of a vcproj really pretty.
This script normalize and sort an xml. It also fetches all the properties
inside linked vsprops and include them explicitly in the vcproj.
It outputs the resulting xml to stdout.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import os
import sys
from xml.dom.minidom import parse
from xml.dom.minidom import Node
# Map of textual replacements (key -> value) parsed from the "key=value"
# command-line arguments in main(); consumed by FixFilenames().
REPLACEMENTS = dict()
# Raw argv of the current invocation; set by main(), read elsewhere for
# the vcproj path (ARGUMENTS[1]).
ARGUMENTS = None
class CmpTuple(object):
  """Compare function between 2 tuples, ordering by their first element."""

  def __call__(self, x, y):
    first_of_x = x[0]
    first_of_y = y[0]
    return cmp(first_of_x, first_of_y)
class CmpNode(object):
  """Compare function between 2 xml nodes.

  Orders nodes by a flattened string of their name, value, Name
  attribute, and remaining attributes sorted by attribute name.
  """

  def __call__(self, x, y):
    def get_string(node):
      pieces = ["node", node.nodeName]
      if node.nodeValue:
        pieces.append(node.nodeValue)

      if node.attributes:
        # We first sort by name, if present.
        pieces.append(node.getAttribute("Name"))

        attr_pairs = []
        for (name, value) in node.attributes.items():
          attr_pairs.append((name, value))
        attr_pairs.sort(CmpTuple())
        for (name, value) in attr_pairs:
          pieces.append(name)
          pieces.append(value)

      return "".join(pieces)

    return cmp(get_string(x), get_string(y))
def PrettyPrintNode(node, indent=0):
  # Recursively pretty-print an XML node to stdout with 2-space-per-level
  # indentation; attributes are sorted by name, one per line.
  if node.nodeType == Node.TEXT_NODE:
    if node.data.strip():
      print '%s%s' % (' '*indent, node.data.strip())
    return

  if node.childNodes:
    node.normalize()

  # Get the number of attributes
  attr_count = 0
  if node.attributes:
    attr_count = node.attributes.length

  # Print the main tag
  if attr_count == 0:
    print '%s<%s>' % (' '*indent, node.nodeName)
  else:
    print '%s<%s' % (' '*indent, node.nodeName)

    all_attributes = []
    for (name, value) in node.attributes.items():
      all_attributes.append((name, value))
      # NOTE(review): sorting inside the collection loop re-sorts the
      # list every iteration; harmless but redundant.
      all_attributes.sort(CmpTuple())
    for (name, value) in all_attributes:
      print '%s  %s="%s"' % (' '*indent, name, value)
    print '%s>' % (' '*indent)
  if node.nodeValue:
    print '%s  %s' % (' '*indent, node.nodeValue)

  for sub_node in node.childNodes:
    PrettyPrintNode(sub_node, indent=indent+2)
  print '%s</%s>' % (' '*indent, node.nodeName)
def FlattenFilter(node):
  """Returns a list of all the node and sub nodes.

  Recursively inlines the children of 'Filter' elements; the special
  '_excluded_files' filter (and everything under it) is dropped.
  """
  if (node.attributes and
      node.getAttribute('Name') == '_excluded_files'):
    # We don't add the "_excluded_files" filter.
    return []

  flattened = []
  for child in node.childNodes:
    if child.nodeName == 'Filter':
      flattened.extend(FlattenFilter(child))
    else:
      flattened.append(child)

  return flattened
def FixFilenames(filenames, current_directory):
  """Apply REPLACEMENTS and absolutize paths relative to current_directory.

  Empty entries are dropped; entries starting with '$' (MSBuild macros)
  are kept verbatim.
  """
  fixed = []
  for entry in filenames:
    if not entry:
      continue
    for key in REPLACEMENTS:
      entry = entry.replace(key, REPLACEMENTS[key])
    # abspath() resolves against the process cwd, so move there first.
    os.chdir(current_directory)
    entry = entry.strip('"\' ')
    if entry.startswith('$'):
      fixed.append(entry)
    else:
      fixed.append(os.path.abspath(entry))
  return fixed
def AbsoluteNode(node):
  """Makes all the properties we know about in this node absolute."""
  if node.attributes:
    for (name, value) in node.attributes.items():
      # Only these attributes are path-valued and worth rewriting.
      if name in ['InheritedPropertySheets', 'RelativePath',
                  'AdditionalIncludeDirectories',
                  'IntermediateDirectory', 'OutputDirectory',
                  'AdditionalLibraryDirectories']:
        # We want to fix up these paths
        path_list = value.split(';')
        new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
        node.setAttribute(name, ';'.join(new_list))
      # Drop attributes that ended up empty.
      if not value:
        node.removeAttribute(name)
def CleanupVcproj(node):
  """For each sub node, we call recursively this function.

  Normalizes the tree in place: absolutizes paths, strips whitespace
  text nodes, sorts and dedups ';'-separated attribute values, flattens
  Filter elements, and reorders children deterministically.
  """
  for sub_node in node.childNodes:
    AbsoluteNode(sub_node)
    CleanupVcproj(sub_node)

  # Normalize the node, and remove all extranous whitespaces.
  for sub_node in node.childNodes:
    if sub_node.nodeType == Node.TEXT_NODE:
      sub_node.data = sub_node.data.replace("\r", "")
      sub_node.data = sub_node.data.replace("\n", "")
      sub_node.data = sub_node.data.rstrip()

  # Fix all the semicolon separated attributes to be sorted, and we also
  # remove the dups.
  if node.attributes:
    for (name, value) in node.attributes.items():
      sorted_list = sorted(value.split(';'))
      unique_list = []
      for i in sorted_list:
        if not unique_list.count(i):
          unique_list.append(i)
      node.setAttribute(name, ';'.join(unique_list))
      if not value:
        node.removeAttribute(name)

  if node.childNodes:
    node.normalize()

  # For each node, take a copy, and remove it from the list.
  node_array = []
  while node.childNodes and node.childNodes[0]:
    # Take a copy of the node and remove it from the list.
    current = node.childNodes[0]
    node.removeChild(current)

    # If the child is a filter, we want to append all its children
    # to this same list.
    if current.nodeName == 'Filter':
      node_array.extend(FlattenFilter(current))
    else:
      node_array.append(current)

  # Sort the list.
  node_array.sort(CmpNode())

  # Insert the nodes in the correct order.
  for new_node in node_array:
    # But don't append empty tool node.
    if new_node.nodeName == 'Tool':
      if new_node.attributes and new_node.attributes.length == 1:
        # This one was empty.
        continue
    if new_node.nodeName == 'UserMacro':
      continue
    node.appendChild(new_node)
def GetConfiguationNodes(vcproj):
  """Return the Configuration elements under the Configurations node."""
  #TODO(nsylvain): Find a better way to navigate the xml.
  found = []
  for child in vcproj.childNodes:
    if child.nodeName != "Configurations":
      continue
    for config in child.childNodes:
      if config.nodeName == "Configuration":
        found.append(config)
  return found
def GetChildrenVsprops(filename):
  """Return absolutized paths of the property sheets inherited by filename."""
  dom = parse(filename)
  root = dom.documentElement
  if root.attributes:
    vsprops = root.getAttribute('InheritedPropertySheets')
    return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
  return []
def SeekToNode(node1, child2):
  """Find the child of |node1| that corresponds to |child2|.

  Two elements correspond when they share both the tag name and the value of
  their "Name" attribute. Returns the matching child, or None when |child2|
  is a text node, has no "Name" attribute, or has no counterpart.
  """
  # Text nodes carry no attributes, so there is nothing to match on.
  if child2.nodeType == Node.TEXT_NODE:
    return None
  # Without a Name attribute we have no merge key.
  wanted_name = child2.getAttribute("Name")
  if not wanted_name:
    return None
  # Scan node1's children for an element with the same tag and Name.
  for candidate in node1.childNodes:
    if (candidate.nodeName == child2.nodeName and
        candidate.getAttribute("Name") == wanted_name):
      return candidate
  # No match. We give up.
  return None
def MergeAttributes(node1, node2):
  """Merge the attributes of |node2| into |node1|.

  The "Name" attribute is never merged (it identifies the node). Attributes
  missing on |node1| are copied; attributes present on both with different
  values are concatenated with ';'. 'InheritedPropertySheets' is dropped from
  |node1| afterwards, since it is useless once the sheets are merged.
  """
  if not node2.attributes:
    return
  for (attr_name, incoming) in node2.attributes.items():
    # The Name attribute identifies the node and must not be merged.
    if attr_name == 'Name':
      continue
    existing = node1.getAttribute(attr_name)
    if not existing:
      # Not present on the target yet: simply copy it over.
      node1.setAttribute(attr_name, incoming)
    elif existing != incoming:
      # Present with a different value: keep both, semicolon separated.
      node1.setAttribute(attr_name, ';'.join([existing, incoming]))
    # Property-sheet references are useless once merged, so drop them.
    if attr_name == 'InheritedPropertySheets':
      node1.removeAttribute(attr_name)
def MergeProperties(node1, node2):
  """Recursively merge the attributes and children of |node2| into |node1|.

  Children that have a counterpart in |node1| (same tag and Name attribute,
  per SeekToNode) are merged in place; the rest are imported as deep copies.
  """
  MergeAttributes(node1, node2)
  for incoming_child in node2.childNodes:
    counterpart = SeekToNode(node1, incoming_child)
    if counterpart:
      # Same element exists on both sides: merge them in place.
      MergeProperties(counterpart, incoming_child)
    else:
      # No counterpart: import a deep copy of the incoming subtree.
      node1.appendChild(incoming_child.cloneNode(True))
def main(argv):
  """Main function of this vcproj prettifier.

  Args:
    argv: [script_name, vcproj_path, key1=value1, key2=value2, ...]; every
      key=value pair is recorded in the module-level REPLACEMENTS table used
      when fixing filenames.

  Returns:
    0 on success, 1 on bad usage.
  """
  global ARGUMENTS
  ARGUMENTS = argv
  # Check that we at least received the vcproj path (argv[0] is the script).
  if len(argv) < 2:
    print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
           '[key2=value2]' % argv[0])
    return 1
  # Parse the key=value replacement pairs into the global table.
  for i in range(2, len(argv)):
    (key, value) = argv[i].split('=')
    REPLACEMENTS[key] = value
  # Open the vcproj and parse the xml.
  dom = parse(argv[1])
  # First thing we need to do is find the Configuration nodes and merge them
  # with the vsprops they include.
  for configuration_node in GetConfiguationNodes(dom.documentElement):
    # Get the property sheets associated with this configuration.
    vsprops = configuration_node.getAttribute('InheritedPropertySheets')
    # Fix the filenames to be absolute (relative to the vcproj's directory).
    vsprops_list = FixFilenames(vsprops.strip().split(';'),
                                os.path.dirname(argv[1]))
    # Extend the list of vsprops with all vsprops contained in the current
    # vsprops. NOTE: the list is grown while being iterated, which is how the
    # transitive closure of inherited sheets is collected.
    for current_vsprops in vsprops_list:
      vsprops_list.extend(GetChildrenVsprops(current_vsprops))
    # Now that we have all the vsprops, we need to merge them.
    for current_vsprops in vsprops_list:
      MergeProperties(configuration_node,
                      parse(current_vsprops).documentElement)
  # Now that everything is merged, we need to clean up the xml.
  CleanupVcproj(dom.documentElement)
  # Finally, pretty-print the cleaned-up vcproj back to the user.
  #print dom.toprettyxml(newl="\n")
  PrettyPrintNode(dom.documentElement)
  return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| apache-2.0 |
Fillerix99/autokey | src/lib/gtkui/dialogs.py | 47 | 22887 | # -*- coding: utf-8 -*-
# Copyright (C) 2011 Chris Dekter
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging, sys, os, re
from gi.repository import Gtk, Gdk
#import gettext
import locale
GETTEXT_DOMAIN = 'autokey'
locale.setlocale(locale.LC_ALL, '')
#for module in Gtk.glade, gettext:
# module.bindtextdomain(GETTEXT_DOMAIN)
# module.textdomain(GETTEXT_DOMAIN)
__all__ = ["validate", "EMPTY_FIELD_REGEX", "AbbrSettingsDialog", "HotkeySettingsDialog", "WindowFilterSettingsDialog", "RecordDialog"]
from autokey import model, iomediator
import configwindow
WORD_CHAR_OPTIONS = {
"All non-word" : model.DEFAULT_WORDCHAR_REGEX,
"Space and Enter" : r"[^ \n]",
"Tab" : r"[^\t]"
}
WORD_CHAR_OPTIONS_ORDERED = ["All non-word", "Space and Enter", "Tab"]
EMPTY_FIELD_REGEX = re.compile(r"^ *$", re.UNICODE)
def validate(expression, message, widget, parent):
    """Show a modal warning dialog when a validation check fails.

    @param expression: result of the validation check; falsy means invalid
    @param message: warning text shown to the user when the check failed
    @param widget: widget to refocus after dismissing the warning (may be None)
    @param parent: transient parent window for the message dialog
    @return: the value of expression, so callers can chain the check
    """
    if expression:
        return expression
    # Validation failed: warn the user and put focus back on the culprit.
    dlg = Gtk.MessageDialog(parent, Gtk.DialogFlags.MODAL|Gtk.DialogFlags.DESTROY_WITH_PARENT,
                            Gtk.MessageType.WARNING, Gtk.ButtonsType.OK, message)
    dlg.run()
    dlg.destroy()
    if widget is not None:
        widget.grab_focus()
    return expression
class DialogBase:
    """Common behaviour shared by the settings dialogs in this module.

    Subclasses must assign self.ui (the GtkDialog loaded from builder XML)
    before calling DialogBase.__init__; attribute lookups that miss on the
    wrapper are forwarded to that dialog object via __getattr__.
    """
    def __init__(self):
        # Intercept both close paths so the dialog is hidden, not destroyed,
        # and can be re-shown later.
        self.connect("close", self.on_close)
        self.connect("delete_event", self.on_close)

    def on_close(self, widget, data=None):
        # Returning True stops further handling of delete_event.
        self.hide()
        return True

    def on_cancel(self, widget, data=None):
        # Reload the target item to discard any edits made in the dialog.
        self.load(self.targetItem)
        self.ui.response(Gtk.ResponseType.CANCEL)
        self.hide()

    def on_ok(self, widget, data=None):
        # Only accept when the subclass-specific validation passes.
        if self.valid():
            self.response(Gtk.ResponseType.OK)
            self.hide()

    def __getattr__(self, attr):
        # Magic fudge to allow us to pretend to be the ui class we encapsulate
        return getattr(self.ui, attr)

    def on_response(self, widget, responseId):
        # Hand the response code to the caller-supplied closure; negative ids
        # (e.g. dialog deleted) additionally hide the dialog and stop the
        # default 'response' emission.
        self.closure(responseId)
        if responseId < 0:
            self.hide()
            self.emit_stop_by_name('response')
class AbbrSettingsDialog(DialogBase):
    """Dialog for editing the abbreviation trigger(s) of a phrase/script/folder."""

    def __init__(self, parent, configManager, closure):
        builder = configwindow.get_ui("abbrsettings.xml")
        self.ui = builder.get_object("abbrsettings")
        builder.connect_signals(self)
        self.ui.set_transient_for(parent)
        self.configManager = configManager
        # Called with the dialog response code when the dialog closes.
        self.closure = closure
        self.abbrList = builder.get_object("abbrList")
        self.addButton = builder.get_object("addButton")
        self.removeButton = builder.get_object("removeButton")
        self.wordCharCombo = builder.get_object("wordCharCombo")
        self.removeTypedCheckbox = builder.get_object("removeTypedCheckbox")
        self.omitTriggerCheckbox = builder.get_object("omitTriggerCheckbox")
        self.matchCaseCheckbox = builder.get_object("matchCaseCheckbox")
        self.ignoreCaseCheckbox = builder.get_object("ignoreCaseCheckbox")
        self.triggerInsideCheckbox = builder.get_object("triggerInsideCheckbox")
        self.immediateCheckbox = builder.get_object("immediateCheckbox")
        DialogBase.__init__(self)
        # Set up the single-column, in-place-editable list of abbreviations.
        store = Gtk.ListStore(str)
        self.abbrList.set_model(store)
        column1 = Gtk.TreeViewColumn(_("Abbreviations"))
        textRenderer = Gtk.CellRendererText()
        textRenderer.set_property("editable", True)
        textRenderer.connect("edited", self.on_cell_modified)
        textRenderer.connect("editing-canceled", self.on_cell_editing_cancelled)
        column1.pack_end(textRenderer, True)
        column1.add_attribute(textRenderer, "text", 0)
        column1.set_sizing(Gtk.TreeViewColumnSizing.FIXED)
        self.abbrList.append_column(column1)
        # Populate the word-character presets.
        for item in WORD_CHAR_OPTIONS_ORDERED:
            self.wordCharCombo.append_text(item)

    def load(self, item):
        """Populate all widgets from the given model item."""
        self.targetItem = item
        self.abbrList.get_model().clear()
        if model.TriggerMode.ABBREVIATION in item.modes:
            for abbr in item.abbreviations:
                # ListStore holds byte strings (Python 2 GTK binding).
                self.abbrList.get_model().append((abbr.encode("utf-8"),))
            self.removeButton.set_sensitive(True)
            firstIter = self.abbrList.get_model().get_iter_first()
            self.abbrList.get_selection().select_iter(firstIter)
        else:
            self.removeButton.set_sensitive(False)
        self.removeTypedCheckbox.set_active(item.backspace)
        self.__resetWordCharCombo()
        wordCharRegex = item.get_word_chars()
        if wordCharRegex in WORD_CHAR_OPTIONS.values():
            # Default wordchar regex used - select the matching preset.
            for desc, regex in WORD_CHAR_OPTIONS.iteritems():
                if item.get_word_chars() == regex:
                    self.wordCharCombo.set_active(WORD_CHAR_OPTIONS_ORDERED.index(desc))
                    break
        else:
            # Custom wordchar regex used - append it as an extra entry.
            self.wordCharCombo.append_text(model.extract_wordchars(wordCharRegex).encode("utf-8"))
            self.wordCharCombo.set_active(len(WORD_CHAR_OPTIONS))
        # Folders have no trigger to omit; phrases alone support case matching.
        if isinstance(item, model.Folder):
            self.omitTriggerCheckbox.hide()
        else:
            self.omitTriggerCheckbox.show()
            self.omitTriggerCheckbox.set_active(item.omitTrigger)
        if isinstance(item, model.Phrase):
            self.matchCaseCheckbox.show()
            self.matchCaseCheckbox.set_active(item.matchCase)
        else:
            self.matchCaseCheckbox.hide()
        self.ignoreCaseCheckbox.set_active(item.ignoreCase)
        self.triggerInsideCheckbox.set_active(item.triggerInside)
        self.immediateCheckbox.set_active(item.immediate)

    def save(self, item):
        """Write the widget state back onto the given model item."""
        item.modes.append(model.TriggerMode.ABBREVIATION)
        item.clear_abbreviations()
        item.abbreviations = self.get_abbrs()
        item.backspace = self.removeTypedCheckbox.get_active()
        option = self.wordCharCombo.get_active_text()
        if option in WORD_CHAR_OPTIONS:
            item.set_word_chars(WORD_CHAR_OPTIONS[option])
        else:
            # Custom entry: compile the user text into a wordchar regex.
            item.set_word_chars(model.make_wordchar_re(option))
        if not isinstance(item, model.Folder):
            item.omitTrigger = self.omitTriggerCheckbox.get_active()
        if isinstance(item, model.Phrase):
            item.matchCase = self.matchCaseCheckbox.get_active()
        item.ignoreCase = self.ignoreCaseCheckbox.get_active()
        item.triggerInside = self.triggerInsideCheckbox.get_active()
        item.immediate = self.immediateCheckbox.get_active()

    def reset(self):
        """Return every widget to its default state."""
        self.abbrList.get_model().clear()
        self.__resetWordCharCombo()
        self.removeButton.set_sensitive(False)
        self.wordCharCombo.set_active(0)
        self.omitTriggerCheckbox.set_active(False)
        self.removeTypedCheckbox.set_active(True)
        self.matchCaseCheckbox.set_active(False)
        self.ignoreCaseCheckbox.set_active(False)
        self.triggerInsideCheckbox.set_active(False)
        self.immediateCheckbox.set_active(False)

    def __resetWordCharCombo(self):
        # Rebuild the combo with just the presets (drops any custom entry).
        self.wordCharCombo.remove_all()
        for item in WORD_CHAR_OPTIONS_ORDERED:
            self.wordCharCombo.append_text(item)
        self.wordCharCombo.set_active(0)

    def get_abbrs(self):
        """Return the abbreviations currently in the list, deduplicated,
        as unicode strings."""
        ret = []
        # NOTE: local 'model' shadows the module-level 'model' import here.
        model = self.abbrList.get_model()
        i = iter(model)
        try:
            # Python 2 iterator protocol: i.next() raises StopIteration at end.
            while True:
                text = model.get_value(i.next().iter, 0)
                ret.append(text.decode("utf-8"))
        except StopIteration:
            pass
        return list(set(ret))

    def get_abbrs_readable(self):
        """Return a short human-readable summary of the abbreviations."""
        abbrs = self.get_abbrs()
        if len(abbrs) == 1:
            return abbrs[0].encode("utf-8")
        else:
            return "[%s]" % ','.join([a.encode("utf-8") for a in abbrs])

    def valid(self):
        if not validate(len(self.get_abbrs()) > 0, _("You must specify at least one abbreviation"),
                        self.addButton, self.ui): return False
        return True

    def reset_focus(self):
        self.addButton.grab_focus()

    # Signal handlers

    def on_cell_editing_cancelled(self, renderer, data=None):
        # Treat a cancelled edit like an edit that kept the old text, so the
        # empty-row cleanup in on_cell_modified still runs.
        model, curIter = self.abbrList.get_selection().get_selected()
        oldText = model.get_value(curIter, 0) or ""
        self.on_cell_modified(renderer, None, oldText)

    def on_cell_modified(self, renderer, path, newText, data=None):
        model, curIter = self.abbrList.get_selection().get_selected()
        oldText = model.get_value(curIter, 0) or ""
        # A row that was empty and stays empty is removed outright.
        if EMPTY_FIELD_REGEX.match(newText) and EMPTY_FIELD_REGEX.match(oldText):
            self.on_removeButton_clicked(renderer)
        else:
            model.set(curIter, 0, newText)

    def on_addButton_clicked(self, widget, data=None):
        # Append a blank row and immediately start editing it.
        model = self.abbrList.get_model()
        newIter = model.append()
        self.abbrList.set_cursor(model.get_path(newIter), self.abbrList.get_column(0), True)
        self.removeButton.set_sensitive(True)

    def on_removeButton_clicked(self, widget, data=None):
        model, curIter = self.abbrList.get_selection().get_selected()
        model.remove(curIter)
        # Keep a sensible selection, or disable removal when the list is empty.
        if model.get_iter_first() is None:
            self.removeButton.set_sensitive(False)
        else:
            self.abbrList.get_selection().select_iter(model.get_iter_first())

    def on_abbrList_cursorchanged(self, widget, data=None):
        pass

    def on_ignoreCaseCheckbox_stateChanged(self, widget, data=None):
        # Case matching only makes sense while case is being ignored.
        if not self.ignoreCaseCheckbox.get_active():
            self.matchCaseCheckbox.set_active(False)

    def on_matchCaseCheckbox_stateChanged(self, widget, data=None):
        # Matching case implies ignoring case on the trigger itself.
        if self.matchCaseCheckbox.get_active():
            self.ignoreCaseCheckbox.set_active(True)

    def on_immediateCheckbox_stateChanged(self, widget, data=None):
        # Immediate triggering has no trigger character, so the related
        # options are meaningless and get disabled.
        if self.immediateCheckbox.get_active():
            self.omitTriggerCheckbox.set_active(False)
            self.omitTriggerCheckbox.set_sensitive(False)
            self.wordCharCombo.set_sensitive(False)
        else:
            self.omitTriggerCheckbox.set_sensitive(True)
            self.wordCharCombo.set_sensitive(True)
class HotkeySettingsDialog(DialogBase):
    """Dialog for assigning a hotkey (modifiers + key) to an item."""

    # Maps raw key characters to their display/storage form.
    KEY_MAP = {
        ' ' : "<space>",
    }
    # Inverse of KEY_MAP, built once at class-definition time.
    REVERSE_KEY_MAP = {}
    for key, value in KEY_MAP.iteritems():
        REVERSE_KEY_MAP[value] = key

    def __init__(self, parent, configManager, closure):
        builder = configwindow.get_ui("hotkeysettings.xml")
        self.ui = builder.get_object("hotkeysettings")
        builder.connect_signals(self)
        self.ui.set_transient_for(parent)
        self.configManager = configManager
        # Called with the dialog response code when the dialog closes.
        self.closure = closure
        # Currently chosen key (display form), or None when unset.
        self.key = None
        self.controlButton = builder.get_object("controlButton")
        self.altButton = builder.get_object("altButton")
        self.shiftButton = builder.get_object("shiftButton")
        self.superButton = builder.get_object("superButton")
        self.hyperButton = builder.get_object("hyperButton")
        self.metaButton = builder.get_object("metaButton")
        self.setButton = builder.get_object("setButton")
        self.keyLabel = builder.get_object("keyLabel")
        DialogBase.__init__(self)

    def load(self, item):
        """Populate the widgets from the given model item."""
        self.targetItem = item
        self.setButton.set_sensitive(True)
        if model.TriggerMode.HOTKEY in item.modes:
            self.controlButton.set_active(iomediator.Key.CONTROL in item.modifiers)
            self.altButton.set_active(iomediator.Key.ALT in item.modifiers)
            self.shiftButton.set_active(iomediator.Key.SHIFT in item.modifiers)
            self.superButton.set_active(iomediator.Key.SUPER in item.modifiers)
            self.hyperButton.set_active(iomediator.Key.HYPER in item.modifiers)
            self.metaButton.set_active(iomediator.Key.META in item.modifiers)
            key = item.hotKey
            # Translate raw characters to their display form where needed.
            if key in self.KEY_MAP:
                keyText = self.KEY_MAP[key]
            else:
                keyText = key
            self._setKeyLabel(keyText)
            self.key = keyText
        else:
            self.reset()

    def save(self, item):
        """Write the configured hotkey back onto the model item."""
        item.modes.append(model.TriggerMode.HOTKEY)
        # Build modifier list
        modifiers = self.build_modifiers()
        keyText = self.key
        # Translate the display form back to the raw character if applicable.
        if keyText in self.REVERSE_KEY_MAP:
            key = self.REVERSE_KEY_MAP[keyText]
        else:
            key = keyText
        assert key != None, "Attempt to set hotkey with no key"
        item.set_hotkey(modifiers, key)

    def reset(self):
        """Clear all modifier toggles and the chosen key."""
        self.controlButton.set_active(False)
        self.altButton.set_active(False)
        self.shiftButton.set_active(False)
        self.superButton.set_active(False)
        self.hyperButton.set_active(False)
        self.metaButton.set_active(False)
        self._setKeyLabel(_("(None)"))
        self.key = None
        self.setButton.set_sensitive(True)

    # NOTE(review): mutable default argument; harmless here because the list
    # is only read, never mutated - but do not append to 'modifiers'.
    def set_key(self, key, modifiers=[]):
        """Called from the key-grabber thread with the captured key+modifiers."""
        Gdk.threads_enter()
        if self.KEY_MAP.has_key(key):
            key = self.KEY_MAP[key]
        self._setKeyLabel(key)
        self.key = key
        self.controlButton.set_active(iomediator.Key.CONTROL in modifiers)
        self.altButton.set_active(iomediator.Key.ALT in modifiers)
        self.shiftButton.set_active(iomediator.Key.SHIFT in modifiers)
        self.superButton.set_active(iomediator.Key.SUPER in modifiers)
        self.hyperButton.set_active(iomediator.Key.HYPER in modifiers)
        self.metaButton.set_active(iomediator.Key.META in modifiers)
        self.setButton.set_sensitive(True)
        Gdk.threads_leave()

    def cancel_grab(self):
        """Called from the key-grabber thread when the grab is aborted."""
        Gdk.threads_enter()
        self.setButton.set_sensitive(True)
        self._setKeyLabel(self.key)
        Gdk.threads_leave()

    def build_modifiers(self):
        """Return the sorted list of modifier keys currently toggled on."""
        modifiers = []
        if self.controlButton.get_active():
            modifiers.append(iomediator.Key.CONTROL)
        if self.altButton.get_active():
            modifiers.append(iomediator.Key.ALT)
        if self.shiftButton.get_active():
            modifiers.append(iomediator.Key.SHIFT)
        if self.superButton.get_active():
            modifiers.append(iomediator.Key.SUPER)
        if self.hyperButton.get_active():
            modifiers.append(iomediator.Key.HYPER)
        if self.metaButton.get_active():
            modifiers.append(iomediator.Key.META)
        modifiers.sort()
        return modifiers

    def _setKeyLabel(self, key):
        self.keyLabel.set_text(_("Key: ") + key)

    def valid(self):
        if not validate(self.key is not None, _("You must specify a key for the hotkey."),
                        None, self.ui): return False
        return True

    def on_setButton_pressed(self, widget, data=None):
        # Disable the button while grabbing so the user cannot start a second
        # grab; the grabber calls set_key()/cancel_grab() when done.
        self.setButton.set_sensitive(False)
        self.keyLabel.set_text(_("Press a key..."))
        self.grabber = iomediator.KeyGrabber(self)
        self.grabber.start()
class GlobalHotkeyDialog(HotkeySettingsDialog):
    """Variant of HotkeySettingsDialog for application-global hotkeys.

    Global hotkey items carry an 'enabled' flag instead of trigger modes, and
    saving must not append a trigger mode, hence load/save are overridden.
    """
    def load(self, item):
        self.targetItem = item
        if item.enabled:
            self.controlButton.set_active(iomediator.Key.CONTROL in item.modifiers)
            self.altButton.set_active(iomediator.Key.ALT in item.modifiers)
            self.shiftButton.set_active(iomediator.Key.SHIFT in item.modifiers)
            self.superButton.set_active(iomediator.Key.SUPER in item.modifiers)
            self.hyperButton.set_active(iomediator.Key.HYPER in item.modifiers)
            self.metaButton.set_active(iomediator.Key.META in item.modifiers)
            key = item.hotKey
            # Translate raw characters to their display form where needed.
            if key in self.KEY_MAP:
                keyText = self.KEY_MAP[key]
            else:
                keyText = key
            self._setKeyLabel(keyText)
            self.key = keyText
        else:
            self.reset()

    def save(self, item):
        # Build modifier list
        modifiers = self.build_modifiers()
        keyText = self.key
        if keyText in self.REVERSE_KEY_MAP:
            key = self.REVERSE_KEY_MAP[keyText]
        else:
            key = keyText
        assert key != None, "Attempt to set hotkey with no key"
        item.set_hotkey(modifiers, key)

    def valid(self):
        """Additionally reject hotkeys that clash with an existing binding."""
        configManager = self.configManager
        modifiers = self.build_modifiers()
        # A window-filter regex narrows the scope of the uniqueness check.
        regex = self.targetItem.get_applicable_regex()
        pattern = None
        if regex is not None: pattern = regex.pattern
        unique, conflicting = configManager.check_hotkey_unique(modifiers, self.key, pattern, self.targetItem)
        if not validate(unique, _("The hotkey is already in use for %s.") % conflicting, None,
                        self.ui): return False
        if not validate(self.key is not None, _("You must specify a key for the hotkey."),
                        None, self.ui): return False
        return True
class WindowFilterSettingsDialog(DialogBase):
    """Dialog for editing the window-title/class filter of an item."""

    def __init__(self, parent, closure):
        builder = configwindow.get_ui("windowfiltersettings.xml")
        self.ui = builder.get_object("windowfiltersettings")
        builder.connect_signals(self)
        self.ui.set_transient_for(parent)
        # Called with the dialog response code when the dialog closes.
        self.closure = closure
        self.triggerRegexEntry = builder.get_object("triggerRegexEntry")
        self.recursiveButton = builder.get_object("recursiveButton")
        self.detectButton = builder.get_object("detectButton")
        DialogBase.__init__(self)

    def load(self, item):
        """Populate the widgets from the given model item."""
        self.targetItem = item
        # Recursion only applies to folders (filter inherited by children).
        if not isinstance(item, model.Folder):
            self.recursiveButton.hide()
        else:
            self.recursiveButton.show()
        if not item.has_filter():
            self.reset()
        else:
            self.triggerRegexEntry.set_text(item.get_filter_regex())
            self.recursiveButton.set_active(item.isRecursive)

    def save(self, item):
        """Write the filter settings back onto the model item."""
        item.set_window_titles(self.get_filter_text())
        item.set_filter_recursive(self.get_is_recursive())

    def reset(self):
        self.triggerRegexEntry.set_text("")
        self.recursiveButton.set_active(False)

    def get_filter_text(self):
        """Return the filter regex as a unicode string."""
        return self.triggerRegexEntry.get_text().decode("utf-8")

    def get_is_recursive(self):
        return self.recursiveButton.get_active()

    def valid(self):
        return True

    def reset_focus(self):
        self.triggerRegexEntry.grab_focus()

    def on_response(self, widget, responseId):
        # Unlike DialogBase, always forward the response without extra hiding.
        self.closure(responseId)

    def receive_window_info(self, info):
        """Called from the window-grabber thread with (title, class) of the
        clicked window; lets the user pick which one to use as the filter."""
        Gdk.threads_enter()
        dlg = DetectDialog(self.ui)
        dlg.populate(info)
        response = dlg.run()
        if response == Gtk.ResponseType.OK:
            self.triggerRegexEntry.set_text(dlg.get_choice().encode("utf-8"))
        self.detectButton.set_sensitive(True)
        Gdk.threads_leave()

    def on_detectButton_pressed(self, widget, data=None):
        # Disable the button while the grab is in progress; re-enabled in
        # receive_window_info().
        #self.__dlg =
        widget.set_sensitive(False)
        self.grabber = iomediator.WindowGrabber(self)
        self.grabber.start()
class DetectDialog(DialogBase):
    """Dialog letting the user choose between a detected window's title and
    class as the basis for a window filter."""

    def __init__(self, parent):
        builder = configwindow.get_ui("detectdialog.xml")
        self.ui = builder.get_object("detectdialog")
        builder.connect_signals(self)
        self.ui.set_transient_for(parent)
        self.classLabel = builder.get_object("classLabel")
        self.titleLabel = builder.get_object("titleLabel")
        self.classRadioButton = builder.get_object("classRadioButton")
        self.titleRadioButton = builder.get_object("titleRadioButton")
        DialogBase.__init__(self)

    def populate(self, windowInfo):
        """Show the detected (title, class) pair and remember it."""
        self.titleLabel.set_text(_("Window title: %s") % windowInfo[0].encode("utf-8"))
        self.classLabel.set_text(_("Window class: %s") % windowInfo[1].encode("utf-8"))
        self.windowInfo = windowInfo

    def get_choice(self):
        """Return the class or the title, whichever radio button is selected."""
        if self.classRadioButton.get_active():
            return self.windowInfo[1]
        else:
            return self.windowInfo[0]

    def on_cancel(self, widget, data=None):
        # No targetItem to reload here, so skip DialogBase's reload step.
        self.ui.response(Gtk.ResponseType.CANCEL)
        self.hide()

    def on_ok(self, widget, data=None):
        # No validation needed - any choice is acceptable.
        self.response(Gtk.ResponseType.OK)
        self.hide()
class RecordDialog(DialogBase):
    """Dialog asking what to record (keyboard/mouse) and the start delay."""

    def __init__(self, parent, closure):
        # Called with (responseId, record_keyboard, record_mouse, delay).
        self.closure = closure
        builder = configwindow.get_ui("recorddialog.xml")
        self.ui = builder.get_object("recorddialog")
        builder.connect_signals(self)
        self.ui.set_transient_for(parent)
        self.keyboardButton = builder.get_object("keyboardButton")
        self.mouseButton = builder.get_object("mouseButton")
        self.spinButton = builder.get_object("spinButton")
        DialogBase.__init__(self)

    def get_record_keyboard(self):
        """Whether keystrokes should be captured."""
        return self.keyboardButton.get_active()

    def get_record_mouse(self):
        """Whether mouse clicks should be captured."""
        return self.mouseButton.get_active()

    def get_delay(self):
        """Recording start delay in seconds."""
        return self.spinButton.get_value_as_int()

    def on_response(self, widget, responseId):
        # Forward the full recording configuration to the caller's closure.
        self.closure(responseId, self.get_record_keyboard(), self.get_record_mouse(), self.get_delay())

    def on_cancel(self, widget, data=None):
        # No targetItem to reload here, so skip DialogBase's reload step.
        self.ui.response(Gtk.ResponseType.CANCEL)
        self.hide()

    def valid(self):
        return True
| gpl-3.0 |
indashnet/InDashNet.Open.UN2000 | android/external/chromium_org/tools/perf/metrics/media.py | 23 | 2314 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
from metrics import Metric
class MediaMetric(Metric):
  """MediaMetric class injects and calls JS responsible for recording metrics.

  Default media metrics are collected for every media element in the page,
  such as decoded_frame_count, dropped_frame_count, decoded_video_bytes, and
  decoded_audio_bytes.
  """

  def __init__(self, tab):
    super(MediaMetric, self).__init__()
    # Inject the recording script (media.js lives next to this module).
    script_path = os.path.join(os.path.dirname(__file__), 'media.js')
    with open(script_path) as script_file:
      js_source = script_file.read()
    tab.ExecuteJavaScript(js_source)
    self._results = None

  def Start(self, page, tab):
    """Create the media metrics for all media elements in the document."""
    tab.ExecuteJavaScript('window.__createMediaMetricsForDocument()')

  def Stop(self, page, tab):
    """Collect the recorded metrics from the page."""
    self._results = tab.EvaluateJavaScript('window.__getAllMetrics()')

  def AddResults(self, tab, results):
    """Reports all recorded metrics as Telemetry perf results."""
    assert self._results
    for element_metrics in self._results:
      self._AddResultsForMediaElement(element_metrics, results)

  def _AddResultsForMediaElement(self, media_metric, results):
    """Reports metrics for one media element.

    Media metrics contain an ID identifying the media element and values:
    media_metric = {
      'id': 'video_1',
      'metrics': {
        'time_to_play': 120,
        'decoded_bytes': 13233,
        ...
      }
    }
    """
    trace = media_metric['id']
    if not trace:
      logging.error('Metrics ID is missing in results.')
      return
    metrics = media_metric['metrics']
    # (prefix, unit) pairs reported for every media element; any metric name
    # starting with a prefix is reported, with the remainder of the name used
    # as a trace-label suffix.
    reported = (
        ('decoded_audio_bytes', 'bytes'),
        ('decoded_video_bytes', 'bytes'),
        ('decoded_frame_count', 'frames'),
        ('dropped_frame_count', 'frames'),
        ('playback_time', 'sec'),
        ('seek', 'sec'),
        ('time_to_play', 'sec'),
    )
    for prefix, unit in reported:
      for name in metrics:
        if name.startswith(prefix):
          suffix = name[len(prefix):]
          results.Add(trace + suffix, unit, str(metrics[name]),
                      chart_name=prefix, data_type='default')
| apache-2.0 |
reiaaoyama/exabgp | lib/exabgp/bgp/message/update/attribute/nexthop.py | 2 | 1598 | # encoding: utf-8
"""
nexthop.py
Created by Thomas Mangin on 2009-11-05.
Copyright (c) 2009-2015 Exa Networks. All rights reserved.
"""
from exabgp.protocol.family import AFI
from exabgp.protocol.ip import IP
from exabgp.protocol.ip import NoNextHop
from exabgp.bgp.message.update.attribute.attribute import Attribute
# ================================================================== NextHop (3)
# The inheritance order is important and attribute MUST be first for the righ register to be called
# At least until we rename them to be more explicit
@Attribute.register()
class NextHop (Attribute,IP):
    """NEXT_HOP path attribute (BGP type code 3).

    Combines the generic Attribute machinery with an IP address; registered
    with the Attribute dispatch table via the decorator above. The inheritance
    order matters: Attribute must come first so the right register() is called.
    """
    ID = Attribute.CODE.NEXT_HOP
    FLAG = Attribute.Flag.TRANSITIVE
    CACHING = True
    # False: this is a concrete address, not "use the session's local address"
    # (see NextHopSelf).
    SELF = False

    # XXX: This is a bad API, as it works on non-raw data
    def __init__ (self, string, packed=None):
        # init() comes from the IP side of the hierarchy.
        self.init(string,packed)

    def __eq__ (self, other):
        # Equal when the other attribute has the same code/flags and encodes
        # to the same wire bytes.
        return \
            self.ID == other.ID and \
            self.FLAG == other.FLAG and \
            self._packed == other.ton()

    def __ne__ (self, other):
        return not self.__eq__(other)

    def ton (self, negotiated=None):
        """Return the raw network-order bytes of the address."""
        return self._packed

    def pack (self, negotiated=None):
        """Return the attribute wire encoding (header + address bytes)."""
        return self._attribute(self.ton())

    @classmethod
    def unpack (cls, data, negotiated=None):
        # An empty payload means the sentinel "no next hop" value.
        if not data:
            return NoNextHop
        return IP.unpack(data,NextHop)

    def __repr__ (self):
        return IP.__repr__(self)
class NextHopSelf (NextHop):
    """Placeholder next hop meaning "use the local address for this session".

    The concrete address is only known at pack() time, when the negotiated
    session state can supply it for the stored address family.
    """
    SELF = True

    def __init__ (self, afi):
        # Only the address family is stored; no concrete address yet.
        self.afi = afi

    def __repr__ (self):
        return 'self'

    def ipv4 (self):
        """True when this next hop is for the IPv4 address family."""
        return self.afi == AFI.ipv4

    def pack (self,negotiated):
        # Resolve the session's own address for this AFI at encode time.
        return self._attribute(negotiated.nexthopself(self.afi).ton())
| bsd-3-clause |
sameetb-cuelogic/edx-platform-test | common/djangoapps/student/tests/test_auto_auth.py | 21 | 7470 | from django.test import TestCase
from django.test.client import Client
from django.contrib.auth.models import User
from django_comment_common.models import (
Role, FORUM_ROLE_ADMINISTRATOR, FORUM_ROLE_MODERATOR, FORUM_ROLE_STUDENT)
from django_comment_common.utils import seed_permissions_roles
from student.models import CourseEnrollment, UserProfile
from util.testing import UrlResetMixin
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import CourseLocator
from mock import patch
import ddt
@ddt.ddt
class AutoAuthEnabledTestCase(UrlResetMixin, TestCase):
    """
    Tests for the Auto auth view that we have for load testing.
    """

    COURSE_ID_MONGO = 'edX/Test101/2014_Spring'
    COURSE_ID_SPLIT = 'course-v1:edX+Test101+2014_Spring'
    # Each pair is (course id string passed to the view, the CourseKey the
    # enrollment should be stored under) - covering both id formats parsed
    # by both key classes.
    COURSE_IDS_DDT = (
        (COURSE_ID_MONGO, SlashSeparatedCourseKey.from_deprecated_string(COURSE_ID_MONGO)),
        (COURSE_ID_SPLIT, SlashSeparatedCourseKey.from_deprecated_string(COURSE_ID_SPLIT)),
        (COURSE_ID_MONGO, CourseLocator.from_string(COURSE_ID_MONGO)),
        (COURSE_ID_SPLIT, CourseLocator.from_string(COURSE_ID_SPLIT)),
    )

    @patch.dict("django.conf.settings.FEATURES", {"AUTOMATIC_AUTH_FOR_TESTING": True})
    def setUp(self):
        # Patching the settings.FEATURES['AUTOMATIC_AUTH_FOR_TESTING']
        # value affects the contents of urls.py,
        # so we need to call super.setUp() which reloads urls.py (because
        # of the UrlResetMixin)
        super(AutoAuthEnabledTestCase, self).setUp()
        self.url = '/auto_auth'
        self.client = Client()

    def test_create_user(self):
        """
        Test that user gets created when visiting the page.
        """
        self._auto_auth()
        self.assertEqual(User.objects.count(), 1)
        self.assertTrue(User.objects.all()[0].is_active)

    def test_create_same_user(self):
        # Requesting the same username twice must not create a duplicate.
        self._auto_auth(username='test')
        self._auto_auth(username='test')
        self.assertEqual(User.objects.count(), 1)

    def test_create_multiple_users(self):
        """
        Test to make sure multiple users are created.
        """
        self._auto_auth()
        self._auto_auth()
        self.assertEqual(User.objects.all().count(), 2)

    def test_create_defined_user(self):
        """
        Test that the user gets created with the correct attributes
        when they are passed as parameters on the auto-auth page.
        """
        self._auto_auth(
            username='robot', password='test',
            email='robot@edx.org', full_name="Robot Name"
        )
        # Check that the user has the correct info
        user = User.objects.get(username='robot')
        self.assertEqual(user.username, 'robot')
        self.assertTrue(user.check_password('test'))
        self.assertEqual(user.email, 'robot@edx.org')
        # Check that the user has a profile
        user_profile = UserProfile.objects.get(user=user)
        self.assertEqual(user_profile.name, "Robot Name")
        # By default, the user should not be global staff
        self.assertFalse(user.is_staff)

    def test_create_staff_user(self):
        # Create a staff user
        self._auto_auth(username='test', staff='true')
        user = User.objects.get(username='test')
        self.assertTrue(user.is_staff)
        # Revoke staff privileges
        self._auto_auth(username='test', staff='false')
        user = User.objects.get(username='test')
        self.assertFalse(user.is_staff)

    @ddt.data(*COURSE_IDS_DDT)
    @ddt.unpack
    def test_course_enrollment(self, course_id, course_key):
        # Create a user and enroll in a course
        self._auto_auth(username='test', course_id=course_id)
        # Check that a course enrollment was created for the user
        self.assertEqual(CourseEnrollment.objects.count(), 1)
        enrollment = CourseEnrollment.objects.get(course_id=course_key)
        self.assertEqual(enrollment.user.username, "test")

    @ddt.data(*COURSE_IDS_DDT)
    @ddt.unpack
    def test_double_enrollment(self, course_id, course_key):
        # Create a user and enroll in a course
        self._auto_auth(username='test', course_id=course_id)
        # Make the same call again, re-enrolling the student in the same course
        self._auto_auth(username='test', course_id=course_id)
        # Check that only one course enrollment was created for the user
        self.assertEqual(CourseEnrollment.objects.count(), 1)
        enrollment = CourseEnrollment.objects.get(course_id=course_key)
        self.assertEqual(enrollment.user.username, "test")

    @ddt.data(*COURSE_IDS_DDT)
    @ddt.unpack
    def test_set_roles(self, course_id, course_key):
        seed_permissions_roles(course_key)
        course_roles = dict((r.name, r) for r in Role.objects.filter(course_id=course_key))
        self.assertEqual(len(course_roles), 4)  # sanity check
        # Student role is assigned by default on course enrollment.
        self._auto_auth(username='a_student', course_id=course_id)
        user = User.objects.get(username='a_student')
        user_roles = user.roles.all()
        self.assertEqual(len(user_roles), 1)
        self.assertEqual(user_roles[0], course_roles[FORUM_ROLE_STUDENT])
        self._auto_auth(username='a_moderator', course_id=course_id, roles='Moderator')
        user = User.objects.get(username='a_moderator')
        user_roles = user.roles.all()
        self.assertEqual(
            set(user_roles),
            set([course_roles[FORUM_ROLE_STUDENT],
                 course_roles[FORUM_ROLE_MODERATOR]]))
        # check multiple roles work.
        self._auto_auth(username='an_admin', course_id=course_id,
                        roles='{},{}'.format(FORUM_ROLE_MODERATOR, FORUM_ROLE_ADMINISTRATOR))
        user = User.objects.get(username='an_admin')
        user_roles = user.roles.all()
        self.assertEqual(
            set(user_roles),
            set([course_roles[FORUM_ROLE_STUDENT],
                 course_roles[FORUM_ROLE_MODERATOR],
                 course_roles[FORUM_ROLE_ADMINISTRATOR]]))

    def _auto_auth(self, **params):
        """
        Make a request to the auto-auth end-point and check
        that the response is successful.
        """
        response = self.client.get(self.url, params)
        self.assertEqual(response.status_code, 200)
        # Check that session and CSRF are set in the response
        for cookie in ['csrftoken', 'sessionid']:
            self.assertIn(cookie, response.cookies)  # pylint: disable=maybe-no-member
            self.assertTrue(response.cookies[cookie].value)  # pylint: disable=maybe-no-member
class AutoAuthDisabledTestCase(UrlResetMixin, TestCase):
    """
    Test that the page is inaccessible with default settings
    """

    @patch.dict("django.conf.settings.FEATURES", {"AUTOMATIC_AUTH_FOR_TESTING": False})
    def setUp(self):
        # Patching the settings.FEATURES['AUTOMATIC_AUTH_FOR_TESTING']
        # value affects the contents of urls.py,
        # so we need to call super.setUp() which reloads urls.py (because
        # of the UrlResetMixin)
        super(AutoAuthDisabledTestCase, self).setUp()
        self.url = '/auto_auth'
        self.client = Client()

    def test_auto_auth_disabled(self):
        """
        Make sure automatic authentication is disabled.
        """
        # With the feature off the URL is not registered, so it must 404.
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, 404)
| agpl-3.0 |
google/glazier | glazier/lib/bitlocker.py | 1 | 2550 | # python3
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bitlocker management functionality."""
import logging
import subprocess
from glazier.lib import constants
from glazier.lib import powershell
SUPPORTED_MODES = ['ps_tpm', 'bde_tpm']
class BitlockerError(Exception):
  """Raised when a BitLocker operation fails or is misconfigured."""
class Bitlocker(object):
  """Manage Bitlocker related operations on the local host.

  The encryption strategy is selected by `mode`; see SUPPORTED_MODES.
  """

  def __init__(self, mode: str):
    """Store the requested encryption mode.

    Args:
      mode: One of SUPPORTED_MODES ('ps_tpm' or 'bde_tpm').
    """
    self._mode = mode

  def _LaunchSubproc(self, command: str):
    """Launch a subprocess.

    Args:
      command: A command string to pass to subprocess.call()

    Raises:
      BitlockerError: An unexpected exit code from manage-bde.
    """
    logging.info('Running BitLocker command: %s', command)
    exit_code = subprocess.call(command, shell=True)
    if exit_code != 0:
      raise BitlockerError('Unexpected exit code from Bitlocker: %s.' %
                           str(exit_code))

  def _PsTpm(self):
    """Enable TPM mode using Powershell (Win8 +).

    Raises:
      BitlockerError: On any PowerShell failure while enabling BitLocker.
    """
    ps = powershell.PowerShell()
    try:
      ps.RunCommand(['$ErrorActionPreference=\'Stop\'', ';', 'Enable-BitLocker',
                     'C:', '-TpmProtector', '-UsedSpaceOnly',
                     '-SkipHardwareTest ', '>>',
                     r'%s\enable-bitlocker.txt' % constants.SYS_LOGS_PATH])
      ps.RunCommand(['$ErrorActionPreference=\'Stop\'', ';',
                     'Add-BitLockerKeyProtector', 'C:',
                     '-RecoveryPasswordProtector', '>NUL'])
    except powershell.PowerShellError as e:
      # Chain the original PowerShell failure so the root cause is kept
      # in the traceback (previously the cause was not chained explicitly).
      raise BitlockerError('Error enabling Bitlocker via Powershell: %s.' %
                           str(e)) from e

  def Enable(self):
    """Enable bitlocker using the configured mode.

    Raises:
      BitlockerError: The configured mode is not supported.
    """
    if self._mode == 'ps_tpm':
      self._PsTpm()
    elif self._mode == 'bde_tpm':
      self._LaunchSubproc(r'C:\Windows\System32\cmd.exe /c '
                          r'C:\Windows\System32\manage-bde.exe -on c: -rp '
                          '>NUL')
    else:
      raise BitlockerError('Unknown mode: %s.' % self._mode)
| apache-2.0 |
blueboxgroup/ansible | contrib/inventory/ssh_config.py | 160 | 3979 | #!/usr/bin/env python
# (c) 2014, Tomas Karasek <tomas.karasek@digile.fi>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Dynamic inventory script which lets you use aliases from ~/.ssh/config.
#
# There were some issues with various Paramiko versions. I took a deeper look
# and tested heavily. Now, ansible parses this alright with Paramiko versions
# 1.7.2 to 1.15.2.
#
# It prints inventory based on parsed ~/.ssh/config. You can refer to hosts
# with their alias, rather than with the IP or hostname. It takes advantage
# of the ansible_ssh_{host,port,user,private_key_file}.
#
# If you have in your .ssh/config:
# Host git
# HostName git.domain.org
# User tkarasek
# IdentityFile /home/tomk/keys/thekey
#
# You can do
# $ ansible git -m ping
#
# Example invocation:
# ssh_config.py --list
# ssh_config.py --host <alias>
import argparse
import os.path
import sys
import paramiko
try:
import json
except ImportError:
import simplejson as json
# Location of the ssh client configuration to parse (user-expandable path).
SSH_CONF = '~/.ssh/config'

# Inventory group name under which all parsed aliases are listed.
_key = 'ssh_config'

# Mapping of ssh_config option names to the ansible inventory variable
# each one populates.
_ssh_to_ansible = [('user', 'ansible_ssh_user'),
                   ('hostname', 'ansible_ssh_host'),
                   ('identityfile', 'ansible_ssh_private_key_file'),
                   ('port', 'ansible_ssh_port')]
def get_config():
    """Parse SSH_CONF into a dict mapping host alias -> ssh options.

    Wildcard host patterns ('*', '?') are skipped since they do not name
    a single inventory host. Returns an empty dict when the file is
    missing.
    """
    conf_path = os.path.expanduser(SSH_CONF)
    if not os.path.isfile(conf_path):
        return {}
    with open(conf_path) as f:
        cfg = paramiko.SSHConfig()
        cfg.parse(f)
    ret_dict = {}
    # NOTE: cfg._config is a private paramiko structure whose layout has
    # changed across releases (see the header comments of this script).
    for d in cfg._config:
        if isinstance(d['host'], list):
            alias = d['host'][0]
        else:
            alias = d['host']
        if ('?' in alias) or ('*' in alias):
            # Wildcard entries are patterns, not real hosts.
            continue
        _copy = dict(d)
        del _copy['host']
        # Newer paramiko versions nest the options under a 'config' key.
        if 'config' in _copy:
            ret_dict[alias] = _copy['config']
        else:
            ret_dict[alias] = _copy
    return ret_dict
def print_list():
    """Emit the full inventory (with _meta hostvars) as JSON on stdout."""
    meta = {'hostvars': {}}
    for alias, attributes in get_config().items():
        host_vars = {}
        for ssh_opt, ansible_opt in _ssh_to_ansible:
            if ssh_opt not in attributes:
                continue
            value = attributes[ssh_opt]
            # Some options (e.g. the private key) come back as a list;
            # keep only the first element.
            if type(value) is list:
                value = value[0]
            host_vars[ansible_opt] = value
        if host_vars:
            meta['hostvars'][alias] = host_vars
    hosts = list(set(meta['hostvars'].keys()))
    print(json.dumps({_key: hosts, '_meta': meta}))
def print_host(host):
    """Emit the variables for a single host alias as JSON on stdout."""
    print(json.dumps(get_config()[host]))
def get_args(args_list):
    """Parse CLI arguments; exactly one of --list/--host is required."""
    parser = argparse.ArgumentParser(
        description='ansible inventory script parsing .ssh/config')
    mutex_group = parser.add_mutually_exclusive_group(required=True)
    mutex_group.add_argument(
        '--list', action='store_true',
        help='list all hosts from .ssh/config inventory')
    mutex_group.add_argument(
        '--host', help='display variables for a host')
    return parser.parse_args(args_list)
def main(args_list):
    """Entry point: dispatch to the --list or --host handler."""
    args = get_args(args_list)
    # The mutually exclusive group guarantees exactly one branch runs.
    if args.list:
        print_list()
    if args.host:
        print_host(args.host)
# Invoked as a script: forward CLI arguments (sans program name).
if __name__ == '__main__':
    main(sys.argv[1:])
| gpl-3.0 |
MediaSapiens/autonormix | django/core/mail/backends/smtp.py | 36 | 3655 | """SMTP email backend class."""
import smtplib
import socket
import threading
from django.conf import settings
from django.core.mail.backends.base import BaseEmailBackend
from django.core.mail.utils import DNS_NAME
class EmailBackend(BaseEmailBackend):
    """
    A wrapper that manages the SMTP network connection.
    """
    def __init__(self, host=None, port=None, username=None, password=None,
                 use_tls=None, fail_silently=False, **kwargs):
        super(EmailBackend, self).__init__(fail_silently=fail_silently)
        # Fall back to the project settings for any parameter not given.
        self.host = host or settings.EMAIL_HOST
        self.port = port or settings.EMAIL_PORT
        self.username = username or settings.EMAIL_HOST_USER
        self.password = password or settings.EMAIL_HOST_PASSWORD
        if use_tls is None:
            self.use_tls = settings.EMAIL_USE_TLS
        else:
            self.use_tls = use_tls
        self.connection = None
        # Serializes send_messages() so one connection can be shared
        # safely across threads.
        self._lock = threading.RLock()

    def open(self):
        """
        Ensures we have a connection to the email server. Returns whether or
        not a new connection was required (True or False).
        """
        if self.connection:
            # Nothing to do if the connection is already open.
            return False
        try:
            # If local_hostname is not specified, socket.getfqdn() gets used.
            # For performance, we use the cached FQDN for local_hostname.
            self.connection = smtplib.SMTP(self.host, self.port,
                                           local_hostname=DNS_NAME.get_fqdn())
            if self.use_tls:
                self.connection.ehlo()
                self.connection.starttls()
                self.connection.ehlo()
            if self.username and self.password:
                self.connection.login(self.username, self.password)
            return True
        # NOTE(review): bare except deliberately honours fail_silently;
        # on a silenced failure the method implicitly returns None.
        except:
            if not self.fail_silently:
                raise

    def close(self):
        """Closes the connection to the email server."""
        try:
            try:
                self.connection.quit()
            # NOTE(review): socket.sslerror only exists on Python 2 SSL
            # builds — confirm the target runtime before touching this.
            except socket.sslerror:
                # This happens when calling quit() on a TLS connection
                # sometimes.
                self.connection.close()
            except:
                if self.fail_silently:
                    return
                raise
        finally:
            # Always drop the reference so open() reconnects next time.
            self.connection = None

    def send_messages(self, email_messages):
        """
        Sends one or more EmailMessage objects and returns the number of email
        messages sent.
        """
        if not email_messages:
            return
        self._lock.acquire()
        try:
            new_conn_created = self.open()
            if not self.connection:
                # We failed silently on open().
                # Trying to send would be pointless.
                return
            num_sent = 0
            for message in email_messages:
                sent = self._send(message)
                if sent:
                    num_sent += 1
            # Only close connections this call itself opened.
            if new_conn_created:
                self.close()
        finally:
            self._lock.release()
        return num_sent

    def _send(self, email_message):
        """A helper method that does the actual sending.

        Returns True on success, False when there is nothing to send or
        the failure was silenced.
        """
        if not email_message.recipients():
            return False
        try:
            self.connection.sendmail(email_message.from_email,
                    email_message.recipients(),
                    email_message.message().as_string())
        except:
            if not self.fail_silently:
                raise
            return False
        return True
| bsd-3-clause |
talon-one/talon_one.py | test/test_saml_login_endpoint.py | 1 | 2043 | # coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import talon_one
from talon_one.models.saml_login_endpoint import SamlLoginEndpoint # noqa: E501
from talon_one.rest import ApiException
class TestSamlLoginEndpoint(unittest.TestCase):
    """Unit test stubs for the SamlLoginEndpoint model."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def make_instance(self, include_optional):
        """Build a SamlLoginEndpoint fixture.

        When ``include_optional`` is False only required parameters are
        set; when True both required and optional parameters are set.
        """
        # model = talon_one.models.saml_login_endpoint.SamlLoginEndpoint()  # noqa: E501
        if include_optional:
            return SamlLoginEndpoint(name='0', login_url='0')
        return SamlLoginEndpoint(name='0', login_url='0')

    def testSamlLoginEndpoint(self):
        """Instantiation smoke test for both parameter sets."""
        inst_req_only = self.make_instance(include_optional=False)
        inst_req_and_optional = self.make_instance(include_optional=True)
# Allow running this module's tests directly.
if __name__ == '__main__':
    unittest.main()
| mit |
chatcannon/scipy | scipy/weave/_dumb_shelve.py | 100 | 1380 | from __future__ import division, print_function, absolute_import
from shelve import Shelf
try:
import zlib
except ImportError:
# Some python installations don't have zlib.
pass
import pickle
class DbfilenameShelf(Shelf):
    """Shelf implementation using the "anydbm" generic dbm interface.

    Values are pickled and, when zlib is available, transparently
    compressed before being written to the underlying dbm file.
    This is initialized with the filename for the dbm database.
    See the module's __doc__ string for an overview of the interface.
    """
    def __init__(self, filename, flag='c'):
        from . import _dumbdbm_patched
        Shelf.__init__(self, _dumbdbm_patched.open(filename, flag))

    def __getitem__(self, key):
        raw = self.dict[key]
        try:
            payload = zlib.decompress(raw)
        except zlib.error:
            # The entry was stored uncompressed.
            payload = raw
        except NameError:
            # zlib is unavailable on this installation.
            payload = raw
        return pickle.loads(payload)

    def __setitem__(self, key, value):
        pickled = pickle.dumps(value, 1)
        try:
            self.dict[key] = zlib.compress(pickled)
        except NameError:
            # zlib doesn't exist, leave it uncompressed.
            self.dict[key] = pickled
def open(filename, flag='c'):
    """Open a persistent dictionary for reading and writing.

    Argument is the filename for the dbm database; `flag` follows the
    anydbm convention (default 'c' creates the file if needed).
    See the module's __doc__ string for an overview of the interface.
    """
    return DbfilenameShelf(filename, flag)
| bsd-3-clause |
ldts/zephyr | scripts/dts/extract/reg.py | 1 | 4066 | #
# Copyright (c) 2018 Bobby Noelte
#
# SPDX-License-Identifier: Apache-2.0
#
from copy import deepcopy
from extract.globals import *
from extract.directive import DTDirective
##
# @brief Manage reg directive.
#
class DTReg(DTDirective):

    ##
    # @brief Extract reg directive info
    #
    # @param node_path Path to node owning the
    #                  reg definition.
    # @param names List of region names; consumed (pop) to build named
    #              defines when present.
    # @param def_label Define label string of node owning the
    #                  compatible definition.
    # @param div Divisor applied to the region size (e.g. 1024 for KiB).
    #
    def extract(self, node_path, names, def_label, div):
        binding = get_binding(node_path)

        reg = reduced[node_path]['props']['reg']
        if type(reg) is not list: reg = [ reg, ]
        (nr_address_cells, nr_size_cells) = get_addr_size_cells(node_path)

        # SPI slaves: emit chip-select GPIO defines from the parent bus.
        if 'parent' in binding:
            bus = binding['parent']['bus']
            if bus == 'spi':
                cs_gpios = None
                try:
                    cs_gpios = deepcopy(find_parent_prop(node_path, 'cs-gpios'))
                except:
                    # Parent may legitimately have no cs-gpios property.
                    pass
                if cs_gpios:
                    extract_controller(node_path, "cs-gpios", cs_gpios, reg[0], def_label, "cs-gpio", True)
                    extract_cells(node_path, "cs-gpios", cs_gpios, None, reg[0], def_label, "cs-gpio", True)

        # generate defines
        l_base = [def_label]
        l_addr = [str_to_label("BASE_ADDRESS")]
        l_size = ["SIZE"]

        index = 0
        # Flat list of cells; each iteration consumes one (addr, size) pair.
        props = list(reg)
        while props:
            prop_def = {}
            prop_alias = {}
            addr = 0
            size = 0
            # Check is defined should be indexed (_0, _1)
            if index == 0 and len(props) < 3:
                # 1 element (len 2) or no element (len 0) in props
                l_idx = []
            else:
                l_idx = [str(index)]
            try:
                name = [names.pop(0).upper()]
            except:
                # No (more) names supplied; emit unnamed defines only.
                name = []
            # Assemble multi-cell (32-bit each) address and size values.
            for x in range(nr_address_cells):
                addr += props.pop(0) << (32 * (nr_address_cells - x - 1))
            for x in range(nr_size_cells):
                size += props.pop(0) << (32 * (nr_size_cells - x - 1))
            # Apply parent bus "ranges" translation to the local address.
            addr += translate_addr(addr, node_path,
                    nr_address_cells, nr_size_cells)

            l_addr_fqn = '_'.join(l_base + l_addr + l_idx)
            l_size_fqn = '_'.join(l_base + l_size + l_idx)
            if nr_address_cells:
                prop_def[l_addr_fqn] = hex(addr)
                add_compat_alias(node_path, '_'.join(l_addr + l_idx), l_addr_fqn, prop_alias)
            if nr_size_cells:
                prop_def[l_size_fqn] = int(size / div)
                add_compat_alias(node_path, '_'.join(l_size + l_idx), l_size_fqn, prop_alias)
            if len(name):
                if nr_address_cells:
                    prop_alias['_'.join(l_base + name + l_addr)] = l_addr_fqn
                    add_compat_alias(node_path, '_'.join(name + l_addr), l_addr_fqn, prop_alias)
                if nr_size_cells:
                    prop_alias['_'.join(l_base + name + l_size)] = l_size_fqn
                    add_compat_alias(node_path, '_'.join(name + l_size), l_size_fqn, prop_alias)

            # generate defs for node aliases
            if node_path in aliases:
                add_prop_aliases(
                    node_path,
                    lambda alias:
                        '_'.join([str_to_label(alias)] + l_addr + l_idx),
                    l_addr_fqn,
                    prop_alias)
                if nr_size_cells:
                    add_prop_aliases(
                        node_path,
                        lambda alias:
                            '_'.join([str_to_label(alias)] + l_size + l_idx),
                        l_size_fqn,
                        prop_alias)

            insert_defs(node_path, prop_def, prop_alias)

            # increment index for definition creation
            index += 1
##
# @brief Management information for registers.
# Singleton instance used by the dts extraction framework.
reg = DTReg()
| apache-2.0 |
CroissanceCommune/autonomie | autonomie/forms/tasks/invoice.py | 1 | 22709 | # -*- coding: utf-8 -*-
# * Copyright (C) 2012-2013 Croissance Commune
# * Authors:
# * Arezki Feth <f.a@majerti.fr>;
# * Miotte Julien <j.m@majerti.fr>;
# * Pettier Gabriel;
# * TJEBBES Gaston <g.t@majerti.fr>
#
# This file is part of Autonomie : Progiciel de gestion de CAE.
#
# Autonomie is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Autonomie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Autonomie. If not, see <http://www.gnu.org/licenses/>.
#
"""
form schemas for invoices related views
"""
import functools
import colander
import deform
import deform_extensions
from colanderalchemy import SQLAlchemySchemaNode
from pyramid.security import has_permission
from autonomie.models.task import (
invoice,
Estimation,
)
from autonomie.models.tva import (
Product,
Tva,
)
from autonomie.models.payments import (
BankAccount,
PaymentMode,
)
from autonomie.models.task.invoice import (
Invoice,
CancelInvoice,
Payment,
INVOICE_STATES,
)
from autonomie.utils.strings import format_amount
from autonomie import forms
from autonomie.forms.company import (
customer_filter_node_factory,
company_filter_node_factory,
)
from autonomie.forms.custom_types import (
AmountType,
)
from autonomie.forms.widgets import FixedLenSequenceWidget
from autonomie.forms.widgets import CleanMappingWidget
from autonomie.forms.payments import (
get_amount_topay,
deferred_amount_default,
deferred_payment_mode_widget,
deferred_payment_mode_validator,
deferred_bank_widget,
deferred_bank_validator,
)
from autonomie.forms.tasks.lists import (
PeriodSchema,
AmountRangeSchema,
)
from autonomie.forms.tasks.task import get_add_edit_task_schema
# (value, label) choices for the paid-status list filter.
PAID_STATUS_OPTIONS = (
    ("all", u"Filtrer par statut de paiement", ),
    ("paid", u"Les factures payées", ),
    ("notpaid", u"Seulement les impayés", )
)

# (value, label) choices for the validation-status list filter.
STATUS_OPTIONS = (
    ('all', u"Filtrer par statut", ),
    ('draft', u"Brouillon"),
    ('wait', u"En attente de validation"),
    ('invalid', u"Invalide"),
    ('valid', u"Valide"),
)

# (value, label) choices for the document-type (invoice/cancelinvoice)
# list filter.
TYPE_OPTIONS = (
    ("both", u"Filtrer par factures/avoirs", ),
    ('invoice', u"Seulement les factures", ),
    ('cancelinvoice', u"Seulement les avoirs",),
)

# Number of decimal places stored for amounts in the database.
AMOUNT_PRECISION = 5
# 5€ => 500000 in db format
# Tolerance allowed when a single payment exceeds the amount left to pay.
PAYMENT_EPSILON = 5 * 10 ** AMOUNT_PRECISION
# Tolerance when comparing a payment to the sum of its per-TVA parts.
PAYMENT_SUM_EPSILON = 0.1 * 10 ** AMOUNT_PRECISION
def get_product_choices():
    """
    Build the (id, label) option tuples for the product select widget
    """
    choices = []
    for product in Product.query():
        label = u"{0} ({1} - {2})".format(
            product.name, product.compte_cg, product.tva.name)
        choices.append((product.id, label,))
    return choices
@colander.deferred
def deferred_product_validator(node, kw):
    """Restrict values to the ids present in the product choice list."""
    valid_ids = [product_id for product_id, _ in get_product_choices()]
    return colander.OneOf(valid_ids)
@colander.deferred
def deferred_product_widget(node, kw):
    """
    return a select widget listing the configured products
    """
    return deform.widget.SelectWidget(values=get_product_choices())
def product_match_tva_validator(form, line_value):
    """
    Validate that the selected product's TVA matches the line's TVA

    :param obj form: The form node being validated
    :param dict line_value: The deserialized line datas
    :raises: colander.Invalid on mismatch or unknown product
    """
    product_id = line_value.get('product_id')
    product = Product.get(product_id)
    if product is None:
        # Guard against an unknown/absent product id: report a proper
        # validation error instead of crashing with AttributeError.
        raise colander.Invalid(form, u"Code produit inconnu")
    if product.tva.value != line_value['tva']:
        raise colander.Invalid(
            form,
            u"Le code produit doit correspondre à la TVA associée",
        )
@colander.deferred
def deferred_financial_year_widget(node, kw):
    """Show the year input to managers only; hide it otherwise."""
    request = kw['request']
    if not has_permission('manage', request.context, request):
        return deform.widget.HiddenWidget()
    return deform.widget.TextInputWidget(mask='9999')
# Reusable schema node for the reference financial year; its rendering is
# permission-dependent (hidden for non-managers).
FINANCIAL_YEAR = colander.SchemaNode(
    colander.Integer(),
    name="financial_year",
    title=u"Année comptable de référence",
    widget=deferred_financial_year_widget,
    default=forms.default_year,
)
class FinancialYearSchema(colander.MappingSchema):
    """
    colander Schema for financial year setting
    """
    # Single field: the reference financial year (see FINANCIAL_YEAR).
    financial_year = FINANCIAL_YEAR
class ProductTaskLine(colander.MappingSchema):
    """
    A single estimation line
    """
    # Hidden identifier of the task line being configured.
    id = colander.SchemaNode(
        colander.Integer(),
        widget=deform.widget.HiddenWidget(),
        missing=u"",
        css_class="span0"
    )
    # Read-only reminder of the line's description.
    description = colander.SchemaNode(
        colander.String(),
        widget=deform.widget.TextInputWidget(readonly=True),
        missing=u'',
        css_class='col-md-3',
    )
    # Displayed (disabled) TVA rate; product_match_tva_validator checks
    # the chosen product against this value.
    tva = colander.SchemaNode(
        AmountType(),
        widget=deform_extensions.DisabledInput(),
        css_class='col-md-1',
        title=u'TVA',
    )
    # The only editable field: the accounting product code.
    product_id = colander.SchemaNode(
        colander.Integer(),
        widget=deferred_product_widget,
        validator=deferred_product_validator,
        missing="",
        css_class="col-md-2",
        title=u"Code produit",
    )
class ProductTaskLines(colander.SequenceSchema):
    # One entry per task line; each line's product must match its TVA rate.
    taskline = ProductTaskLine(
        missing="",
        title=u"",
        validator=product_match_tva_validator,
        widget=CleanMappingWidget(),
    )
class SetProductsSchema(colander.MappingSchema):
    """
    Form schema used to configure Products
    """
    # Fixed-length sequence: get_set_product_schema() pins min/max length
    # to the number of task lines.
    lines = ProductTaskLines(
        widget=FixedLenSequenceWidget(),
        missing="",
        title=u''
    )
def get_set_product_schema(lines):
    """
    Return a product-configuration schema sized to the given lines
    """
    schema = SetProductsSchema()
    nb_lines = len(lines)
    # Pin the sequence widget to exactly one entry per task line.
    schema['lines'].widget.min_len = nb_lines
    schema['lines'].widget.max_len = nb_lines
    return schema
# INVOICE LIST RELATED SCHEMAS
def get_list_schema(is_global=False, excludes=()):
    """
    Return a schema for invoice listing

    is_global

        If True, customer select is only related to the current company

    excludes

        Names of the filter nodes to leave out of the schema
    """
    schema = forms.lists.BaseListsSchema().clone()
    if 'paid_status' not in excludes:
        schema.insert(
            0,
            colander.SchemaNode(
                colander.String(),
                name='paid_status',
                widget=deform.widget.SelectWidget(values=PAID_STATUS_OPTIONS),
                validator=colander.OneOf([s[0] for s in PAID_STATUS_OPTIONS]),
                missing='all',
                default='all',
            )
        )
    if 'status' not in excludes:
        schema.insert(
            0,
            colander.SchemaNode(
                colander.String(),
                name='status',
                widget=deform.widget.SelectWidget(values=STATUS_OPTIONS),
                validator=colander.OneOf([s[0] for s in STATUS_OPTIONS]),
                missing='all',
                default='all',
            )
        )
    # The doctype filter is always present.
    schema.insert(
        0,
        colander.SchemaNode(
            colander.String(),
            name='doctype',
            widget=deform.widget.SelectWidget(values=TYPE_OPTIONS),
            validator=colander.OneOf([s[0] for s in TYPE_OPTIONS]),
            missing='both',
            default='both',
        )
    )
    if 'customer' not in excludes:
        schema.insert(0, customer_filter_node_factory(
            is_admin=is_global,
            name='customer_id',
            with_invoice=True,
        ))
    if 'company_id' not in excludes:
        schema.insert(
            0,
            company_filter_node_factory(name='company_id')
        )
    schema.insert(
        0,
        PeriodSchema(
            name='period',
            title="",
            validator=colander.Function(
                forms.range_validator,
                # Bug fix: the message previously ended with
                # "la date de début" instead of "la date de fin".
                msg=u"La date de début doit précéder la date de fin"
            ),
            widget=CleanMappingWidget(),
            missing=colander.drop,
        )
    )
    schema.insert(
        0,
        AmountRangeSchema(
            name='ttc',
            title="",
            validator=colander.Function(
                forms.range_validator,
                msg=u"Le montant de départ doit être inférieur ou égale \
à celui de la fin"
            ),
            widget=CleanMappingWidget(),
            missing=colander.drop,
        )
    )
    if 'year' not in excludes:
        def get_year_options(kw):
            # Prepend an empty entry so "no year" can be selected.
            values = invoice.get_invoice_years(kw)
            values.insert(0, u'')
            return values
        node = forms.year_select_node(
            name='year',
            query_func=get_year_options,
            missing=-1,
            description=u"Année fiscale"
        )
        schema.insert(0, node)
    schema['search'].description = u"Identifiant du document"
    return schema
def range_validator(form, value):
    """
    Ensure the end number is greater than or equal to the start number

    An end of 0 or less means "no upper bound" and is always accepted.
    """
    end = value['end']
    if end > 0 and value['start'] > end:
        error = colander.Invalid(
            form,
            u"Le numéro de début doit être plus petit ou égal à celui de fin"
        )
        error['start'] = u"Doit être inférieur au numéro de fin"
        raise error
class InvoicesPdfExport(colander.MappingSchema):
    """
    Schema for invoice bulk export
    """
    # Accounting year the exported invoices belong to.
    year = forms.year_select_node(
        title=u"Année comptable",
        query_func=invoice.get_invoice_years
    )
    start = colander.SchemaNode(
        colander.Integer(),
        title=u"Numéro de début",
        description=u"Numéro à partir duquel exporter",
    )
    # -1 (the missing default) means "up to the last document".
    end = colander.SchemaNode(
        colander.Integer(),
        title=u"Numéro de fin",
        description=u"Numéro jusqu'auquel exporter \
(dernier document si vide)",
        missing=-1,
    )
# Module-level schema instance used by the bulk PDF export view.
pdfexportSchema = InvoicesPdfExport(
    title=u"Exporter un ensemble de factures dans un fichier pdf",
    validator=range_validator,
)
@colander.deferred
def deferred_bank_remittance_id_default(node, kw):
    """
    Default value for the bank remittance identifier: the formatted
    amount left to pay (the previous docstring wrongly described this
    as the payment amount default)
    """
    return format_amount(
        get_amount_topay(kw),
        precision=AMOUNT_PRECISION,
        grouping=False
    )
@colander.deferred
def deferred_total_validator(node, kw):
    """
    validate the amount to keep the sum under the total

    The upper bound is the amount left to pay plus PAYMENT_EPSILON.
    """
    topay = get_amount_topay(kw)
    max_msg = u"Le montant ne doit pas dépasser %s (total ttc - somme \
des paiements + montant d'un éventuel avoir)" % (
        format_amount(topay, precision=AMOUNT_PRECISION, grouping=False))
    min_msg = u"Le montant doit être positif"
    # We insert a large epsilon to allow larger payments to be registered
    max_value = topay + PAYMENT_EPSILON
    return colander.Range(
        min=0, max=max_value, min_err=min_msg, max_err=max_msg,
    )
@colander.deferred
def deferred_tva_id_validator(node, kw):
    """
    Build a validator accepting only Tva ids actually used in the
    invoice being paid (the context may be the invoice itself or an
    existing Payment whose parent is the invoice).
    """
    ctx = kw['request'].context
    if isinstance(ctx, Payment):
        invoice = ctx.parent
    else:
        invoice = ctx
    values = []
    for tva_value in invoice.topay_by_tvas().keys():
        values.append(Tva.by_value(tva_value))
    def validator(node, value):
        if value not in [v.id for v in values]:
            raise colander.Invalid(
                node,
                u"Ce taux de tva n'est pas utilisé dans la facture",
            )
    return validator
class PaymentSchema(colander.MappingSchema):
    """
    colander schema for payment recording (single-TVA invoices)
    """
    come_from = forms.come_from_node()
    # Free-form marker used to find the matching bank remittance later.
    bank_remittance_id = colander.SchemaNode(
        colander.String(),
        title=u"Identifiant de la remise en banque",
        description=u"Ce champ est un indicateur permettant de \
retrouver la remise en banque à laquelle cet encaissement est associé",
        default=deferred_bank_remittance_id_default,
    )
    # Bounded by the amount left to pay (+ PAYMENT_EPSILON tolerance).
    amount = colander.SchemaNode(
        AmountType(5),
        title=u"Montant de l'encaissement",
        validator=deferred_total_validator,
        default=deferred_amount_default,
    )
    date = forms.today_node()
    mode = colander.SchemaNode(
        colander.String(),
        title=u"Mode de paiement",
        widget=deferred_payment_mode_widget,
        validator=deferred_payment_mode_validator,
    )
    bank_id = colander.SchemaNode(
        colander.Integer(),
        title=u"Banque",
        missing=colander.drop,
        widget=deferred_bank_widget,
        validator=deferred_bank_validator,
        default=forms.get_deferred_default(BankAccount),
    )
    # Hidden by get_payment_schema() when the TVA module is disabled.
    tva_id = colander.SchemaNode(
        colander.Integer(),
        title=u"Tva liée à cet encaissement",
        widget=forms.get_deferred_select(
            Tva, mandatory=True, keys=('id', 'name')
        ),
        validator=deferred_tva_id_validator
    )
    resulted = colander.SchemaNode(
        colander.Boolean(),
        title=u"Soldé",
        description="""Indique que le document est soldé (
ne recevra plus de paiement), si le montant indiqué correspond au
montant de la facture celle-ci est soldée automatiquement""",
        default=False,
    )
class TvaPayment(colander.MappingSchema):
    """
    One payment entry attached to a single TVA rate of the invoice
    """
    amount = colander.SchemaNode(
        AmountType(5),
        title=u"Montant de l'encaissement",
    )
    tva_id = colander.SchemaNode(
        colander.Integer(),
        title=u"Tva liée à cet encaissement",
        widget=forms.get_deferred_select(
            Tva, mandatory=True, keys=('id', 'name')
        ),
        validator=deferred_tva_id_validator
    )
@colander.deferred
def deferred_amount_by_tva_validation(node, kw):
    """
    Build a validator checking that a per-TVA payment does not exceed
    that TVA's share of the invoice total (with a tolerance)
    """
    invoice = kw['request'].context
    tva_parts = invoice.tva_ttc_parts()
    def validate_amount_by_tva(values):
        tva_id = values.get('tva_id')
        tva = Tva.get(tva_id)
        if tva is None:
            return u"Tva inconnue"
        amount = values.get('amount')
        # Fix #433 : encaissement et tva multiples
        # Add a tolerance for 5 € of difference
        if amount > tva_parts[tva.value] + PAYMENT_EPSILON:
            return u"Le montant de l'encaissement doit être inférieur à la \
part de cette Tva dans la facture"
        return True
    return colander.Function(validate_amount_by_tva)
@colander.deferred
def deferred_payment_amount_validation(node, kw):
    """
    Validate that the global payment amount equals the sum of the
    per-TVA payment entries (within PAYMENT_SUM_EPSILON)
    """
    def validate_sum_of_tvapayments(values):
        """
        Validate the sum of the tva payments equals the payment amount
        """
        tva_sum = sum([tvap['amount'] for tvap in values['tvas']])
        # Renamed from the misleading `bank_remittance_id`: this value is
        # the global payment amount entered in the form.
        payment_amount = values['payment_amount']
        diff = abs(tva_sum - payment_amount)
        if diff >= PAYMENT_SUM_EPSILON:
            return u"Le montant du paiement doit correspondre à la somme \
des encaissements correspondant"
        return True
    return colander.Function(validate_sum_of_tvapayments)
class TvaPaymentSequence(colander.SequenceSchema):
    # One payment entry per TVA rate present in the invoice.
    tvas = TvaPayment(title=u'', validator=deferred_amount_by_tva_validation)
class MultiplePaymentSchema(colander.MappingSchema):
    """
    colander schema for payment recording (multi-TVA invoices)
    """
    come_from = forms.come_from_node()
    bank_remittance_id = colander.SchemaNode(
        colander.String(),
        title=u"Identifiant de la remise en banque",
        default=deferred_bank_remittance_id_default,
    )
    # Control total: must equal the sum of the per-TVA entries below
    # (enforced by deferred_payment_amount_validation).
    payment_amount = colander.SchemaNode(
        AmountType(5),
        title=u"Montant du paiement",
        description=u"Ce champ permet de contrôler que la somme des \
encaissements saisis dans ce formulaire correspondent bien au montant du \
paiement.",
        validator=deferred_total_validator,
        default=deferred_amount_default,
    )
    date = forms.today_node(title=u"Date de la remise")
    mode = colander.SchemaNode(
        colander.String(),
        title=u"Mode de paiement",
        widget=deferred_payment_mode_widget,
        validator=deferred_payment_mode_validator,
    )
    bank_id = colander.SchemaNode(
        colander.Integer(),
        title=u"Banque",
        missing=colander.drop,
        widget=deferred_bank_widget,
        default=forms.get_deferred_default(BankAccount),
    )
    # Per-TVA breakdown of the payment.
    tvas = TvaPaymentSequence(title=u'Encaissements par taux de Tva')
    resulted = colander.SchemaNode(
        colander.Boolean(),
        title=u"Soldé",
        description="""Indique que le document est soldé (
ne recevra plus de paiement), si le montant indiqué correspond au
montant de la facture celle-ci est soldée automatiquement""",
        default=False,
    )
def get_payment_schema(request):
    """
    Returns the schema for payment registration

    Single-TVA invoices get the simple PaymentSchema (with the tva field
    hidden when the receipts TVA module is disabled); multi-TVA invoices
    get MultiplePaymentSchema with one entry per TVA rate.
    """
    invoice = request.context
    tva_module = request.config.get('receipts_active_tva_module')
    num_tvas = len(invoice.get_tvas().keys())
    # Only one tva
    if num_tvas == 1:
        schema = PaymentSchema().clone()
        if not tva_module or tva_module == '0':
            schema['tva_id'].widget = deform.widget.HiddenWidget()
        return schema
    else:
        schema = MultiplePaymentSchema(
            validator=deferred_payment_amount_validation
        ).clone()
        # Between one entry and one entry per distinct TVA rate.
        schema['tvas'].widget = deform.widget.SequenceWidget(
            min_len=1,
            max_len=num_tvas,
            orderable=False,
        )
        return schema
@colander.deferred
def deferred_estimation_widget(node, kw):
    """
    Build a select widget listing the current project's estimations
    """
    project_id = kw['request'].context.project_id
    query = Estimation.query().filter_by(project_id=project_id)
    choices = [('', 'Aucun devis')]
    for estimation in query:
        choices.append((estimation.id, estimation.name))
    return deform.widget.SelectWidget(values=choices)
class EstimationAttachSchema(colander.Schema):
    # Optional link from an invoice to one of its project's estimations.
    estimation_id = colander.SchemaNode(
        colander.Integer(),
        widget=deferred_estimation_widget,
        missing=colander.drop,
        title=u"Devis à rattacher à cette facture",
    )
def _customize_invoice_schema(schema):
    """
    Add form schema customization to the given Invoice edition schema

    :param obj schema: The schema to edit
    :returns: The customized schema
    """
    customize = functools.partial(forms.customize_field, schema)
    # Restrict paid_status to the known invoice payment states.
    customize(
        "paid_status",
        widget=deform.widget.SelectWidget(values=INVOICE_STATES),
        validator=colander.OneOf(dict(INVOICE_STATES).keys())
    )
    customize(
        'financial_year',
        widget=deform.widget.TextInputWidget(mask='9999')
    )
    # The estimation link is optional.
    customize('estimation_id', missing=colander.drop)
    return schema
def _customize_cancelinvoice_schema(schema):
    """
    Add form schema customization to the given CancelInvoice edition schema

    :param obj schema: The schema to edit
    :returns: The customized schema
    """
    customize = functools.partial(forms.customize_field, schema)
    # A cancelinvoice must always reference the invoice it cancels.
    customize('invoice_id', missing=colander.required)
    customize(
        'financial_year',
        widget=deform.widget.TextInputWidget(mask='9999')
    )
    return schema
def _customize_payment_schema(schema):
    """
    Add form schema customization to the given payment edition schema

    :param obj schema: The schema to edit
    :returns: The customized schema
    """
    customize = functools.partial(forms.customize_field, schema)
    customize(
        "mode",
        validator=forms.get_deferred_select_validator(
            PaymentMode, id_key='label'
        ),
        missing=colander.required
    )
    # Amounts are stored with 5-decimal precision (AMOUNT_PRECISION).
    customize("amount", typ=AmountType(5), missing=colander.required)
    customize("bank_remittance_id", missing=colander.required)
    customize("date", missing=colander.required)
    customize("task_id", missing=colander.required)
    customize(
        "bank_id",
        validator=forms.get_deferred_select_validator(BankAccount),
        missing=colander.required,
    )
    # The tva link is optional (depends on the receipts TVA module).
    customize(
        "tva_id",
        validator=forms.get_deferred_select_validator(Tva),
        missing=colander.drop,
    )
    customize("user_id", missing=colander.required)
    return schema
def get_add_edit_invoice_schema(isadmin=False, includes=None, **kw):
    """
    Build the add/edit schema for Invoice documents

    :param bool isadmin: Are we asking for an admin schema ?
    :param tuple includes: Field that should be included in the schema
    :rtype: `colanderalchemy.SQLAlchemySchemaNode`
    """
    base_schema = get_add_edit_task_schema(
        Invoice, isadmin=isadmin, includes=includes, **kw
    )
    return _customize_invoice_schema(base_schema)
def get_add_edit_cancelinvoice_schema(isadmin=False, includes=None, **kw):
    """
    Build the add/edit schema for CancelInvoice documents

    :param bool isadmin: Are we asking for an admin schema ?
    :param tuple includes: Field that should be included in the schema
    :rtype: `colanderalchemy.SQLAlchemySchemaNode`
    """
    base_schema = get_add_edit_task_schema(
        CancelInvoice, isadmin=isadmin, includes=includes, **kw
    )
    return _customize_cancelinvoice_schema(base_schema)
def get_add_edit_payment_schema(includes=None):
    """
    Build the add/edit schema for Payment objects

    :param tuple includes: Field that should be included in the schema
    :rtype: `colanderalchemy.SQLAlchemySchemaNode`
    """
    base_schema = SQLAlchemySchemaNode(Payment, includes=includes)
    return _customize_payment_schema(base_schema)
def validate_invoice(invoice_object, request):
    """
    Globally validate an invoice_object

    :param obj invoice_object: An instance of Invoice
    :param obj request: The pyramid request
    :returns: The deserialized (validated) datas
    :raises: colander.Invalid

        try:
            validate_invoice(invoice, self.request)
        except colander.Invalid as err:
            error_messages = err.messages
    """
    schema = get_add_edit_invoice_schema()
    schema = schema.bind(request=request)
    # Serialize the model to an appstruct and run it back through the
    # schema so all field validators fire.
    appstruct = invoice_object.__json__(request)
    cstruct = schema.deserialize(appstruct)
    return cstruct
def validate_cancelinvoice(cancelinvoice_object, request):
    """
    Run a global validation pass over a cancelinvoice_object.

    :param obj cancelinvoice_object: An instance of CancelInvoice
    :param obj request: The pyramid request
    :raises: colander.Invalid

    try:
        validate_cancelinvoice(est, self.request)
    except colander.Invalid as err:
        error_messages = err.messages
    """
    # Serialize the cancelinvoice the same way views do, then push the
    # result through the bound add/edit schema so every validator runs.
    bound_schema = get_add_edit_cancelinvoice_schema().bind(request=request)
    return bound_schema.deserialize(cancelinvoice_object.__json__(request))
| gpl-3.0 |
TheDegree0/menescraper | menescraper/menescraper/lib/python2.7/site-packages/setuptools/tests/test_packageindex.py | 377 | 7625 | """Package Index Tests
"""
import sys
import os
import unittest
import pkg_resources
from setuptools.compat import urllib2, httplib, HTTPError, unicode, pathname2url
import distutils.errors
import setuptools.package_index
from setuptools.tests.server import IndexServer
class TestPackageIndex(unittest.TestCase):
    """Tests for setuptools.package_index.PackageIndex URL handling and for
    related helpers (parse_bdist_wininst, _vcs_split_rev_from_url,
    local_open)."""
    def test_bad_url_bad_port(self):
        # Port 0 is never connectable, so open_url must either raise (and
        # the error text must mention the URL) or hand back an HTTPError.
        index = setuptools.package_index.PackageIndex()
        url = 'http://127.0.0.1:0/nonesuch/test_package_index'
        try:
            v = index.open_url(url)
        except Exception:
            # sys.exc_info()[1] instead of "except ... as e" keeps this
            # source compatible with both Python 2 and Python 3 syntax.
            v = sys.exc_info()[1]
            self.assertTrue(url in str(v))
        else:
            self.assertTrue(isinstance(v, HTTPError))
    def test_bad_url_typo(self):
        # issue 16
        # easy_install inquant.contentmirror.plone breaks because of a typo
        # in its home URL
        index = setuptools.package_index.PackageIndex(
            hosts=('www.example.com',)
        )
        url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk'
        try:
            v = index.open_url(url)
        except Exception:
            v = sys.exc_info()[1]
            self.assertTrue(url in str(v))
        else:
            self.assertTrue(isinstance(v, HTTPError))
    def test_bad_url_bad_status_line(self):
        index = setuptools.package_index.PackageIndex(
            hosts=('www.example.com',)
        )
        # Replace the opener so every request fails with BadStatusLine;
        # open_url must surface the 'line' text in the resulting error.
        def _urlopen(*args):
            raise httplib.BadStatusLine('line')
        index.opener = _urlopen
        url = 'http://example.com'
        try:
            v = index.open_url(url)
        except Exception:
            v = sys.exc_info()[1]
            self.assertTrue('line' in str(v))
        else:
            raise AssertionError('Should have raise here!')
    def test_bad_url_double_scheme(self):
        """
        A bad URL with a double scheme should raise a DistutilsError.
        """
        index = setuptools.package_index.PackageIndex(
            hosts=('www.example.com',)
        )
        # issue 20
        url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk'
        try:
            index.open_url(url)
        except distutils.errors.DistutilsError:
            error = sys.exc_info()[1]
            msg = unicode(error)
            # The exact message is platform/resolver dependent, so accept
            # any of the known failure strings.
            assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg
            return
        raise RuntimeError("Did not raise")
    def test_bad_url_screwy_href(self):
        index = setuptools.package_index.PackageIndex(
            hosts=('www.example.com',)
        )
        # issue #160
        if sys.version_info[0] == 2 and sys.version_info[1] == 7:
            # this should not fail
            url = 'http://example.com'
            page = ('<a href="http://www.famfamfam.com]('
                    'http://www.famfamfam.com/">')
            index.process_index(url, page)
    def test_url_ok(self):
        index = setuptools.package_index.PackageIndex(
            hosts=('www.example.com',)
        )
        # A local file: URL must pass url_ok even though hosts is
        # restricted to www.example.com.
        url = 'file:///tmp/test_package_index'
        self.assertTrue(index.url_ok(url, True))
    def test_links_priority(self):
        """
        Download links from the pypi simple index should be used before
        external download links.
        https://bitbucket.org/tarek/distribute/issue/163
        Usecase :
        - someone uploads a package on pypi, a md5 is generated
        - someone manually copies this link (with the md5 in the url) onto an
          external page accessible from the package page.
        - someone reuploads the package (with a different md5)
        - while easy_installing, an MD5 error occurs because the external link
          is used
        -> Setuptools should use the link from pypi, not the external one.
        """
        if sys.platform.startswith('java'):
            # Skip this test on jython because binding to :0 fails
            return
        # start an index server
        server = IndexServer()
        server.start()
        index_url = server.base_url() + 'test_links_priority/simple/'
        # scan a test index
        pi = setuptools.package_index.PackageIndex(index_url)
        requirement = pkg_resources.Requirement.parse('foobar')
        pi.find_packages(requirement)
        server.stop()
        # the distribution has been found
        self.assertTrue('foobar' in pi)
        # we have only one link, because links are compared without md5
        self.assertTrue(len(pi['foobar'])==1)
        # the link should be from the index
        self.assertTrue('correct_md5' in pi['foobar'][0].location)
    def test_parse_bdist_wininst(self):
        # Filenames decode to (base, python version, platform); the python
        # version component may be absent, yielding None.
        self.assertEqual(setuptools.package_index.parse_bdist_wininst(
            'reportlab-2.5.win32-py2.4.exe'), ('reportlab-2.5', '2.4', 'win32'))
        self.assertEqual(setuptools.package_index.parse_bdist_wininst(
            'reportlab-2.5.win32.exe'), ('reportlab-2.5', None, 'win32'))
        self.assertEqual(setuptools.package_index.parse_bdist_wininst(
            'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64'))
        self.assertEqual(setuptools.package_index.parse_bdist_wininst(
            'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64'))
    def test__vcs_split_rev_from_url(self):
        """
        Test the basic usage of _vcs_split_rev_from_url
        """
        vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url
        url, rev = vsrfu('https://example.com/bar@2995')
        self.assertEqual(url, 'https://example.com/bar')
        self.assertEqual(rev, '2995')
    def test_local_index(self):
        """
        local_open should be able to read an index from the file system.
        """
        # NOTE(review): writes a temp file into the current working
        # directory — assumes the cwd is writable; confirm for CI runners.
        f = open('index.html', 'w')
        f.write('<div>content</div>')
        f.close()
        try:
            url = 'file:' + pathname2url(os.getcwd()) + '/'
            res = setuptools.package_index.local_open(url)
        finally:
            os.remove('index.html')
        assert 'content' in res.read()
class TestContentCheckers(unittest.TestCase):
    """Behaviour of HashChecker when validating downloaded content."""
    def test_md5(self):
        url = 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478'
        checker = setuptools.package_index.HashChecker.from_url(url)
        payload = 'You should probably not be using MD5'.encode('ascii')
        checker.feed(payload)
        digest = checker.hash.hexdigest()
        self.assertEqual(digest, 'f12895fdffbd45007040d2e44df98478')
        self.assertTrue(checker.is_valid())
    def test_other_fragment(self):
        "Content checks should succeed silently if no hash is present"
        url = 'http://foo/bar#something%20completely%20different'
        checker = setuptools.package_index.HashChecker.from_url(url)
        checker.feed('anything'.encode('ascii'))
        self.assertTrue(checker.is_valid())
    def test_blank_md5(self):
        "Content checks should succeed if a hash is empty"
        url = 'http://foo/bar#md5='
        checker = setuptools.package_index.HashChecker.from_url(url)
        checker.feed('anything'.encode('ascii'))
        self.assertTrue(checker.is_valid())
    def test_get_hash_name_md5(self):
        url = 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478'
        checker = setuptools.package_index.HashChecker.from_url(url)
        self.assertEqual(checker.hash_name, 'md5')
    def test_report(self):
        url = 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478'
        checker = setuptools.package_index.HashChecker.from_url(url)
        rep = checker.report(lambda x: x, 'My message about %s')
        self.assertEqual(rep, 'My message about md5')
| gpl-2.0 |
18F/regulations-parser | tests/layer_section_by_section_tests.py | 11 | 5436 | from unittest import TestCase
from regparser.layer.section_by_section import SectionBySection
from regparser.tree.struct import Node
class LayerSectionBySectionTest(TestCase):
    """Tests for the SectionBySection layer: process() maps a node's label
    to reference data (notice document number, label, volume, page, date)
    taken from notices' section-by-section analyses."""
    def test_process(self):
        # Two notices; label 100-22-b-2 is analysed in both (nested under a
        # parent entry in notice2), 100-22-b only in notice1.
        notice1 = {
            "document_number": "111-22",
            "fr_volume": 22,
            "cfr_part": "100",
            'publication_date': '2008-08-08',
            "section_by_section": [{
                "title": "",
                "labels": ["100-22-b-2"],
                "paragraphs": ["AAA"],
                "page": 7677,
                "children": []
            }, {
                "title": "",
                "labels": ["100-22-b"],
                "paragraphs": ["BBB"],
                "page": 7676,
                "children": []
            }]
        }
        notice2 = {
            "document_number": "111-23",
            "fr_volume": 23,
            "cfr_part": "100",
            'publication_date': '2009-09-09',
            "section_by_section": [{
                "title": "",
                "paragraphs": [],
                "children": [{
                    "title": "",
                    "labels": ["100-22-b-2"],
                    "paragraphs": ["CCC"],
                    "page": 5454,
                    "children": []
                }]
            }]
        }
        s = SectionBySection(None, notices=[notice1, notice2])
        # A label no notice analyses yields no layer data.
        self.assertEqual(None, s.process(Node(label=['100', '55'])))
        self.assertEqual(s.process(Node(label=['100', '22', 'b'])),
                         [{"reference": ('111-22', '100-22-b'),
                           "publication_date": "2008-08-08",
                           "fr_volume": 22,
                           "fr_page": 7676}])
        # Both notices' analyses of 100-22-b-2 are returned, including the
        # one nested under a parent entry.
        self.assertEqual(s.process(Node(label=['100', '22', 'b', '2'])), [
            {"reference": ('111-22', '100-22-b-2'),
             "publication_date": "2008-08-08",
             "fr_volume": 22,
             "fr_page": 7677},
            {"reference": ('111-23', '100-22-b-2'),
             "publication_date": "2009-09-09",
             "fr_volume": 23,
             "fr_page": 5454}])
    def test_process_empty(self):
        # Entries whose "paragraphs" list is empty should contribute no
        # layer data for their label.
        # NOTE(review): the later entries use "label" (singular) while the
        # first uses "labels" — confirm that asymmetry is intentional in
        # the fixture.
        notice = {
            "document_number": "111-22",
            "fr_volume": 22,
            "cfr_part": "100",
            'publication_date': '2008-08-08',
            "section_by_section": [{
                "title": "",
                "labels": ["100-22-a"],
                "paragraphs": [],
                "page": 7676,
                "children": []
            }, {
                "title": "",
                "label": "100-22-b",
                "paragraphs": ["BBB"],
                "page": 7677,
                "children": []
            }, {
                "title": "",
                "label": "100-22-c",
                "paragraphs": [],
                "page": 7678,
                "children": [{
                    "label": "100-22-c-1",
                    "title": "",
                    "paragraphs": ["123"],
                    "page": 7679,
                    "children": []
                }]
            }, {
                "title": "",
                "label": "100-22-d",
                "paragraphs": [],
                "page": 7680,
                "children": [{
                    "title": "",
                    "paragraphs": ["234"],
                    "page": 7681,
                    "children": []
                }]
            }]
        }
        s = SectionBySection(None, notices=[notice])
        self.assertEqual(None, s.process(Node(label=['100-22-b-2'])))
        self.assertEqual(None, s.process(Node(label=['100-22-c'])))
    def test_process_order(self):
        # notice1 (2010) is listed first but published later than notice2
        # (2009); the result must come back ordered by publication date.
        notice1 = {
            "document_number": "111-22",
            "fr_volume": 22,
            "cfr_part": "100",
            "publication_date": "2010-10-10",
            "section_by_section": [{
                "title": "",
                "labels": ["100-22-b-2"],
                "paragraphs": ["AAA"],
                "page": 7676,
                "children": []
            }]
        }
        notice2 = {
            "document_number": "111-23",
            "fr_volume": 23,
            "cfr_part": "100",
            "publication_date": "2009-09-09",
            "section_by_section": [{
                "title": "",
                "labels": ["100-22-b-2"],
                "paragraphs": ["CCC"],
                "page": 5454,
                "children": []
            }]
        }
        s = SectionBySection(None, notices=[notice1, notice2])
        self.assertEqual(s.process(Node(label=['100', '22', 'b', '2'])), [
            {"reference": ('111-23', '100-22-b-2'),
             "publication_date": "2009-09-09",
             "fr_volume": 23,
             "fr_page": 5454},
            {"reference": ('111-22', '100-22-b-2'),
             "publication_date": "2010-10-10",
             "fr_volume": 22,
             "fr_page": 7676}])
    def test_no_section_by_section(self):
        """Not all notices have a section-by-section analysis section. Verify
        that the parser doesn't explode in these cases"""
        notice = {
            "document_number": "111-22",
            "fr_volume": 22,
            "cfr_part": "100",
            "publication_date": "2010-10-10"
        }
        s = SectionBySection(None, notices=[notice])
        self.assertEqual(None, s.process(Node(label=['100', '22'])))
| cc0-1.0 |
tmhorne/celtx | config/tests/unit-Expression.py | 1 | 1718 | import unittest
import sys
import os.path
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from Expression import Expression, Context
class TestContext(unittest.TestCase):
    """
    Unit tests for the Context class.

    Each test runs against a context where 'FAIL' maps to 'PASS'; lookups
    of undefined keys are expected to fall through to the key itself.
    """
    def setUp(self):
        self.c = Context()
        self.c['FAIL'] = 'PASS'
    def test_string_literal(self):
        """test string literal, fall-through for undefined var in a Context"""
        self.assertEqual(self.c['PASS'], 'PASS')
    def test_variable(self):
        """test value for defined var in the Context class"""
        self.assertEqual(self.c['FAIL'], 'PASS')
    def test_in(self):
        """test 'var in context' to not fall for fallback"""
        # assert_ is a deprecated alias of assertTrue (removed in
        # Python 3.12); use the canonical name.
        self.assertTrue('FAIL' in self.c)
        self.assertTrue('PASS' not in self.c)
class TestExpression(unittest.TestCase):
    """
    Unit tests for the Expression class.

    evaluate() is called with a context {FAIL: 'PASS'}.
    """
    def setUp(self):
        self.c = Context()
        self.c['FAIL'] = 'PASS'
    def test_string_literal(self):
        """Test for a string literal in an Expression"""
        self.assertEqual(Expression('PASS').evaluate(self.c), 'PASS')
    def test_variable(self):
        """Test for variable value in an Expression"""
        self.assertEqual(Expression('FAIL').evaluate(self.c), 'PASS')
    def test_not(self):
        """Test for the ! operator"""
        # assert_ is a deprecated alias of assertTrue (removed in
        # Python 3.12); use assertTrue/assertFalse instead.
        self.assertTrue(Expression('!0').evaluate(self.c))
        self.assertFalse(Expression('!1').evaluate(self.c))
    def test_equals(self):
        """ Test for the == operator"""
        self.assertTrue(Expression('FAIL == PASS').evaluate(self.c))
    def test_notequals(self):
        """ Test for the != operator"""
        self.assertTrue(Expression('FAIL != 1').evaluate(self.c))
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| mpl-2.0 |
pcarrier-packaging/deb-phantomjs | src/breakpad/src/tools/gyp/test/sibling/gyptest-all.py | 151 | 1061 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
"""
import TestGyp
# Generate and build everything declared by build/all.gyp from 'src',
# then run both produced programs and check their stdout.
test = TestGyp.TestGyp()
test.run_gyp('build/all.gyp', chdir='src')
test.build('build/all.gyp', test.ALL, chdir='src')
chdir = 'src/build'
# The top-level Makefile is in the directory where gyp was run.
# TODO(mmoss) Should the Makefile go in the directory of the passed in .gyp
# file? What about when passing in multiple .gyp files? Would sub-project
# Makefiles (see http://codereview.chromium.org/340008 comments) solve this?
if test.format == 'make':
  chdir = 'src'
# For xcode, each program's output lives under its own project directory,
# so chdir is re-pointed before each run.
if test.format == 'xcode':
  chdir = 'src/prog1'
test.run_built_executable('prog1',
                          chdir=chdir,
                          stdout="Hello from prog1.c\n")
if test.format == 'xcode':
  chdir = 'src/prog2'
test.run_built_executable('prog2',
                          chdir=chdir,
                          stdout="Hello from prog2.c\n")
test.pass_test()
| bsd-3-clause |
pli3/enigma2-git | lib/python/Components/Converter/ValueToPixmap.py | 25 | 1216 | from Components.Converter.Converter import Converter
from Components.Element import cached, ElementError
from Tools.Directories import fileExists, SCOPE_SKIN_IMAGE, SCOPE_CURRENT_SKIN, resolveFilename
from Tools.LoadPixmap import LoadPixmap
class ValueToPixmap(Converter, object):
    """
    Converter that turns the source's text value into a loaded pixmap.

    Two converter types are supported:
      - "Path": the source text is used directly as a pixmap file path.
      - "LanguageCode": the source text is treated as a language/country
        code; the characters after the first three (presumably the country
        part of codes like "en_GB" — TODO confirm) select a flag image from
        the skin's "countries" directory, falling back to "missing.png".
    """
    LANGUAGE_CODE = 0
    PATH = 1

    def __init__(self, type):
        """
        :param type: converter type string, either "LanguageCode" or "Path".
        :raises ElementError: for any other type string.
        """
        Converter.__init__(self, type)
        if type == "LanguageCode":
            self.type = self.LANGUAGE_CODE
        elif type == "Path":
            self.type = self.PATH
        else:
            raise ElementError("'%s' is not <LanguageCode|Path> for ValueToPixmap converter" % type)

    @cached
    def getPixmap(self):
        """Return the pixmap for the current source text, or None when the
        source is unset, its text is empty, or loading fails."""
        if self.source:
            val = self.source.text
            if val in (None, ""):
                return None
            if self.type == self.PATH:
                return LoadPixmap(val)
            if self.type == self.LANGUAGE_CODE:
                png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "countries/" + val[3:].lower() + ".png"))
                # PEP 8: compare against None with "is", not "==".
                if png is None:
                    # No flag for this code in the current skin; use the
                    # generic placeholder image instead.
                    png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_SKIN_IMAGE, "countries/missing.png"))
                return png
        return None

    pixmap = property(getPixmap)

    def changed(self, what):
        # Only propagate source changes that are generic or specifically
        # target this converter's type.
        if what[0] != self.CHANGED_SPECIFIC or what[1] == self.type:
            Converter.changed(self, what)
| gpl-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.