repo_name
stringlengths 5
100
| path
stringlengths 4
375
| copies
stringclasses 991
values | size
stringlengths 4
7
| content
stringlengths 666
1M
| license
stringclasses 15
values |
|---|---|---|---|---|---|
pplatek/odoo
|
addons/account_asset/account_asset_invoice.py
|
193
|
3070
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_invoice(osv.osv):
    """Extend invoices so that numbering an invoice also creates assets."""
    _inherit = 'account.invoice'

    def action_number(self, cr, uid, ids, *args):
        """After the invoice gets its number, create assets for its lines."""
        numbered = super(account_invoice, self).action_number(cr, uid, ids, *args)
        line_obj = self.pool.get('account.invoice.line')
        for invoice in self.browse(cr, uid, ids):
            line_obj.asset_create(cr, uid, invoice.invoice_line)
        return numbered

    def line_get_convert(self, cr, uid, x, part, date, context=None):
        """Propagate the asset reference onto the generated move line."""
        converted = super(account_invoice, self).line_get_convert(
            cr, uid, x, part, date, context=context)
        converted['asset_id'] = x.get('asset_id', False)
        return converted
class account_invoice_line(osv.osv):
    """Invoice line that may carry an asset category used to spawn assets."""
    _inherit = 'account.invoice.line'
    _columns = {
        'asset_category_id': fields.many2one('account.asset.category', 'Asset Category'),
    }

    def asset_create(self, cr, uid, lines, context=None):
        """Create one asset per line that has a category; validate it when
        the category asks for an open asset. Always returns True."""
        if not context:
            context = {}
        asset_obj = self.pool.get('account.asset.asset')
        for line in lines:
            if not line.asset_category_id:
                continue
            invoice = line.invoice_id
            vals = {
                'name': line.name,
                'code': invoice.number or False,
                'category_id': line.asset_category_id.id,
                'purchase_value': line.price_subtotal,
                'period_id': invoice.period_id.id,
                'partner_id': invoice.partner_id.id,
                'company_id': invoice.company_id.id,
                'currency_id': invoice.currency_id.id,
                'purchase_date': invoice.date_invoice,
            }
            # Apply the category defaults exactly as the UI onchange would.
            onchange = asset_obj.onchange_category_id(
                cr, uid, [], vals['category_id'], context=context)
            vals.update(onchange['value'])
            asset_id = asset_obj.create(cr, uid, vals, context=context)
            if line.asset_category_id.open_asset:
                asset_obj.validate(cr, uid, [asset_id], context=context)
        return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
frankyrumple/smc
|
modules/requests/packages/urllib3/util/url.py
|
375
|
5760
|
from collections import namedtuple
from ..exceptions import LocationParseError
url_attrs = ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment']


class Url(namedtuple('Url', url_attrs)):
    """
    Datastructure for representing an HTTP URL. Used as a return value for
    :func:`parse_url`.
    """
    # BUG FIX: this was ``slots = ()`` — a plain class attribute with no
    # effect.  The special name ``__slots__`` is required to actually
    # suppress the per-instance ``__dict__`` on this namedtuple subclass.
    __slots__ = ()

    def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
                query=None, fragment=None):
        return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
                                       query, fragment)

    @property
    def hostname(self):
        """For backwards-compatibility with urlparse. We're nice like that."""
        return self.host

    @property
    def request_uri(self):
        """Absolute path including the query string."""
        uri = self.path or '/'

        if self.query is not None:
            uri += '?' + self.query

        return uri

    @property
    def netloc(self):
        """Network location including host and port"""
        if self.port:
            return '%s:%d' % (self.host, self.port)
        return self.host

    @property
    def url(self):
        """
        Convert self into a url

        This function should more or less round-trip with :func:`.parse_url`. The
        returned url may not be exactly the same as the url inputted to
        :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
        with a blank port will have : removed).

        Example: ::

            >>> U = parse_url('http://google.com/mail/')
            >>> U.url
            'http://google.com/mail/'
            >>> Url('http', 'username:password', 'host.com', 80,
            ...     '/path', 'query', 'fragment').url
            'http://username:password@host.com:80/path?query#fragment'
        """
        scheme, auth, host, port, path, query, fragment = self
        url = ''

        # We use "is not None" we want things to happen with empty strings (or 0 port)
        if scheme is not None:
            url += scheme + '://'
        if auth is not None:
            url += auth + '@'
        if host is not None:
            url += host
        if port is not None:
            url += ':' + str(port)
        if path is not None:
            url += path
        if query is not None:
            url += '?' + query
        if fragment is not None:
            url += '#' + fragment

        return url

    def __str__(self):
        return self.url
def split_first(s, delims):
    """
    Split *s* on whichever delimiter from *delims* occurs earliest.

    Returns a ``(head, tail, delimiter)`` triple; when no delimiter is
    found the whole string is the head: ``(s, '', None)``.

    Example::

        >>> split_first('foo/bar?baz', '?/=')
        ('foo', 'bar?baz', '/')
        >>> split_first('foo/bar?baz', '123')
        ('foo/bar?baz', '', None)

    Scales linearly with number of delims. Not ideal for large number of delims.
    """
    best = None  # (index, delimiter) of the earliest hit so far
    for delim in delims:
        pos = s.find(delim)
        if pos >= 0 and (best is None or pos < best[0]):
            best = (pos, delim)

    if best is None:
        return s, '', None

    pos, delim = best
    return s[:pos], s[pos + 1:], delim
def parse_url(url):
    """
    Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
    performed to parse incomplete urls. Fields not provided will be None.

    Partly backwards-compatible with :mod:`urlparse`.

    Example::

        >>> parse_url('http://google.com/mail/')
        Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
        >>> parse_url('google.com:80')
        Url(scheme=None, host='google.com', port=80, path=None, ...)
        >>> parse_url('/foo?bar')
        Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)

    :raises LocationParseError: if a non-blank port is not all digits.
    """
    # While this code has overlap with stdlib's urlparse, it is much
    # simplified for our needs and less annoying.
    # Additionally, this implementations does silly things to be optimal
    # on CPython.

    if not url:
        # Empty
        return Url()

    scheme = None
    auth = None
    host = None
    port = None
    path = None
    fragment = None
    query = None

    # Scheme
    if '://' in url:
        scheme, url = url.split('://', 1)

    # Find the earliest Authority Terminator
    # (http://tools.ietf.org/html/rfc3986#section-3.2)
    url, path_, delim = split_first(url, ['/', '?', '#'])

    if delim:
        # Reassemble the path: keep the terminator so '?'/'#' handling
        # below sees the full suffix.
        path = delim + path_

    # Auth
    if '@' in url:
        # Last '@' denotes end of auth part
        auth, url = url.rsplit('@', 1)

    # IPv6: a bracketed literal may itself contain ':', so split on the
    # closing bracket before looking for a port.
    if url and url[0] == '[':
        host, url = url.split(']', 1)
        host += ']'

    # Port
    if ':' in url:
        _host, port = url.split(':', 1)

        if not host:
            # host was not already claimed by the IPv6 branch above.
            host = _host

        if port:
            # If given, ports must be integers.
            if not port.isdigit():
                raise LocationParseError(url)
            port = int(port)
        else:
            # Blank ports are cool, too. (rfc3986#section-3.2.3)
            port = None

    elif not host and url:
        host = url

    if not path:
        return Url(scheme, auth, host, port, path, query, fragment)

    # Fragment
    if '#' in path:
        path, fragment = path.split('#', 1)

    # Query
    if '?' in path:
        path, query = path.split('?', 1)

    return Url(scheme, auth, host, port, path, query, fragment)
def get_host(url):
    """
    Deprecated. Use :func:`.parse_url` instead.
    """
    parsed = parse_url(url)
    scheme = parsed.scheme or 'http'
    return scheme, parsed.hostname, parsed.port
|
mit
|
Yannig/ansible
|
lib/ansible/plugins/action/ce_template.py
|
65
|
3803
|
#
# Copyright 2015 Peter Sprygada <psprygada@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import time
import glob
import urlparse
from ansible.module_utils._text import to_text
from ansible.plugins.action.ce import ActionModule as _ActionModule
class ActionModule(_ActionModule):
    """Action plugin that renders the ``src`` template before delegating to
    the ce network action module."""

    def run(self, tmp=None, task_vars=None):
        """Render the template argument, run the module, handle backups."""
        try:
            self._handle_template()
        except (ValueError, AttributeError) as exc:
            return dict(failed=True, msg=exc.message)

        result = super(ActionModule, self).run(tmp, task_vars)

        if self._task.args.get('backup') and result.get('__backup__'):
            # User requested backup and no error occurred in module.
            # NOTE: If there is a parameter error, __backup__ key may not be in results.
            self._write_backup(task_vars['inventory_hostname'], result['__backup__'])

        if '__backup__' in result:
            del result['__backup__']

        return result

    def _get_working_path(self):
        # Relative paths resolve against the role directory when running
        # inside a role, otherwise against the loader's basedir.
        cwd = self._loader.get_basedir()
        if self._task._role is not None:
            cwd = self._task._role._role_path
        return cwd

    def _write_backup(self, host, contents):
        """Write *contents* to backup/<host>_config.<timestamp>, removing
        any previous backups for the same host first."""
        backup_path = self._get_working_path() + '/backup'
        if not os.path.exists(backup_path):
            os.mkdir(backup_path)
        for fn in glob.glob('%s/%s*' % (backup_path, host)):
            os.remove(fn)
        tstamp = time.strftime("%Y-%m-%d@%H:%M:%S", time.localtime(time.time()))
        filename = '%s/%s_config.%s' % (backup_path, host, tstamp)
        # BUG FIX: close the file deterministically instead of relying on
        # garbage collection of the anonymous handle.
        with open(filename, 'w') as f:
            f.write(contents)

    def _handle_template(self):
        """Resolve, read and render the ``src`` template in place.

        Raises ValueError when ``src`` is missing; silently returns when the
        resolved source file does not exist.
        """
        src = self._task.args.get('src')
        if not src:
            raise ValueError('missing required arguments: src')

        working_path = self._get_working_path()

        if os.path.isabs(src) or urlparse.urlsplit(src).scheme:
            source = src
        else:
            source = self._loader.path_dwim_relative(working_path, 'templates', src)
            if not source:
                source = self._loader.path_dwim_relative(working_path, src)

        if not os.path.exists(source):
            return

        try:
            with open(source, 'r') as f:
                template_data = to_text(f.read())
        except IOError:
            return dict(failed=True, msg='unable to load src file')

        # Create a template search path in the following order:
        # [working_path, self_role_path, dependent_role_paths, dirname(source)]
        searchpath = [working_path]
        if self._task._role is not None:
            searchpath.append(self._task._role._role_path)
            # BUG FIX: was hasattr(self._task, "_block:") — the stray colon
            # made the check always False, so dependent role paths were
            # never added to the template search path.
            if hasattr(self._task, "_block"):
                dep_chain = self._task._block.get_dep_chain()
                if dep_chain is not None:
                    for role in dep_chain:
                        searchpath.append(role._role_path)
        searchpath.append(os.path.dirname(source))
        self._templar.environment.loader.searchpath = searchpath
        self._task.args['src'] = self._templar.template(template_data)
|
gpl-3.0
|
anton-golubkov/Garland
|
src/gui/image_convert.py
|
1
|
3100
|
#-------------------------------------------------------------------------------
# Copyright (c) 2011 Anton Golubkov.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the GNU Public License v2.0
# which accompanies this distribution, and is available at
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
#
# Contributors:
# Anton Golubkov - initial API and implementation
#-------------------------------------------------------------------------------
#!/usr/bin/python
# -*- coding: utf-8 -*-
from PySide import QtGui, QtCore
import cv
import PIL.Image
import StringIO
def pilimage_to_iplimage(pil_image):
    """Convert a PIL image into a 3-channel 8-bit OpenCV IplImage."""
    # Check zero size of image
    if pil_image.size == (0, 0):
        return cv.CreateImage( (0, 0), cv.IPL_DEPTH_8U, 3)
    ipl_image = cv.CreateImageHeader(pil_image.size, cv.IPL_DEPTH_8U, 3)
    # The image rotation and reversion of the string is to swap RGB into BGR,
    # that is used in OpenCV video encoding.
    cv.SetData(ipl_image, pil_image.rotate(180).tostring()[::-1])
    return ipl_image
def pilimage_to_qimage(pil_image):
    """Convert a PIL image into a QtGui.QImage via an in-memory PNG."""
    # Zero-sized input maps to an empty QImage.
    if pil_image.size == (0, 0):
        return QtGui.QImage()
    # There is something strange bug appears,
    # when returning QImage created from PIL data
    # This function changed to buffer IO operations
    png_buffer = StringIO.StringIO()
    pil_image.save(png_buffer, "PNG")
    png_buffer.seek(0)
    qimage = QtGui.QImage()
    qimage.loadFromData(png_buffer.read())
    return qimage
def iplimage_to_pilimage(ipl_image):
    """Converts an ipl_image into a PIL.Image

    This function may be obsolete. Use ipl_image.as_pil_image() instead.
    """
    # Check zero size of image
    if cv.GetSize(ipl_image) == (0, 0):
        return PIL.Image.Image()
    # Check 1-channel input image and convert it to 3-channel
    if( ipl_image.nChannels == 1):
        image3c = cv.CreateImage(cv.GetSize(ipl_image), cv.IPL_DEPTH_8U, 3)
        # Duplicate the single channel into all three output channels.
        cv.Merge(ipl_image, ipl_image, ipl_image, None, image3c)
        ipl_image = image3c
    size = cv.GetSize(ipl_image)
    data = ipl_image.tostring()
    # Decode the raw buffer as BGR (OpenCV channel order) into an RGB image.
    im_pil = PIL.Image.fromstring(
        "RGB", size, data,
        'raw', "BGR")
    return im_pil
def iplimage_to_qimage(ipl_image):
    """Convert an IplImage into a QImage, going through PIL."""
    return pilimage_to_qimage(iplimage_to_pilimage(ipl_image))
def qimage_to_iplimage(qimage):
    """Convert a QImage into an IplImage, going through PIL."""
    return pilimage_to_iplimage(qimage_to_pilimage(qimage))
def qimage_to_pilimage(qimage):
    """Convert a QtGui.QImage into a PIL image via an in-memory PNG."""
    # Zero-sized input maps to an empty PIL image.
    if (qimage.width(), qimage.height()) == (0, 0):
        return PIL.Image.Image()
    byte_array = QtCore.QByteArray()
    qt_buffer = QtCore.QBuffer(byte_array)
    qt_buffer.open(QtCore.QIODevice.WriteOnly)
    qimage.save(qt_buffer, "PNG")
    png_stream = StringIO.StringIO()
    png_stream.write(qt_buffer.data())
    qt_buffer.close()
    png_stream.seek(0)
    return PIL.Image.open(png_stream)
|
lgpl-2.1
|
michaeljohn32/odoomrp-wip
|
mrp_product_variants_operations/__openerp__.py
|
17
|
1511
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
# Odoo addon manifest. "auto_install": True makes this a glue module that is
# installed automatically once both listed dependencies are present.
{
    "name": "MRP - Product variants with MRP operations",
    "version": "1.0",
    "depends": [
        "mrp_product_variants",
        "mrp_operations_extension",
    ],
    # Adjacent string literals concatenate into a single author string.
    "author": "OdooMRP team,"
              "AvanzOSC,"
              "Serv. Tecnol. Avanzados - Pedro M. Baeza",
    "website": "http://www.odoomrp.com",
    "contributors": [
        "Oihane Crucelaegui <oihanecrucelaegi@avanzosc.es>",
        "Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>",
        "Ana Juaristi <ajuaristio@gmail.com>",
    ],
    "category": "Custom Module",
    "summary": "",
    "data": [],
    "installable": True,
    "auto_install": True,
}
|
agpl-3.0
|
tquilian/exelearningTest
|
twisted/enterprise/sqlreflector.py
|
17
|
11849
|
# -*- test-case-name: twisted.test.test_reflector -*-
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
from twisted.enterprise import reflector
from twisted.enterprise.util import DBError, getKeyColumn, quote, safe
from twisted.enterprise.util import _TableInfo
from twisted.enterprise.row import RowObject
from twisted.python import reflect
class SQLReflector(reflector.Reflector):
    """I reflect on a database and load RowObjects from it.

    In order to do this, I interrogate a relational database to
    extract schema information and interface with RowObject class
    objects that can interact with specific tables.
    """
    # Set to 1 once _transPopulateSchema has built schema info for every
    # registered row class.
    populated = 0

    # Maps reflector comparison constants onto their SQL operator text.
    conditionalLabels = {
        reflector.EQUAL : "=",
        reflector.LESSTHAN : "<",
        reflector.GREATERTHAN : ">",
        reflector.LIKE : "like"
    }

    def __init__(self, dbpool, rowClasses):
        """Initialize me against a database.
        """
        reflector.Reflector.__init__(self, rowClasses)
        # Connection pool used to run queries and operations.
        self.dbpool = dbpool

    def _populate(self):
        self._transPopulateSchema()

    def _transPopulateSchema(self):
        """Used to construct the row classes in a single interaction.
        """
        for rc in self.rowClasses:
            if not issubclass(rc, RowObject):
                raise DBError("Stub class (%s) is not derived from RowObject" % reflect.qual(rc.rowClass))
            self._populateSchemaFor(rc)
        self.populated = 1

    def _populateSchemaFor(self, rc):
        """Construct all the SQL templates for database operations on
        <tableName> and populate the class <rowClass> with that info.

        Raises DBError if the row class is missing one of the required
        class variables.
        """
        attributes = ("rowColumns", "rowKeyColumns", "rowTableName" )
        for att in attributes:
            if not hasattr(rc, att):
                raise DBError("RowClass %s must have class variable: %s" % (rc, att))
        tableInfo = _TableInfo(rc)
        # Pre-build the %s-style SQL templates used by update/insert/delete.
        tableInfo.updateSQL = self.buildUpdateSQL(tableInfo)
        tableInfo.insertSQL = self.buildInsertSQL(tableInfo)
        tableInfo.deleteSQL = self.buildDeleteSQL(tableInfo)
        self.populateSchemaFor(tableInfo)

    def escape_string(self, text):
        """Escape a string for use in an SQL statement. The default
        implementation escapes ' with '' and \ with \\. Redefine this
        function in a subclass if your database server uses different
        escaping rules.
        """
        return safe(text)

    def quote_value(self, value, type):
        """Format a value for use in an SQL statement.

        @param value: a value to format as data in SQL.
        @param type: a key in util.dbTypeMap.
        """
        return quote(value, type, string_escaper=self.escape_string)

    def loadObjectsFrom(self, tableName, parentRow=None, data=None,
                        whereClause=None, forceChildren=0):
        """Load a set of RowObjects from a database.

        Create a set of python objects of <rowClass> from the contents
        of a table populated with appropriate data members.
        Example::

          |  class EmployeeRow(row.RowObject):
          |      pass
          |
          |  def gotEmployees(employees):
          |      for emp in employees:
          |          emp.manager = "fred smith"
          |          manager.updateRow(emp)
          |
          |  reflector.loadObjectsFrom("employee",
          |                            data = userData,
          |                            whereClause = [("manager" , EQUAL, "fred smith")]
          |                            ).addCallback(gotEmployees)

        NOTE: the objects and all children should be loaded in a single transaction.
        NOTE: can specify a parentRow _OR_ a whereClause.
        """
        if parentRow and whereClause:
            raise DBError("Must specify one of parentRow _OR_ whereClause")
        if parentRow:
            # Derive the where clause from the parent/child relationship.
            info = self.getTableInfo(parentRow)
            relationship = info.getRelationshipFor(tableName)
            whereClause = self.buildWhereClause(relationship, parentRow)
        elif whereClause:
            pass
        else:
            whereClause = []
        return self.dbpool.runInteraction(self._rowLoader, tableName,
                                          parentRow, data, whereClause,
                                          forceChildren)

    def _rowLoader(self, transaction, tableName, parentRow, data,
                   whereClause, forceChildren):
        """immediate loading of rowobjects from the table with the whereClause.

        NOTE: the SQL is assembled by string interpolation; safety relies
        entirely on quote_value/escape_string for the where-clause values.
        """
        tableInfo = self.schema[tableName]
        # Build the SQL for the query
        sql = "SELECT "
        first = 1
        for column, type in tableInfo.rowColumns:
            if first:
                first = 0
            else:
                sql = sql + ","
            sql = sql + " %s" % column
        sql = sql + " FROM %s " % (tableName)
        if whereClause:
            sql += " WHERE "
            first = 1
            for wItem in whereClause:
                if first:
                    first = 0
                else:
                    sql += " AND "
                (columnName, cond, value) = wItem
                t = self.findTypeFor(tableName, columnName)
                quotedValue = self.quote_value(value, t)
                sql += "%s %s %s" % (columnName, self.conditionalLabels[cond],
                                     quotedValue)
        # execute the query
        transaction.execute(sql)
        rows = transaction.fetchall()
        # construct the row objects
        results = []
        newRows = []
        for args in rows:
            kw = {}
            # Map each result column back onto the row class attribute whose
            # name matches case-insensitively.
            for i in range(0,len(args)):
                ColumnName = tableInfo.rowColumns[i][0].lower()
                for attr, type in tableInfo.rowClass.rowColumns:
                    if attr.lower() == ColumnName:
                        kw[attr] = args[i]
                        break
            # find the row in the cache or add it
            resultObject = self.findInCache(tableInfo.rowClass, kw)
            if not resultObject:
                meth = tableInfo.rowFactoryMethod[0]
                resultObject = meth(tableInfo.rowClass, data, kw)
                self.addToCache(resultObject)
                newRows.append(resultObject)
            results.append(resultObject)
        # add these rows to the parentRow if required
        if parentRow:
            self.addToParent(parentRow, newRows, tableName)
        # load children or each of these rows if required
        for relationship in tableInfo.relationships:
            if not forceChildren and not relationship.autoLoad:
                continue
            for row in results:
                # build where clause
                childWhereClause = self.buildWhereClause(relationship, row)
                # load the children immediately, but do nothing with them
                self._rowLoader(transaction,
                                relationship.childRowClass.rowTableName,
                                row, data, childWhereClause, forceChildren)
        return results

    def findTypeFor(self, tableName, columnName):
        # Case-insensitive column-to-type lookup; returns None when the
        # column is unknown.
        tableInfo = self.schema[tableName]
        columnName = columnName.lower()
        for column, type in tableInfo.rowColumns:
            if column.lower() == columnName:
                return type

    def buildUpdateSQL(self, tableInfo):
        """(Internal) Build SQL template to update a RowObject.

        Returns: SQL that is used to contruct a rowObject class.
        """
        sql = "UPDATE %s SET" % tableInfo.rowTableName
        # build update attributes
        first = 1
        for column, type in tableInfo.rowColumns:
            # Key columns go in the WHERE clause, not the SET list.
            if getKeyColumn(tableInfo.rowClass, column):
                continue
            if not first:
                sql = sql + ", "
            sql = sql + " %s = %s" % (column, "%s")
            first = 0
        # build where clause
        first = 1
        sql = sql + " WHERE "
        for keyColumn, type in tableInfo.rowKeyColumns:
            if not first:
                sql = sql + " AND "
            sql = sql + " %s = %s " % (keyColumn, "%s")
            first = 0
        return sql

    def buildInsertSQL(self, tableInfo):
        """(Internal) Build SQL template to insert a new row.

        Returns: SQL that is used to insert a new row for a rowObject
        instance not created from the database.
        """
        sql = "INSERT INTO %s (" % tableInfo.rowTableName
        # build column list
        first = 1
        for column, type in tableInfo.rowColumns:
            if not first:
                sql = sql + ", "
            sql = sql + column
            first = 0
        sql = sql + " ) VALUES ("
        # build values list
        first = 1
        for column, type in tableInfo.rowColumns:
            if not first:
                sql = sql + ", "
            sql = sql + "%s"
            first = 0
        sql = sql + ")"
        return sql

    def buildDeleteSQL(self, tableInfo):
        """Build the SQL template to delete a row from the table.
        """
        sql = "DELETE FROM %s " % tableInfo.rowTableName
        # build where clause
        first = 1
        sql = sql + " WHERE "
        for keyColumn, type in tableInfo.rowKeyColumns:
            if not first:
                sql = sql + " AND "
            sql = sql + " %s = %s " % (keyColumn, "%s")
            first = 0
        return sql

    def updateRowSQL(self, rowObject):
        """Build SQL to update the contents of rowObject.
        """
        args = []
        tableInfo = self.schema[rowObject.rowTableName]
        # build update attributes
        for column, type in tableInfo.rowColumns:
            if not getKeyColumn(rowObject.__class__, column):
                args.append(self.quote_value(rowObject.findAttribute(column),
                                             type))
        # build where clause
        for keyColumn, type in tableInfo.rowKeyColumns:
            args.append(self.quote_value(rowObject.findAttribute(keyColumn),
                                         type))
        # Argument order must match the %s placeholders laid out by
        # buildUpdateSQL (non-key columns first, then key columns).
        return self.getTableInfo(rowObject).updateSQL % tuple(args)

    def updateRow(self, rowObject):
        """Update the contents of rowObject to the database.
        """
        sql = self.updateRowSQL(rowObject)
        rowObject.setDirty(0)
        return self.dbpool.runOperation(sql)

    def insertRowSQL(self, rowObject):
        """Build SQL to insert the contents of rowObject.
        """
        args = []
        tableInfo = self.schema[rowObject.rowTableName]
        # build values
        for column, type in tableInfo.rowColumns:
            args.append(self.quote_value(rowObject.findAttribute(column),type))
        return self.getTableInfo(rowObject).insertSQL % tuple(args)

    def insertRow(self, rowObject):
        """Insert a new row for rowObject.
        """
        rowObject.setDirty(0)
        sql = self.insertRowSQL(rowObject)
        return self.dbpool.runOperation(sql)

    def deleteRowSQL(self, rowObject):
        """Build SQL to delete rowObject from the database.
        """
        args = []
        tableInfo = self.schema[rowObject.rowTableName]
        # build where clause
        for keyColumn, type in tableInfo.rowKeyColumns:
            args.append(self.quote_value(rowObject.findAttribute(keyColumn),
                                         type))
        return self.getTableInfo(rowObject).deleteSQL % tuple(args)

    def deleteRow(self, rowObject):
        """Delete the row for rowObject from the database.
        """
        sql = self.deleteRowSQL(rowObject)
        self.removeFromCache(rowObject)
        return self.dbpool.runOperation(sql)
# Public API of this module.
__all__ = ['SQLReflector']
|
gpl-2.0
|
wuhengzhi/chromium-crosswalk
|
tools/grit/grit/node/io_unittest.py
|
52
|
6149
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for io.FileNode'''
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
import os
import StringIO
import unittest
from grit.node import misc
from grit.node import io
from grit.node import empty
from grit import grd_reader
from grit import util
class FileNodeUnittest(unittest.TestCase):
def testGetPath(self):
root = misc.GritNode()
root.StartParsing(u'grit', None)
root.HandleAttribute(u'latest_public_release', u'0')
root.HandleAttribute(u'current_release', u'1')
root.HandleAttribute(u'base_dir', ur'..\resource')
translations = empty.TranslationsNode()
translations.StartParsing(u'translations', root)
root.AddChild(translations)
file_node = io.FileNode()
file_node.StartParsing(u'file', translations)
file_node.HandleAttribute(u'path', ur'flugel\kugel.pdf')
translations.AddChild(file_node)
root.EndParsing()
self.failUnless(root.ToRealPath(file_node.GetInputPath()) ==
util.normpath(
os.path.join(ur'../resource', ur'flugel/kugel.pdf')))
def VerifyCliquesContainEnglishAndFrenchAndNothingElse(self, cliques):
for clique in cliques:
self.failUnlessEquals(len(clique[0].clique), 2)
self.failUnless('en' in cliques[i][0].clique)
self.failUnless('fr' in cliques[i][0].clique)
def testLoadTranslations(self):
xml = '''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en-US" current_release="3" base_dir=".">
<translations>
<file path="generated_resources_fr.xtb" lang="fr" />
</translations>
<release seq="3">
<messages>
<message name="ID_HELLO">Hello!</message>
<message name="ID_HELLO_USER">Hello <ph name="USERNAME">%s<ex>Joi</ex></ph></message>
</messages>
</release>
</grit>'''
grd = grd_reader.Parse(StringIO.StringIO(xml),
util.PathFromRoot('grit/testdata'))
grd.SetOutputLanguage('en')
grd.RunGatherers()
self.VerifyCliquesContainEnglishAndFrenchAndNothingElse(grd.GetCliques())
def testIffyness(self):
grd = grd_reader.Parse(StringIO.StringIO('''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en-US" current_release="3" base_dir=".">
<translations>
<if expr="lang == 'fr'">
<file path="generated_resources_fr.xtb" lang="fr" />
</if>
</translations>
<release seq="3">
<messages>
<message name="ID_HELLO">Hello!</message>
<message name="ID_HELLO_USER">Hello <ph name="USERNAME">%s<ex>Joi</ex></ph></message>
</messages>
</release>
</grit>'''), util.PathFromRoot('grit/testdata'))
grd.SetOutputLanguage('en')
grd.RunGatherers()
grd.SetOutputLanguage('fr')
grd.RunGatherers()
def testConditionalLoadTranslations(self):
xml = '''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en-US" current_release="3"
base_dir=".">
<translations>
<if expr="True">
<file path="generated_resources_fr.xtb" lang="fr" />
</if>
<if expr="False">
<file path="no_such_file.xtb" lang="de" />
</if>
</translations>
<release seq="3">
<messages>
<message name="ID_HELLO">Hello!</message>
<message name="ID_HELLO_USER">Hello <ph name="USERNAME">%s<ex>
Joi</ex></ph></message>
</messages>
</release>
</grit>'''
grd = grd_reader.Parse(StringIO.StringIO(xml),
util.PathFromRoot('grit/testdata'))
grd.SetOutputLanguage('en')
grd.RunGatherers()
self.VerifyCliquesContainEnglishAndFrenchAndNothingElse(grd.GetCliques())
def testConditionalOutput(self):
xml = '''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en-US" current_release="3"
base_dir=".">
<outputs>
<output filename="resource.h" type="rc_header" />
<output filename="en/generated_resources.rc" type="rc_all"
lang="en" />
<if expr="pp_if('NOT_TRUE')">
<output filename="de/generated_resources.rc" type="rc_all"
lang="de" />
</if>
</outputs>
<release seq="3">
<messages>
<message name="ID_HELLO">Hello!</message>
</messages>
</release>
</grit>'''
grd = grd_reader.Parse(StringIO.StringIO(xml),
util.PathFromRoot('grit/test/data'),
defines={})
grd.SetOutputLanguage('en')
grd.RunGatherers()
outputs = grd.GetChildrenOfType(io.OutputNode)
active = set(grd.ActiveDescendants())
self.failUnless(outputs[0] in active)
self.failUnless(outputs[0].GetType() == 'rc_header')
self.failUnless(outputs[1] in active)
self.failUnless(outputs[1].GetType() == 'rc_all')
self.failUnless(outputs[2] not in active)
self.failUnless(outputs[2].GetType() == 'rc_all')
# Verify that 'iw' and 'no' language codes in xtb files are mapped to 'he' and
# 'nb'.
def testLangCodeMapping(self):
grd = grd_reader.Parse(StringIO.StringIO('''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en-US" current_release="3" base_dir=".">
<translations>
<file path="generated_resources_no.xtb" lang="nb" />
<file path="generated_resources_iw.xtb" lang="he" />
</translations>
<release seq="3">
<messages></messages>
</release>
</grit>'''), util.PathFromRoot('grit/testdata'))
grd.SetOutputLanguage('en')
grd.RunGatherers()
# Allow running this test file directly.
if __name__ == '__main__':
  unittest.main()
|
bsd-3-clause
|
cblecker/test-infra
|
scenarios/kubernetes_bazel.py
|
12
|
10166
|
#!/usr/bin/env python
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs bazel build/test for current repo."""
import argparse
import os
import subprocess
import sys
ORIG_CWD = os.getcwd()  # Working directory captured at import time.


def test_infra(*paths):
    """Return path relative to root of test-infra repo."""
    here = os.path.dirname(__file__)
    return os.path.join(ORIG_CWD, here, '..', *paths)
def check(*cmd):
    """Log and run the command, raising on errors."""
    # Python 2 "print chevron" syntax: log the argv to stderr before running.
    print >>sys.stderr, 'Run:', cmd
    subprocess.check_call(cmd)
def check_output(*cmd):
    """Log and run the command, raising on errors, return output"""
    # Logs to stderr so stdout stays clean for the captured output.
    print >>sys.stderr, 'Run:', cmd
    return subprocess.check_output(cmd)
class Bazel(object):
    """Thin wrapper around the bazel CLI that injects --config flags."""

    def __init__(self, cfgs):
        # cfgs: list of bazel --config names applied to most commands.
        self.cfgs = cfgs or []

    def _commands(self, cmd, *args, **kw):
        """Return the full argv for ``bazel <cmd>``; configs are added
        unless the caller passes config=False."""
        commands = ['bazel', cmd]
        if self.cfgs and kw.get('config', True):
            commands.extend(['--config=%s' % c for c in self.cfgs])
        if args:
            commands.extend(args)
        return commands

    def check(self, cmd, *args, **kw):
        """wrapper for check('bazel', *cmd)."""
        check(*self._commands(cmd, *args, **kw))

    def check_output(self, cmd, *args, **kw):
        """wrapper for check_output('bazel', *cmd)."""
        return check_output(*self._commands(cmd, *args, **kw))

    def query(self, kind, selected_pkgs, changed_pkgs):
        """
        Run a bazel query against target kind, include targets from args.

        Returns a list of kind objects from bazel query.
        """
        # Changes are calculated and no packages found, return empty list.
        if changed_pkgs == []:
            return []
        selection = '//...'
        if selected_pkgs:
            # targets without a '-' operator prefix are implicitly additive
            # when specifying build targets
            selection = selected_pkgs[0]
            for pkg in selected_pkgs[1:]:
                if pkg.startswith('-'):
                    selection += ' '+pkg
                else:
                    selection += ' +'+pkg
        changes = '//...'
        if changed_pkgs:
            changes = 'set(%s)' % ' '.join(changed_pkgs)
        query_pat = 'kind(%s, rdeps(%s, %s)) except attr(\'tags\', \'manual\', //...)'
        # Filter to lines that name real targets ('//...'); query output may
        # contain blank lines.  config=False: queries never take --config.
        return [target for target in self.check_output(
            'query',
            '--keep_going',
            '--noshow_progress',
            query_pat % (kind, selection, changes),
            config=False,
        ).split('\n') if target.startswith("//")]
def upload_string(gcs_path, text):
    """Uploads text to gcs_path"""
    # Stream the text through gsutil's stdin ('-' source) so no temp file
    # is needed; Content-Type is forced to text/plain.
    cmd = ['gsutil', '-q', '-h', 'Content-Type:text/plain', 'cp', '-', gcs_path]
    print >>sys.stderr, 'Run:', cmd, 'stdin=%s'%text
    proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
    # NOTE(review): the gsutil exit status is never checked, so a failed
    # upload goes unnoticed by the caller.
    proc.communicate(input=text)
def echo_result(res):
    """Print a human-readable message for bazel exit code *res*."""
    # Mapping of bazel's conventional exit codes to messages.
    echo_map = {
        0:'Success',
        1:'Build failed',
        2:'Bad environment or flags',
        3:'Build passed, tests failed or timed out',
        4:'Build passed, no tests found',
        5:'Interrupted'
    }
    print echo_map.get(res, 'Unknown exit code : %s' % res)
def get_version():
    """Return the kubernetes version stamped by the bazel build."""
    # The check for version in bazel-genfiles can be removed once everyone is
    # off of versions before 0.25.0.
    # https://github.com/bazelbuild/bazel/issues/8651
    legacy = 'bazel-genfiles/version'
    path = legacy if os.path.isfile(legacy) else 'bazel-bin/version'
    with open(path) as fp:
        return fp.read().strip()
def get_changed(base, pull):
    """Get affected packages between base sha and pull sha."""
    # Deleted files are excluded (--diff-filter=d): bazel cannot query them.
    files = check_output(
        'git', 'diff', '--name-only',
        '--diff-filter=d', '%s...%s' % (base, pull))
    # Map the changed files onto bazel targets.
    targets = check_output(
        'bazel', 'query',
        '--noshow_progress',
        'set(%s)' % files)
    return targets.split('\n')
def clean_file_in_dir(dirname, filename):
    """Recursively remove every file named *filename* under *dirname*."""
    for root, _, names in os.walk(dirname):
        matches = [n for n in names if n == filename]
        for name in matches:
            os.remove(os.path.join(root, name))
def main(args):
"""Trigger a bazel build/test run, and upload results."""
# pylint:disable=too-many-branches, too-many-statements, too-many-locals
if args.install:
for install in args.install:
if not os.path.isfile(install):
raise ValueError('Invalid install path: %s' % install)
check('pip', 'install', '-r', install)
bazel = Bazel(args.config)
bazel.check('version', config=False)
res = 0
try:
affected = None
if args.affected:
base = os.getenv('PULL_BASE_SHA', '')
pull = os.getenv('PULL_PULL_SHA', 'HEAD')
if not base:
raise ValueError('PULL_BASE_SHA must be set!')
affected = get_changed(base, pull)
build_pkgs = []
manual_build_targets = []
test_pkgs = []
manual_test_targets = []
if args.build:
build_pkgs = args.build.split(' ')
if args.manual_build:
manual_build_targets = args.manual_build.split(' ')
if args.test:
test_pkgs = args.test.split(' ')
if args.manual_test:
manual_test_targets = args.manual_test.split(' ')
buildables = []
if build_pkgs or manual_build_targets or affected:
buildables = bazel.query('.*_binary', build_pkgs, affected) + manual_build_targets
if args.release:
buildables.extend(args.release.split(' '))
if buildables:
bazel.check('build', *buildables)
else:
# Call bazel build regardless, to establish bazel symlinks
bazel.check('build')
# clean up previous test.xml
clean_file_in_dir('./bazel-testlogs', 'test.xml')
if test_pkgs or manual_test_targets or affected:
tests = bazel.query('test', test_pkgs, affected) + manual_test_targets
if tests:
if args.test_args:
tests = args.test_args + tests
bazel.check('test', *tests)
except subprocess.CalledProcessError as exp:
res = exp.returncode
if args.push or args.release and res == 0:
version = get_version()
if not version:
print 'Kubernetes version missing; not uploading ci artifacts.'
res = 1
else:
try:
if args.version_suffix:
version += args.version_suffix
gcs_build = '%s/%s' % (args.gcs, version)
bazel.check('run', '//:push-build', '--', gcs_build)
# log push-build location to path child jobs can find
# (gs://<shared-bucket>/$PULL_REFS/bazel-build-location.txt)
pull_refs = os.getenv('PULL_REFS', '')
gcs_shared = os.path.join(args.gcs_shared, pull_refs, 'bazel-build-location.txt')
if pull_refs:
upload_string(gcs_shared, gcs_build)
if args.publish_version:
upload_string(args.publish_version, version)
except subprocess.CalledProcessError as exp:
res = exp.returncode
# Coalesce test results into one file for upload.
check(test_infra('hack/coalesce.py'))
echo_result(res)
if res != 0:
sys.exit(res)
def create_parser():
    """Build the argparse parser for this script's command line."""
    parser = argparse.ArgumentParser()
    add = parser.add_argument
    add('--affected', action='store_true',
        help='If build/test affected targets. Filtered by --build and --test flags.')
    add('--build', help='Bazel build target patterns, split by one space')
    add('--manual-build',
        help='Bazel build targets that should always be manually included, split by one space')
    add('--config', action='append',
        help='--config=foo rules to apply to bazel commands')
    # TODO(krzyzacy): Convert to bazel build rules
    add('--install', action="append",
        help='Python dependency(s) that need to be installed')
    add('--push', action='store_true',
        help='Push release without building it')
    add('--release',
        help='Run bazel build, and push release build to --gcs bucket')
    add('--gcs-shared',
        default="gs://kubernetes-jenkins/shared-results/",
        help='If $PULL_REFS is set push build location to this bucket')
    add('--publish-version',
        help='publish GCS file here with the build version, like ci/latest.txt')
    add('--test', help='Bazel test target patterns, split by one space')
    add('--manual-test',
        help='Bazel test targets that should always be manually included, split by one space')
    add('--test-args', action="append", help='Bazel test args')
    add('--gcs',
        default='gs://kubernetes-release-dev/bazel',
        help='GCS path for where to push build')
    add('--version-suffix',
        help='version suffix for build pushing')
    return parser
def parse_args(args=None):
    """Parse command-line args (None means sys.argv)."""
    return create_parser().parse_args(args)
# Script entry point: parse argv and run the build/test pipeline.
if __name__ == '__main__':
    main(parse_args())
|
apache-2.0
|
sankhesh/VTK
|
ThirdParty/Twisted/twisted/trial/_dist/managercommands.py
|
45
|
1560
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Commands for reporting test success of failure to the manager.
@since: 12.3
"""
from twisted.protocols.amp import Command, String, Boolean, ListOf
class AddSuccess(Command):
    """
    Add a success.

    AMP command carrying the name of a test that passed.
    """
    arguments = [('testName', String())]
    response = [('success', Boolean())]


class AddError(Command):
    """
    Add an error.

    Carries the test name, the error text, its class, and the
    traceback frames as a list of strings.
    """
    arguments = [('testName', String()), ('error', String()),
                 ('errorClass', String()), ('frames', ListOf(String()))]
    response = [('success', Boolean())]


class AddFailure(Command):
    """
    Add a failure.

    Same shape as L{AddError}, but for assertion failures.
    """
    arguments = [('testName', String()), ('fail', String()),
                 ('failClass', String()), ('frames', ListOf(String()))]
    response = [('success', Boolean())]


class AddSkip(Command):
    """
    Add a skip.
    """
    arguments = [('testName', String()), ('reason', String())]
    response = [('success', Boolean())]


class AddExpectedFailure(Command):
    """
    Add an expected failure.
    """
    arguments = [('testName', String()), ('error', String()),
                 ('todo', String())]
    response = [('success', Boolean())]


class AddUnexpectedSuccess(Command):
    """
    Add an unexpected success.
    """
    arguments = [('testName', String()), ('todo', String())]
    response = [('success', Boolean())]


class TestWrite(Command):
    """
    Write test log.

    Forwards one line of worker output to the manager.
    """
    arguments = [('out', String())]
    response = [('success', Boolean())]
|
bsd-3-clause
|
singingwolfboy/invoke
|
invoke/vendor/yaml3/scanner.py
|
235
|
51879
|
# Scanner produces tokens of the following types:
# STREAM-START
# STREAM-END
# DIRECTIVE(name, value)
# DOCUMENT-START
# DOCUMENT-END
# BLOCK-SEQUENCE-START
# BLOCK-MAPPING-START
# BLOCK-END
# FLOW-SEQUENCE-START
# FLOW-MAPPING-START
# FLOW-SEQUENCE-END
# FLOW-MAPPING-END
# BLOCK-ENTRY
# FLOW-ENTRY
# KEY
# VALUE
# ALIAS(value)
# ANCHOR(value)
# TAG(value)
# SCALAR(value, plain, style)
#
# Read comments in the Scanner code for more details.
#
# Public names exported by this module.
__all__ = ['Scanner', 'ScannerError']
from .error import MarkedYAMLError
from .tokens import *
class ScannerError(MarkedYAMLError):
    # Raised for any tokenization error; MarkedYAMLError supplies the
    # context/problem/mark formatting.
    pass


class SimpleKey:
    # Record of a *possible* simple-key start position.
    # See the simple keys treatment in Scanner below.
    def __init__(self, token_number, required, index, line, column, mark):
        # token_number: position in the overall token stream where the
        #   KEY token would have to be inserted.
        self.token_number = token_number
        # required: True when a ':' MUST follow (key at start of a block line).
        self.required = required
        # index/line/column/mark: stream position where the key candidate began.
        self.index = index
        self.line = line
        self.column = column
        self.mark = mark
class Scanner:
def __init__(self):
    """Initialize the scanner."""
    # It is assumed that Scanner and Reader will have a common descendant.
    # Reader do the dirty work of checking for BOM and converting the
    # input data to Unicode. It also adds NUL to the end.
    #
    # Reader supports the following methods
    #   self.peek(i=0)    # peek the next i-th character
    #   self.prefix(l=1)  # peek the next l characters
    #   self.forward(l=1) # read the next l characters and move the pointer.

    # Had we reached the end of the stream?
    self.done = False

    # The number of unclosed '{' and '['. `flow_level == 0` means block
    # context.
    self.flow_level = 0

    # List of processed tokens that are not yet emitted.
    self.tokens = []

    # Add the STREAM-START token.
    self.fetch_stream_start()

    # Number of tokens that were emitted through the `get_token` method.
    self.tokens_taken = 0

    # The current indentation level.
    self.indent = -1

    # Past indentation levels.
    self.indents = []

    # Variables related to simple keys treatment.

    # A simple key is a key that is not denoted by the '?' indicator.
    # Example of simple keys:
    #   ---
    #   block simple key: value
    #   ? not a simple key:
    #   : { flow simple key: value }
    # We emit the KEY token before all keys, so when we find a potential
    # simple key, we try to locate the corresponding ':' indicator.
    # Simple keys should be limited to a single line and 1024 characters.

    # Can a simple key start at the current position? A simple key may
    # start:
    # - at the beginning of the line, not counting indentation spaces
    #       (in block context),
    # - after '{', '[', ',' (in the flow context),
    # - after '?', ':', '-' (in the block context).
    # In the block context, this flag also signifies if a block collection
    # may start at the current position.
    self.allow_simple_key = True

    # Keep track of possible simple keys. This is a dictionary. The key
    # is `flow_level`; there can be no more that one possible simple key
    # for each level. The value is a SimpleKey record:
    #   (token_number, required, index, line, column, mark)
    # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow),
    # '[', or '{' tokens.
    self.possible_simple_keys = {}
# Public methods.
def check_token(self, *choices):
    """Return True if the next token is one of *choices*
    (or if any token is pending when no choices are given)."""
    while self.need_more_tokens():
        self.fetch_more_tokens()
    if self.tokens:
        if not choices:
            return True
        head = self.tokens[0]
        for choice in choices:
            if isinstance(head, choice):
                return True
    return False

def peek_token(self):
    """Return the next token without removing it from the queue."""
    while self.need_more_tokens():
        self.fetch_more_tokens()
    if self.tokens:
        return self.tokens[0]

def get_token(self):
    """Pop and return the next token from the queue."""
    while self.need_more_tokens():
        self.fetch_more_tokens()
    if not self.tokens:
        return None
    self.tokens_taken += 1
    return self.tokens.pop(0)
# Private methods.
def need_more_tokens(self):
    """Return a truthy value when another token must be scanned before
    the head of the queue can be safely handed out.

    Note: falls through (returning None, which is falsy) when the
    queue head is ready — callers only test truthiness.
    """
    if self.done:
        return False
    if not self.tokens:
        return True
    # The current token may be a potential simple key, so we
    # need to look further.
    self.stale_possible_simple_keys()
    if self.next_possible_simple_key() == self.tokens_taken:
        return True
def fetch_more_tokens(self):
    """Scan the next token from the input and append it to the queue.

    Dispatches on the next character; the early checks (stream end,
    directive, document markers) are order-sensitive, the indicator
    checks below are not.
    """
    # Eat whitespaces and comments until we reach the next token.
    self.scan_to_next_token()

    # Remove obsolete possible simple keys.
    self.stale_possible_simple_keys()

    # Compare the current indentation and column. It may add some tokens
    # and decrease the current indentation level.
    self.unwind_indent(self.column)

    # Peek the next character.
    ch = self.peek()

    # Is it the end of stream?
    if ch == '\0':
        return self.fetch_stream_end()

    # Is it a directive?
    if ch == '%' and self.check_directive():
        return self.fetch_directive()

    # Is it the document start?
    if ch == '-' and self.check_document_start():
        return self.fetch_document_start()

    # Is it the document end?
    if ch == '.' and self.check_document_end():
        return self.fetch_document_end()

    # TODO: support for BOM within a stream.
    #if ch == '\uFEFF':
    #    return self.fetch_bom()     <-- issue BOMToken

    # Note: the order of the following checks is NOT significant.

    # Is it the flow sequence start indicator?
    if ch == '[':
        return self.fetch_flow_sequence_start()

    # Is it the flow mapping start indicator?
    if ch == '{':
        return self.fetch_flow_mapping_start()

    # Is it the flow sequence end indicator?
    if ch == ']':
        return self.fetch_flow_sequence_end()

    # Is it the flow mapping end indicator?
    if ch == '}':
        return self.fetch_flow_mapping_end()

    # Is it the flow entry indicator?
    if ch == ',':
        return self.fetch_flow_entry()

    # Is it the block entry indicator?
    if ch == '-' and self.check_block_entry():
        return self.fetch_block_entry()

    # Is it the key indicator?
    if ch == '?' and self.check_key():
        return self.fetch_key()

    # Is it the value indicator?
    if ch == ':' and self.check_value():
        return self.fetch_value()

    # Is it an alias?
    if ch == '*':
        return self.fetch_alias()

    # Is it an anchor?
    if ch == '&':
        return self.fetch_anchor()

    # Is it a tag?
    if ch == '!':
        return self.fetch_tag()

    # Is it a literal scalar?
    if ch == '|' and not self.flow_level:
        return self.fetch_literal()

    # Is it a folded scalar?
    if ch == '>' and not self.flow_level:
        return self.fetch_folded()

    # Is it a single quoted scalar?
    if ch == '\'':
        return self.fetch_single()

    # Is it a double quoted scalar?
    if ch == '\"':
        return self.fetch_double()

    # It must be a plain scalar then.
    if self.check_plain():
        return self.fetch_plain()

    # No? It's an error. Let's produce a nice error message.
    raise ScannerError("while scanning for the next token", None,
            "found character %r that cannot start any token" % ch,
            self.get_mark())
# Simple keys treatment.
def next_possible_simple_key(self):
# Return the number of the nearest possible simple key. Actually we
# don't need to loop through the whole dictionary. We may replace it
# with the following code:
# if not self.possible_simple_keys:
# return None
# return self.possible_simple_keys[
# min(self.possible_simple_keys.keys())].token_number
min_token_number = None
for level in self.possible_simple_keys:
key = self.possible_simple_keys[level]
if min_token_number is None or key.token_number < min_token_number:
min_token_number = key.token_number
return min_token_number
def stale_possible_simple_keys(self):
# Remove entries that are no longer possible simple keys. According to
# the YAML specification, simple keys
# - should be limited to a single line,
# - should be no longer than 1024 characters.
# Disabling this procedure will allow simple keys of any length and
# height (may cause problems if indentation is broken though).
for level in list(self.possible_simple_keys):
key = self.possible_simple_keys[level]
if key.line != self.line \
or self.index-key.index > 1024:
if key.required:
raise ScannerError("while scanning a simple key", key.mark,
"could not found expected ':'", self.get_mark())
del self.possible_simple_keys[level]
def save_possible_simple_key(self):
    """Record the current position as a possible simple-key start."""
    # The next token may start a simple key. We check if it's possible
    # and save its position. This function is called for
    #   ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'.

    # Check if a simple key is required at the current position.
    required = not self.flow_level and self.indent == self.column

    # A simple key is required only if it is the first token in the current
    # line. Therefore it is always allowed.
    assert self.allow_simple_key or not required

    # The next token might be a simple key. Let's save it's number and
    # position.
    if self.allow_simple_key:
        self.remove_possible_simple_key()
        token_number = self.tokens_taken+len(self.tokens)
        key = SimpleKey(token_number, required,
                self.index, self.line, self.column, self.get_mark())
        self.possible_simple_keys[self.flow_level] = key
def remove_possible_simple_key(self):
# Remove the saved possible key position at the current flow level.
if self.flow_level in self.possible_simple_keys:
key = self.possible_simple_keys[self.flow_level]
if key.required:
raise ScannerError("while scanning a simple key", key.mark,
"could not found expected ':'", self.get_mark())
del self.possible_simple_keys[self.flow_level]
# Indentation functions.
def unwind_indent(self, column):
    """Pop indentation levels deeper than *column*, emitting a
    BLOCK-END token for each level closed (block context only)."""
    ## In flow context, tokens should respect indentation.
    ## Actually the condition should be `self.indent >= column` according to
    ## the spec. But this condition will prohibit intuitively correct
    ## constructions such as
    ## key : {
    ## }
    #if self.flow_level and self.indent > column:
    #    raise ScannerError(None, None,
    #            "invalid intendation or unclosed '[' or '{'",
    #            self.get_mark())

    # In the flow context, indentation is ignored. We make the scanner less
    # restrictive then specification requires.
    if self.flow_level:
        return

    # In block context, we may need to issue the BLOCK-END tokens.
    while self.indent > column:
        mark = self.get_mark()
        self.indent = self.indents.pop()
        self.tokens.append(BlockEndToken(mark, mark))
def add_indent(self, column):
# Check if we need to increase indentation.
if self.indent < column:
self.indents.append(self.indent)
self.indent = column
return True
return False
# Fetchers.
def fetch_stream_start(self):
    """Emit the STREAM-START token (always the first token)."""
    # We always add STREAM-START as the first token and STREAM-END as the
    # last token.
    # Read the token.
    mark = self.get_mark()
    # Add STREAM-START.
    # NOTE(review): self.encoding is expected to come from the Reader
    # mixin -- it is not set in this class.
    self.tokens.append(StreamStartToken(mark, mark,
        encoding=self.encoding))

def fetch_stream_end(self):
    """Emit the STREAM-END token and mark the scanner finished."""
    # Set the current intendation to -1.
    self.unwind_indent(-1)
    # Reset simple keys.
    self.remove_possible_simple_key()
    self.allow_simple_key = False
    self.possible_simple_keys = {}
    # Read the token.
    mark = self.get_mark()
    # Add STREAM-END.
    self.tokens.append(StreamEndToken(mark, mark))
    # The steam is finished.
    self.done = True

def fetch_directive(self):
    """Scan a '%' directive line and emit a DIRECTIVE token."""
    # Set the current intendation to -1.
    self.unwind_indent(-1)
    # Reset simple keys.
    self.remove_possible_simple_key()
    self.allow_simple_key = False
    # Scan and add DIRECTIVE.
    self.tokens.append(self.scan_directive())
def fetch_document_start(self):
    # '---' at column 0.
    self.fetch_document_indicator(DocumentStartToken)

def fetch_document_end(self):
    # '...' at column 0.
    self.fetch_document_indicator(DocumentEndToken)

def fetch_document_indicator(self, TokenClass):
    """Common handler for '---' and '...': emit TokenClass."""
    # Set the current intendation to -1.
    self.unwind_indent(-1)

    # Reset simple keys. Note that there could not be a block collection
    # after '---'.
    self.remove_possible_simple_key()
    self.allow_simple_key = False

    # Add DOCUMENT-START or DOCUMENT-END.
    start_mark = self.get_mark()
    self.forward(3)
    end_mark = self.get_mark()
    self.tokens.append(TokenClass(start_mark, end_mark))
def fetch_flow_sequence_start(self):
    # '[' opens a flow sequence.
    self.fetch_flow_collection_start(FlowSequenceStartToken)

def fetch_flow_mapping_start(self):
    # '{' opens a flow mapping.
    self.fetch_flow_collection_start(FlowMappingStartToken)

def fetch_flow_collection_start(self, TokenClass):
    """Common handler for '[' and '{': emit TokenClass, enter flow context."""
    # '[' and '{' may start a simple key.
    self.save_possible_simple_key()

    # Increase the flow level.
    self.flow_level += 1

    # Simple keys are allowed after '[' and '{'.
    self.allow_simple_key = True

    # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(TokenClass(start_mark, end_mark))
def fetch_flow_sequence_end(self):
    # ']' closes a flow sequence.
    self.fetch_flow_collection_end(FlowSequenceEndToken)

def fetch_flow_mapping_end(self):
    # '}' closes a flow mapping.
    self.fetch_flow_collection_end(FlowMappingEndToken)

def fetch_flow_collection_end(self, TokenClass):
    """Common handler for ']' and '}': emit TokenClass, leave flow context."""
    # Reset possible simple key on the current level.
    self.remove_possible_simple_key()

    # Decrease the flow level.
    self.flow_level -= 1

    # No simple keys after ']' or '}'.
    self.allow_simple_key = False

    # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(TokenClass(start_mark, end_mark))
def fetch_flow_entry(self):
    """Emit a FLOW-ENTRY token for ','."""
    # Simple keys are allowed after ','.
    self.allow_simple_key = True

    # Reset possible simple key on the current level.
    self.remove_possible_simple_key()

    # Add FLOW-ENTRY.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(FlowEntryToken(start_mark, end_mark))
def fetch_block_entry(self):
    """Emit a BLOCK-ENTRY token for '-', opening a block sequence
    if this entry starts one."""
    # Block context needs additional checks.
    if not self.flow_level:

        # Are we allowed to start a new entry?
        if not self.allow_simple_key:
            raise ScannerError(None, None,
                    "sequence entries are not allowed here",
                    self.get_mark())

        # We may need to add BLOCK-SEQUENCE-START.
        if self.add_indent(self.column):
            mark = self.get_mark()
            self.tokens.append(BlockSequenceStartToken(mark, mark))

    # It's an error for the block entry to occur in the flow context,
    # but we let the parser detect this.
    else:
        pass

    # Simple keys are allowed after '-'.
    self.allow_simple_key = True

    # Reset possible simple key on the current level.
    self.remove_possible_simple_key()

    # Add BLOCK-ENTRY.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(BlockEntryToken(start_mark, end_mark))
def fetch_key(self):
    """Emit a KEY token for an explicit '?' key indicator."""
    # Block context needs additional checks.
    if not self.flow_level:

        # Are we allowed to start a key (not nessesary a simple)?
        if not self.allow_simple_key:
            raise ScannerError(None, None,
                    "mapping keys are not allowed here",
                    self.get_mark())

        # We may need to add BLOCK-MAPPING-START.
        if self.add_indent(self.column):
            mark = self.get_mark()
            self.tokens.append(BlockMappingStartToken(mark, mark))

    # Simple keys are allowed after '?' in the block context.
    self.allow_simple_key = not self.flow_level

    # Reset possible simple key on the current level.
    self.remove_possible_simple_key()

    # Add KEY.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(KeyToken(start_mark, end_mark))
def fetch_value(self):
    """Emit a VALUE token for ':', retroactively inserting the KEY
    token (and possibly BLOCK-MAPPING-START) when the ':' completes a
    previously saved simple key."""
    # Do we determine a simple key?
    if self.flow_level in self.possible_simple_keys:

        # Add KEY.
        key = self.possible_simple_keys[self.flow_level]
        del self.possible_simple_keys[self.flow_level]
        # Insert the KEY token back at the saved position in the queue.
        self.tokens.insert(key.token_number-self.tokens_taken,
                KeyToken(key.mark, key.mark))

        # If this key starts a new block mapping, we need to add
        # BLOCK-MAPPING-START.
        if not self.flow_level:
            if self.add_indent(key.column):
                self.tokens.insert(key.token_number-self.tokens_taken,
                        BlockMappingStartToken(key.mark, key.mark))

        # There cannot be two simple keys one after another.
        self.allow_simple_key = False

    # It must be a part of a complex key.
    else:

        # Block context needs additional checks.
        # (Do we really need them? They will be catched by the parser
        # anyway.)
        if not self.flow_level:

            # We are allowed to start a complex value if and only if
            # we can start a simple key.
            if not self.allow_simple_key:
                raise ScannerError(None, None,
                        "mapping values are not allowed here",
                        self.get_mark())

        # If this value starts a new block mapping, we need to add
        # BLOCK-MAPPING-START.  It will be detected as an error later by
        # the parser.
        if not self.flow_level:
            if self.add_indent(self.column):
                mark = self.get_mark()
                self.tokens.append(BlockMappingStartToken(mark, mark))

        # Simple keys are allowed after ':' in the block context.
        self.allow_simple_key = not self.flow_level

        # Reset possible simple key on the current level.
        self.remove_possible_simple_key()

    # Add VALUE.
    start_mark = self.get_mark()
    self.forward()
    end_mark = self.get_mark()
    self.tokens.append(ValueToken(start_mark, end_mark))
def fetch_alias(self):
    """Scan and emit an ALIAS token ('*name')."""
    # ALIAS could be a simple key.
    self.save_possible_simple_key()

    # No simple keys after ALIAS.
    self.allow_simple_key = False

    # Scan and add ALIAS.
    self.tokens.append(self.scan_anchor(AliasToken))

def fetch_anchor(self):
    """Scan and emit an ANCHOR token ('&name')."""
    # ANCHOR could start a simple key.
    self.save_possible_simple_key()

    # No simple keys after ANCHOR.
    self.allow_simple_key = False

    # Scan and add ANCHOR.
    self.tokens.append(self.scan_anchor(AnchorToken))

def fetch_tag(self):
    """Scan and emit a TAG token ('!...')."""
    # TAG could start a simple key.
    self.save_possible_simple_key()

    # No simple keys after TAG.
    self.allow_simple_key = False

    # Scan and add TAG.
    self.tokens.append(self.scan_tag())
def fetch_literal(self):
    # '|' introduces a literal block scalar.
    self.fetch_block_scalar(style='|')

def fetch_folded(self):
    # '>' introduces a folded block scalar.
    self.fetch_block_scalar(style='>')

def fetch_block_scalar(self, style):
    """Scan a block scalar of the given style and emit its SCALAR token."""
    # A simple key may follow a block scalar.
    self.allow_simple_key = True

    # Reset possible simple key on the current level.
    self.remove_possible_simple_key()

    # Scan and add SCALAR.
    self.tokens.append(self.scan_block_scalar(style))
def fetch_single(self):
    # Single-quoted flow scalar.
    self.fetch_flow_scalar(style='\'')

def fetch_double(self):
    # Double-quoted flow scalar.
    self.fetch_flow_scalar(style='"')

def fetch_flow_scalar(self, style):
    """Scan a quoted scalar of the given style and emit its SCALAR token."""
    # A flow scalar could be a simple key.
    self.save_possible_simple_key()

    # No simple keys after flow scalars.
    self.allow_simple_key = False

    # Scan and add SCALAR.
    self.tokens.append(self.scan_flow_scalar(style))
def fetch_plain(self):
    """Scan an unquoted (plain) scalar and emit its SCALAR token."""
    # A plain scalar could be a simple key.
    self.save_possible_simple_key()

    # No simple keys after plain scalars. But note that `scan_plain` will
    # change this flag if the scan is finished at the beginning of the
    # line.
    self.allow_simple_key = False

    # Scan and add SCALAR. May change `allow_simple_key`.
    self.tokens.append(self.scan_plain())
# Checkers.
def check_directive(self):
    """Truthy iff a '%' at the current position starts a directive.

    Returns True or falls through to None — callers test truthiness.
    """
    # DIRECTIVE:        ^ '%' ...
    # The '%' indicator is already checked.
    if self.column == 0:
        return True

def check_document_start(self):
    """Truthy iff the input at this position is a '---' document start."""
    # DOCUMENT-START:   ^ '---' (' '|'\n')
    if self.column == 0:
        if self.prefix(3) == '---'  \
                and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
            return True

def check_document_end(self):
    """Truthy iff the input at this position is a '...' document end."""
    # DOCUMENT-END:     ^ '...' (' '|'\n')
    if self.column == 0:
        if self.prefix(3) == '...'  \
                and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
            return True
def check_block_entry(self):
    """BLOCK-ENTRY: the '-' must be followed by a space or line break."""
    return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'

def check_key(self):
    """KEY: bare '?' in flow context; '?' plus space/break in block."""
    if self.flow_level:
        return True
    return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'

def check_value(self):
    """VALUE: bare ':' in flow context; ':' plus space/break in block."""
    if self.flow_level:
        return True
    return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
def check_plain(self):
    """Return True when the current character may start a plain scalar."""
    # A plain scalar may start with any non-space character except:
    #   '-', '?', ':', ',', '[', ']', '{', '}',
    #   '#', '&', '*', '!', '|', '>', '\'', '\"',
    #   '%', '@', '`'.
    #
    # It may also start with
    #   '-', '?', ':'
    # if it is followed by a non-space character.
    #
    # Note that we limit the last rule to the block context (except the
    # '-' character) because we want the flow context to be space
    # independent.
    ch = self.peek()
    return ch not in '\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`'  \
            or (self.peek(1) not in '\0 \t\r\n\x85\u2028\u2029'
                    and (ch == '-' or (not self.flow_level and ch in '?:')))
# Scanners.
def scan_to_next_token(self):
    """Skip spaces, comments and line breaks up to the next token."""
    # We ignore spaces, line breaks and comments.
    # If we find a line break in the block context, we set the flag
    # `allow_simple_key` on.
    # The byte order mark is stripped if it's the first character in the
    # stream. We do not yet support BOM inside the stream as the
    # specification requires. Any such mark will be considered as a part
    # of the document.
    #
    # TODO: We need to make tab handling rules more sane. A good rule is
    #   Tabs cannot precede tokens
    #   BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END,
    #   KEY(block), VALUE(block), BLOCK-ENTRY
    # So the checking code is
    #   if <TAB>:
    #       self.allow_simple_keys = False
    # We also need to add the check for `allow_simple_keys == True` to
    # `unwind_indent` before issuing BLOCK-END.
    # Scanners for block, flow, and plain scalars need to be modified.

    # Strip a leading BOM only at the very start of the stream.
    if self.index == 0 and self.peek() == '\uFEFF':
        self.forward()
    found = False
    while not found:
        while self.peek() == ' ':
            self.forward()
        if self.peek() == '#':
            # Comments run to the end of the line.
            while self.peek() not in '\0\r\n\x85\u2028\u2029':
                self.forward()
        if self.scan_line_break():
            if not self.flow_level:
                self.allow_simple_key = True
        else:
            found = True
def scan_directive(self):
    """Scan a '%NAME value' directive line into a DirectiveToken."""
    # See the specification for details.
    start_mark = self.get_mark()
    self.forward()
    name = self.scan_directive_name(start_mark)
    value = None
    if name == 'YAML':
        value = self.scan_yaml_directive_value(start_mark)
        end_mark = self.get_mark()
    elif name == 'TAG':
        value = self.scan_tag_directive_value(start_mark)
        end_mark = self.get_mark()
    else:
        # Unknown directives are skipped to the end of the line.
        end_mark = self.get_mark()
        while self.peek() not in '\0\r\n\x85\u2028\u2029':
            self.forward()
    self.scan_directive_ignored_line(start_mark)
    return DirectiveToken(name, value, start_mark, end_mark)
def scan_directive_name(self, start_mark):
    """Scan the alphanumeric name following '%'; raise on empty or
    badly terminated names."""
    # See the specification for details.
    length = 0
    ch = self.peek(length)
    while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z'    \
            or ch in '-_':
        length += 1
        ch = self.peek(length)
    if not length:
        raise ScannerError("while scanning a directive", start_mark,
                "expected alphabetic or numeric character, but found %r"
                % ch, self.get_mark())
    value = self.prefix(length)
    self.forward(length)
    ch = self.peek()
    # The name must be followed by whitespace or end of line.
    if ch not in '\0 \r\n\x85\u2028\u2029':
        raise ScannerError("while scanning a directive", start_mark,
                "expected alphabetic or numeric character, but found %r"
                % ch, self.get_mark())
    return value
def scan_yaml_directive_value(self, start_mark):
    """Scan the 'major.minor' version of a %YAML directive;
    return it as an (int, int) tuple."""
    # See the specification for details.
    while self.peek() == ' ':
        self.forward()
    major = self.scan_yaml_directive_number(start_mark)
    if self.peek() != '.':
        raise ScannerError("while scanning a directive", start_mark,
                "expected a digit or '.', but found %r" % self.peek(),
                self.get_mark())
    self.forward()
    minor = self.scan_yaml_directive_number(start_mark)
    if self.peek() not in '\0 \r\n\x85\u2028\u2029':
        raise ScannerError("while scanning a directive", start_mark,
                "expected a digit or ' ', but found %r" % self.peek(),
                self.get_mark())
    return (major, minor)

def scan_yaml_directive_number(self, start_mark):
    """Scan one run of ASCII digits and return it as an int."""
    # See the specification for details.
    ch = self.peek()
    if not ('0' <= ch <= '9'):
        raise ScannerError("while scanning a directive", start_mark,
                "expected a digit, but found %r" % ch, self.get_mark())
    length = 0
    while '0' <= self.peek(length) <= '9':
        length += 1
    value = int(self.prefix(length))
    self.forward(length)
    return value
def scan_tag_directive_value(self, start_mark):
    """Scan the 'handle prefix' pair of a %TAG directive."""
    # See the specification for details.
    while self.peek() == ' ':
        self.forward()
    handle = self.scan_tag_directive_handle(start_mark)
    while self.peek() == ' ':
        self.forward()
    prefix = self.scan_tag_directive_prefix(start_mark)
    return (handle, prefix)

def scan_tag_directive_handle(self, start_mark):
    """Scan the tag handle ('!', '!!' or '!name!') of a %TAG directive."""
    # See the specification for details.
    value = self.scan_tag_handle('directive', start_mark)
    ch = self.peek()
    # A single space must separate handle and prefix.
    if ch != ' ':
        raise ScannerError("while scanning a directive", start_mark,
                "expected ' ', but found %r" % ch, self.get_mark())
    return value

def scan_tag_directive_prefix(self, start_mark):
    """Scan the URI prefix of a %TAG directive."""
    # See the specification for details.
    value = self.scan_tag_uri('directive', start_mark)
    ch = self.peek()
    if ch not in '\0 \r\n\x85\u2028\u2029':
        raise ScannerError("while scanning a directive", start_mark,
                "expected ' ', but found %r" % ch, self.get_mark())
    return value
def scan_directive_ignored_line(self, start_mark):
    """Consume trailing spaces and an optional comment after a
    directive, then the line break; raise on anything else."""
    # See the specification for details.
    while self.peek() == ' ':
        self.forward()
    if self.peek() == '#':
        while self.peek() not in '\0\r\n\x85\u2028\u2029':
            self.forward()
    ch = self.peek()
    if ch not in '\0\r\n\x85\u2028\u2029':
        raise ScannerError("while scanning a directive", start_mark,
                "expected a comment or a line break, but found %r"
                % ch, self.get_mark())
    self.scan_line_break()
def scan_anchor(self, TokenClass):
    """Scan an anchor ('&name') or alias ('*name') into TokenClass."""
    # The specification does not restrict characters for anchors and
    # aliases. This may lead to problems, for instance, the document:
    #   [ *alias, value ]
    # can be interpteted in two ways, as
    #   [ "value" ]
    # and
    #   [ *alias , "value" ]
    # Therefore we restrict aliases to numbers and ASCII letters.
    start_mark = self.get_mark()
    indicator = self.peek()
    # Pick the error-message noun based on the indicator character.
    if indicator == '*':
        name = 'alias'
    else:
        name = 'anchor'
    self.forward()
    length = 0
    ch = self.peek(length)
    while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z'    \
            or ch in '-_':
        length += 1
        ch = self.peek(length)
    if not length:
        raise ScannerError("while scanning an %s" % name, start_mark,
                "expected alphabetic or numeric character, but found %r"
                % ch, self.get_mark())
    value = self.prefix(length)
    self.forward(length)
    ch = self.peek()
    # The name must end at whitespace or a flow/structure indicator.
    if ch not in '\0 \t\r\n\x85\u2028\u2029?:,]}%@`':
        raise ScannerError("while scanning an %s" % name, start_mark,
                "expected alphabetic or numeric character, but found %r"
                % ch, self.get_mark())
    end_mark = self.get_mark()
    return TokenClass(value, start_mark, end_mark)
    def scan_tag(self):
        """Scan a tag token in one of its three forms: verbatim
        ``!<uri>``, named-handle ``!handle!suffix``, or the short forms
        ``!`` / ``!suffix``.  Returns a TagToken whose value is the
        ``(handle, suffix)`` pair; handle is None for verbatim tags.
        """
        # See the specification for details.
        start_mark = self.get_mark()
        ch = self.peek(1)
        if ch == '<':
            # Verbatim tag: !<tag:uri> -- the URI is taken literally.
            handle = None
            self.forward(2)
            suffix = self.scan_tag_uri('tag', start_mark)
            if self.peek() != '>':
                raise ScannerError("while parsing a tag", start_mark,
                        "expected '>', but found %r" % self.peek(),
                        self.get_mark())
            self.forward()
        elif ch in '\0 \t\r\n\x85\u2028\u2029':
            # A lone '!' is the non-specific tag.
            handle = None
            suffix = '!'
            self.forward()
        else:
            # Look ahead to decide between '!suffix' and '!handle!suffix':
            # a second '!' before the end of the token means a named handle.
            length = 1
            use_handle = False
            while ch not in '\0 \r\n\x85\u2028\u2029':
                if ch == '!':
                    use_handle = True
                    break
                length += 1
                ch = self.peek(length)
            handle = '!'
            if use_handle:
                handle = self.scan_tag_handle('tag', start_mark)
            else:
                handle = '!'
                self.forward()
            suffix = self.scan_tag_uri('tag', start_mark)
        ch = self.peek()
        if ch not in '\0 \r\n\x85\u2028\u2029':
            raise ScannerError("while scanning a tag", start_mark,
                    "expected ' ', but found %r" % ch, self.get_mark())
        value = (handle, suffix)
        end_mark = self.get_mark()
        return TagToken(value, start_mark, end_mark)
    def scan_block_scalar(self, style):
        """Scan a literal ('|') or folded ('>') block scalar and return
        a ScalarToken.  Parses the header indicators (chomping and
        explicit indentation), determines the content indentation, and
        applies the folding and chomping rules.
        """
        # See the specification for details.
        if style == '>':
            folded = True
        else:
            folded = False
        chunks = []
        start_mark = self.get_mark()
        # Scan the header.
        self.forward()
        chomping, increment = self.scan_block_scalar_indicators(start_mark)
        self.scan_block_scalar_ignored_line(start_mark)
        # Determine the indentation level and go to the first non-empty line.
        min_indent = self.indent+1
        if min_indent < 1:
            min_indent = 1
        if increment is None:
            # No explicit indentation indicator: detect it from the content.
            breaks, max_indent, end_mark = self.scan_block_scalar_indentation()
            indent = max(min_indent, max_indent)
        else:
            indent = min_indent+increment-1
            breaks, end_mark = self.scan_block_scalar_breaks(indent)
        line_break = ''
        # Scan the inner part of the block scalar.
        while self.column == indent and self.peek() != '\0':
            chunks.extend(breaks)
            leading_non_space = self.peek() not in ' \t'
            length = 0
            while self.peek(length) not in '\0\r\n\x85\u2028\u2029':
                length += 1
            chunks.append(self.prefix(length))
            self.forward(length)
            line_break = self.scan_line_break()
            breaks, end_mark = self.scan_block_scalar_breaks(indent)
            if self.column == indent and self.peek() != '\0':
                # Unfortunately, folding rules are ambiguous.
                #
                # This is the folding according to the specification:
                if folded and line_break == '\n' \
                        and leading_non_space and self.peek() not in ' \t':
                    if not breaks:
                        chunks.append(' ')
                else:
                    chunks.append(line_break)
                # This is Clark Evans's interpretation (also in the spec
                # examples):
                #
                #if folded and line_break == '\n':
                #    if not breaks:
                #        if self.peek() not in ' \t':
                #            chunks.append(' ')
                #        else:
                #            chunks.append(line_break)
                #else:
                #    chunks.append(line_break)
            else:
                break
        # Chomp the tail: None (clip) keeps the final break only,
        # True (keep, '+') keeps all trailing breaks, False (strip, '-')
        # drops them all.
        if chomping is not False:
            chunks.append(line_break)
        if chomping is True:
            chunks.extend(breaks)
        # We are done.
        return ScalarToken(''.join(chunks), False, start_mark, end_mark,
                style)
    def scan_block_scalar_indicators(self, start_mark):
        """Scan the optional chomping ('+'/'-') and indentation (1-9)
        indicators of a block scalar header; they may appear in either
        order, which is why the two branches below mirror each other.

        Returns (chomping, increment): chomping is True for '+' (keep),
        False for '-' (strip) or None (clip); increment is the explicit
        indentation digit or None.
        """
        # See the specification for details.
        chomping = None
        increment = None
        ch = self.peek()
        if ch in '+-':
            # Chomping first, then an optional indentation digit.
            if ch == '+':
                chomping = True
            else:
                chomping = False
            self.forward()
            ch = self.peek()
            if ch in '0123456789':
                increment = int(ch)
                if increment == 0:
                    raise ScannerError("while scanning a block scalar", start_mark,
                            "expected indentation indicator in the range 1-9, but found 0",
                            self.get_mark())
                self.forward()
        elif ch in '0123456789':
            # Indentation digit first, then an optional chomping indicator.
            increment = int(ch)
            if increment == 0:
                raise ScannerError("while scanning a block scalar", start_mark,
                        "expected indentation indicator in the range 1-9, but found 0",
                        self.get_mark())
            self.forward()
            ch = self.peek()
            if ch in '+-':
                if ch == '+':
                    chomping = True
                else:
                    chomping = False
                self.forward()
        ch = self.peek()
        if ch not in '\0 \r\n\x85\u2028\u2029':
            raise ScannerError("while scanning a block scalar", start_mark,
                    "expected chomping or indentation indicators, but found %r"
                    % ch, self.get_mark())
        return chomping, increment
def scan_block_scalar_ignored_line(self, start_mark):
# See the specification for details.
while self.peek() == ' ':
self.forward()
if self.peek() == '#':
while self.peek() not in '\0\r\n\x85\u2028\u2029':
self.forward()
ch = self.peek()
if ch not in '\0\r\n\x85\u2028\u2029':
raise ScannerError("while scanning a block scalar", start_mark,
"expected a comment or a line break, but found %r" % ch,
self.get_mark())
self.scan_line_break()
def scan_block_scalar_indentation(self):
# See the specification for details.
chunks = []
max_indent = 0
end_mark = self.get_mark()
while self.peek() in ' \r\n\x85\u2028\u2029':
if self.peek() != ' ':
chunks.append(self.scan_line_break())
end_mark = self.get_mark()
else:
self.forward()
if self.column > max_indent:
max_indent = self.column
return chunks, max_indent, end_mark
def scan_block_scalar_breaks(self, indent):
# See the specification for details.
chunks = []
end_mark = self.get_mark()
while self.column < indent and self.peek() == ' ':
self.forward()
while self.peek() in '\r\n\x85\u2028\u2029':
chunks.append(self.scan_line_break())
end_mark = self.get_mark()
while self.column < indent and self.peek() == ' ':
self.forward()
return chunks, end_mark
def scan_flow_scalar(self, style):
# See the specification for details.
# Note that we loose indentation rules for quoted scalars. Quoted
# scalars don't need to adhere indentation because " and ' clearly
# mark the beginning and the end of them. Therefore we are less
# restrictive then the specification requires. We only need to check
# that document separators are not included in scalars.
if style == '"':
double = True
else:
double = False
chunks = []
start_mark = self.get_mark()
quote = self.peek()
self.forward()
chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
while self.peek() != quote:
chunks.extend(self.scan_flow_scalar_spaces(double, start_mark))
chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
self.forward()
end_mark = self.get_mark()
return ScalarToken(''.join(chunks), False, start_mark, end_mark,
style)
ESCAPE_REPLACEMENTS = {
'0': '\0',
'a': '\x07',
'b': '\x08',
't': '\x09',
'\t': '\x09',
'n': '\x0A',
'v': '\x0B',
'f': '\x0C',
'r': '\x0D',
'e': '\x1B',
' ': '\x20',
'\"': '\"',
'\\': '\\',
'N': '\x85',
'_': '\xA0',
'L': '\u2028',
'P': '\u2029',
}
ESCAPE_CODES = {
'x': 2,
'u': 4,
'U': 8,
}
    def scan_flow_scalar_non_spaces(self, double, start_mark):
        """Scan the non-blank content of a quoted scalar: plain runs,
        the '' quote-escape (single-quoted only) and backslash escapes
        including \\xXX/\\uXXXX/\\UXXXXXXXX and escaped line breaks
        (double-quoted only).  Returns the list of content chunks.
        """
        # See the specification for details.
        chunks = []
        while True:
            length = 0
            while self.peek(length) not in '\'\"\\\0 \t\r\n\x85\u2028\u2029':
                length += 1
            if length:
                chunks.append(self.prefix(length))
                self.forward(length)
            ch = self.peek()
            if not double and ch == '\'' and self.peek(1) == '\'':
                # '' inside a single-quoted scalar is an escaped quote.
                chunks.append('\'')
                self.forward(2)
            elif (double and ch == '\'') or (not double and ch in '\"\\'):
                # Literal character: has no special meaning in this style.
                chunks.append(ch)
                self.forward()
            elif double and ch == '\\':
                self.forward()
                ch = self.peek()
                if ch in self.ESCAPE_REPLACEMENTS:
                    chunks.append(self.ESCAPE_REPLACEMENTS[ch])
                    self.forward()
                elif ch in self.ESCAPE_CODES:
                    # Fixed-width hex escape: \xXX, \uXXXX or \UXXXXXXXX.
                    length = self.ESCAPE_CODES[ch]
                    self.forward()
                    for k in range(length):
                        if self.peek(k) not in '0123456789ABCDEFabcdef':
                            raise ScannerError("while scanning a double-quoted scalar", start_mark,
                                    "expected escape sequence of %d hexdecimal numbers, but found %r" %
                                        (length, self.peek(k)), self.get_mark())
                    code = int(self.prefix(length), 16)
                    chunks.append(chr(code))
                    self.forward(length)
                elif ch in '\r\n\x85\u2028\u2029':
                    # Escaped line break: the break is removed and the
                    # following empty lines are kept.
                    self.scan_line_break()
                    chunks.extend(self.scan_flow_scalar_breaks(double, start_mark))
                else:
                    raise ScannerError("while scanning a double-quoted scalar", start_mark,
                            "found unknown escape character %r" % ch, self.get_mark())
            else:
                # Blank, break, end of stream or closing quote: done.
                return chunks
def scan_flow_scalar_spaces(self, double, start_mark):
# See the specification for details.
chunks = []
length = 0
while self.peek(length) in ' \t':
length += 1
whitespaces = self.prefix(length)
self.forward(length)
ch = self.peek()
if ch == '\0':
raise ScannerError("while scanning a quoted scalar", start_mark,
"found unexpected end of stream", self.get_mark())
elif ch in '\r\n\x85\u2028\u2029':
line_break = self.scan_line_break()
breaks = self.scan_flow_scalar_breaks(double, start_mark)
if line_break != '\n':
chunks.append(line_break)
elif not breaks:
chunks.append(' ')
chunks.extend(breaks)
else:
chunks.append(whitespaces)
return chunks
def scan_flow_scalar_breaks(self, double, start_mark):
# See the specification for details.
chunks = []
while True:
# Instead of checking indentation, we check for document
# separators.
prefix = self.prefix(3)
if (prefix == '---' or prefix == '...') \
and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
raise ScannerError("while scanning a quoted scalar", start_mark,
"found unexpected document separator", self.get_mark())
while self.peek() in ' \t':
self.forward()
if self.peek() in '\r\n\x85\u2028\u2029':
chunks.append(self.scan_line_break())
else:
return chunks
    def scan_plain(self):
        """Scan a plain (unquoted) scalar and return a ScalarToken.

        In the flow context plain scalars may not contain ',', ':' or
        '?'; `allow_simple_key` is cleared once any content has been
        consumed.  Indentation rules are loosed for the flow context.
        """
        # See the specification for details.
        # We add an additional restriction for the flow context:
        #   plain scalars in the flow context cannot contain ',', ':' and '?'.
        # We also keep track of the `allow_simple_key` flag here.
        # Indentation rules are loosed for the flow context.
        chunks = []
        start_mark = self.get_mark()
        end_mark = start_mark
        indent = self.indent+1
        # We allow zero indentation for scalars, but then we need to check for
        # document separators at the beginning of the line.
        #if indent == 0:
        #    indent = 1
        spaces = []
        while True:
            length = 0
            if self.peek() == '#':
                # A comment ends the scalar.
                break
            while True:
                ch = self.peek(length)
                # Stop at a blank/break, at ': ' outside flow context, or
                # at any flow indicator inside the flow context.
                if ch in '\0 \t\r\n\x85\u2028\u2029'    \
                        or (not self.flow_level and ch == ':' and
                                self.peek(length+1) in '\0 \t\r\n\x85\u2028\u2029') \
                        or (self.flow_level and ch in ',:?[]{}'):
                    break
                length += 1
            # It's not clear what we should do with ':' in the flow context.
            if (self.flow_level and ch == ':'
                    and self.peek(length+1) not in '\0 \t\r\n\x85\u2028\u2029,[]{}'):
                self.forward(length)
                raise ScannerError("while scanning a plain scalar", start_mark,
                    "found unexpected ':'", self.get_mark(),
                    "Please check http://pyyaml.org/wiki/YAMLColonInFlowContext for details.")
            if length == 0:
                break
            self.allow_simple_key = False
            chunks.extend(spaces)
            chunks.append(self.prefix(length))
            self.forward(length)
            end_mark = self.get_mark()
            spaces = self.scan_plain_spaces(indent, start_mark)
            # scan_plain_spaces returns a falsy value when the scalar ends
            # (document separator, comment, or dedent outside flow context).
            if not spaces or self.peek() == '#' \
                    or (not self.flow_level and self.column < indent):
                break
        return ScalarToken(''.join(chunks), True, start_mark, end_mark)
    def scan_plain_spaces(self, indent, start_mark):
        """Scan spaces and folded line breaks between plain-scalar chunks.

        Returns the whitespace chunks to insert between content runs, or
        None when a '---'/'...' document separator terminates the scalar.
        Tabs are forbidden in plain scalars.
        """
        # See the specification for details.
        # The specification is really confusing about tabs in plain scalars.
        # We just forbid them completely. Do not use tabs in YAML!
        chunks = []
        length = 0
        while self.peek(length) in ' ':
            length += 1
        whitespaces = self.prefix(length)
        self.forward(length)
        ch = self.peek()
        if ch in '\r\n\x85\u2028\u2029':
            line_break = self.scan_line_break()
            # A break after content allows a simple key on the next line.
            self.allow_simple_key = True
            prefix = self.prefix(3)
            if (prefix == '---' or prefix == '...')   \
                    and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
                return
            breaks = []
            while self.peek() in ' \r\n\x85\u2028\u2029':
                if self.peek() == ' ':
                    self.forward()
                else:
                    breaks.append(self.scan_line_break())
                    prefix = self.prefix(3)
                    if (prefix == '---' or prefix == '...')   \
                            and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
                        return
            # Folding: a lone '\n' becomes a single space unless followed
            # by further breaks, which are kept as-is.
            if line_break != '\n':
                chunks.append(line_break)
            elif not breaks:
                chunks.append(' ')
            chunks.extend(breaks)
        elif whitespaces:
            chunks.append(whitespaces)
        return chunks
def scan_tag_handle(self, name, start_mark):
# See the specification for details.
# For some strange reasons, the specification does not allow '_' in
# tag handles. I have allowed it anyway.
ch = self.peek()
if ch != '!':
raise ScannerError("while scanning a %s" % name, start_mark,
"expected '!', but found %r" % ch, self.get_mark())
length = 1
ch = self.peek(length)
if ch != ' ':
while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
or ch in '-_':
length += 1
ch = self.peek(length)
if ch != '!':
self.forward(length)
raise ScannerError("while scanning a %s" % name, start_mark,
"expected '!', but found %r" % ch, self.get_mark())
length += 1
value = self.prefix(length)
self.forward(length)
return value
def scan_tag_uri(self, name, start_mark):
# See the specification for details.
# Note: we do not check if URI is well-formed.
chunks = []
length = 0
ch = self.peek(length)
while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
or ch in '-;/?:@&=+$,_.!~*\'()[]%':
if ch == '%':
chunks.append(self.prefix(length))
self.forward(length)
length = 0
chunks.append(self.scan_uri_escapes(name, start_mark))
else:
length += 1
ch = self.peek(length)
if length:
chunks.append(self.prefix(length))
self.forward(length)
length = 0
if not chunks:
raise ScannerError("while parsing a %s" % name, start_mark,
"expected URI, but found %r" % ch, self.get_mark())
return ''.join(chunks)
def scan_uri_escapes(self, name, start_mark):
# See the specification for details.
codes = []
mark = self.get_mark()
while self.peek() == '%':
self.forward()
for k in range(2):
if self.peek(k) not in '0123456789ABCDEFabcdef':
raise ScannerError("while scanning a %s" % name, start_mark,
"expected URI escape sequence of 2 hexdecimal numbers, but found %r"
% self.peek(k), self.get_mark())
codes.append(int(self.prefix(2), 16))
self.forward(2)
try:
value = bytes(codes).decode('utf-8')
except UnicodeDecodeError as exc:
raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark)
return value
def scan_line_break(self):
# Transforms:
# '\r\n' : '\n'
# '\r' : '\n'
# '\n' : '\n'
# '\x85' : '\n'
# '\u2028' : '\u2028'
# '\u2029 : '\u2029'
# default : ''
ch = self.peek()
if ch in '\r\n\x85':
if self.prefix(2) == '\r\n':
self.forward(2)
else:
self.forward()
return '\n'
elif ch in '\u2028\u2029':
self.forward()
return ch
return ''
#try:
# import psyco
# psyco.bind(Scanner)
#except ImportError:
# pass
|
bsd-2-clause
|
DasIch/django
|
tests/serializers/test_json.py
|
81
|
9163
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
import re
from django.core import serializers
from django.core.serializers.base import DeserializationError
from django.core.serializers.json import DjangoJSONEncoder
from django.test import SimpleTestCase, TestCase, TransactionTestCase
from django.utils.translation import override, ugettext_lazy
from .models import Score
from .tests import SerializersTestBase, SerializersTransactionTestBase
class JsonSerializerTestCase(SerializersTestBase, TestCase):
    """Tests for the "json" serializer, running the shared
    SerializersTestBase suite plus json-specific checks (indentation
    whitespace and helpful deserialization error messages)."""
    serializer_name = "json"
    # Fixture whose objects have a null or missing pk.
    pkless_str = """[
    {
        "pk": null,
        "model": "serializers.category",
        "fields": {"name": "Reference"}
    }, {
        "model": "serializers.category",
        "fields": {"name": "Non-fiction"}
    }]"""
    # Expected serialized output (used to check key ordering); %(...)s
    # placeholders are filled with the actual pks by the base test.
    mapping_ordering_str = """[
{
  "model": "serializers.article",
  "pk": %(article_pk)s,
  "fields": {
    "author": %(author_pk)s,
    "headline": "Poker has no place on ESPN",
    "pub_date": "2006-06-16T11:00:00",
    "categories": [
      %(first_category_pk)s,
      %(second_category_pk)s
    ],
    "meta_data": []
  }
}
]
"""
    @staticmethod
    def _validate_output(serial_str):
        # True if serial_str parses as JSON at all.
        try:
            json.loads(serial_str)
        except Exception:
            return False
        else:
            return True
    @staticmethod
    def _get_pk_values(serial_str):
        # Collect the "pk" entry of every serialized object.
        ret_list = []
        serial_list = json.loads(serial_str)
        for obj_dict in serial_list:
            ret_list.append(obj_dict["pk"])
        return ret_list
    @staticmethod
    def _get_field_values(serial_str, field_name):
        # Collect field_name's value from every serialized object that has it.
        ret_list = []
        serial_list = json.loads(serial_str)
        for obj_dict in serial_list:
            if field_name in obj_dict["fields"]:
                ret_list.append(obj_dict["fields"][field_name])
        return ret_list
    def test_indentation_whitespace(self):
        # Indented output must not carry trailing whitespace after commas.
        Score.objects.create(score=5.0)
        Score.objects.create(score=6.0)
        qset = Score.objects.all()
        s = serializers.json.Serializer()
        json_data = s.serialize(qset, indent=2)
        for line in json_data.splitlines():
            if re.search(r'.+,\s*$', line):
                self.assertEqual(line, line.rstrip())
    def test_json_deserializer_exception(self):
        # Malformed JSON (unterminated list) raises DeserializationError.
        with self.assertRaises(DeserializationError):
            for obj in serializers.deserialize("json", """[{"pk":1}"""):
                pass
    def test_helpful_error_message_invalid_pk(self):
        """
        If there is an invalid primary key, the error message should contain
        the model associated with it.
        """
        test_string = """[{
            "pk": "badpk",
            "model": "serializers.player",
            "fields": {
                "name": "Bob",
                "rank": 1,
                "team": "Team"
            }
        }]"""
        with self.assertRaisesMessage(DeserializationError, "(serializers.player:pk=badpk)"):
            list(serializers.deserialize('json', test_string))
    def test_helpful_error_message_invalid_field(self):
        """
        If there is an invalid field value, the error message should contain
        the model associated with it.
        """
        test_string = """[{
            "pk": "1",
            "model": "serializers.player",
            "fields": {
                "name": "Bob",
                "rank": "invalidint",
                "team": "Team"
            }
        }]"""
        expected = "(serializers.player:pk=1) field_value was 'invalidint'"
        with self.assertRaisesMessage(DeserializationError, expected):
            list(serializers.deserialize('json', test_string))
    def test_helpful_error_message_for_foreign_keys(self):
        """
        Invalid foreign keys with a natural key should throw a helpful error
        message, such as what the failing key is.
        """
        test_string = """[{
            "pk": 1,
            "model": "serializers.category",
            "fields": {
                "name": "Unknown foreign key",
                "meta_data": [
                    "doesnotexist",
                    "metadata"
                ]
            }
        }]"""
        key = ["doesnotexist", "metadata"]
        expected = "(serializers.category:pk=1) field_value was '%r'" % key
        with self.assertRaisesMessage(DeserializationError, expected):
            list(serializers.deserialize('json', test_string))
    def test_helpful_error_message_for_many2many_non_natural(self):
        """
        Invalid many-to-many keys should throw a helpful error message.
        """
        test_string = """[{
            "pk": 1,
            "model": "serializers.article",
            "fields": {
                "author": 1,
                "headline": "Unknown many to many",
                "pub_date": "2014-09-15T10:35:00",
                "categories": [1, "doesnotexist"]
            }
        }, {
            "pk": 1,
            "model": "serializers.author",
            "fields": {
                "name": "Agnes"
            }
        }, {
            "pk": 1,
            "model": "serializers.category",
            "fields": {
                "name": "Reference"
            }
        }]"""
        expected = "(serializers.article:pk=1) field_value was 'doesnotexist'"
        with self.assertRaisesMessage(DeserializationError, expected):
            list(serializers.deserialize('json', test_string))
    def test_helpful_error_message_for_many2many_natural1(self):
        """
        Invalid many-to-many keys should throw a helpful error message.
        This tests the code path where one of a list of natural keys is invalid.
        """
        test_string = """[{
            "pk": 1,
            "model": "serializers.categorymetadata",
            "fields": {
                "kind": "author",
                "name": "meta1",
                "value": "Agnes"
            }
        }, {
            "pk": 1,
            "model": "serializers.article",
            "fields": {
                "author": 1,
                "headline": "Unknown many to many",
                "pub_date": "2014-09-15T10:35:00",
                "meta_data": [
                    ["author", "meta1"],
                    ["doesnotexist", "meta1"],
                    ["author", "meta1"]
                ]
            }
        }, {
            "pk": 1,
            "model": "serializers.author",
            "fields": {
                "name": "Agnes"
            }
        }]"""
        key = ["doesnotexist", "meta1"]
        expected = "(serializers.article:pk=1) field_value was '%r'" % key
        with self.assertRaisesMessage(DeserializationError, expected):
            for obj in serializers.deserialize('json', test_string):
                obj.save()
    def test_helpful_error_message_for_many2many_natural2(self):
        """
        Invalid many-to-many keys should throw a helpful error message. This
        tests the code path where a natural many-to-many key has only a single
        value.
        """
        test_string = """[{
            "pk": 1,
            "model": "serializers.article",
            "fields": {
                "author": 1,
                "headline": "Unknown many to many",
                "pub_date": "2014-09-15T10:35:00",
                "meta_data": [1, "doesnotexist"]
            }
        }, {
            "pk": 1,
            "model": "serializers.categorymetadata",
            "fields": {
                "kind": "author",
                "name": "meta1",
                "value": "Agnes"
            }
        }, {
            "pk": 1,
            "model": "serializers.author",
            "fields": {
                "name": "Agnes"
            }
        }]"""
        expected = "(serializers.article:pk=1) field_value was 'doesnotexist'"
        with self.assertRaisesMessage(DeserializationError, expected):
            for obj in serializers.deserialize('json', test_string, ignore=False):
                obj.save()
class JsonSerializerTransactionTestCase(SerializersTransactionTestBase, TransactionTestCase):
    """Transaction tests for the "json" serializer: the fixture below
    references a category and an author before they are defined, so
    deserialization must resolve forward references."""
    serializer_name = "json"
    fwd_ref_str = """[
    {
        "pk": 1,
        "model": "serializers.article",
        "fields": {
            "headline": "Forward references pose no problem",
            "pub_date": "2006-06-16T15:00:00",
            "categories": [1],
            "author": 1
        }
    },
    {
        "pk": 1,
        "model": "serializers.category",
        "fields": {
            "name": "Reference"
        }
    },
    {
        "pk": 1,
        "model": "serializers.author",
        "fields": {
            "name": "Agnes"
        }
    }]"""
class DjangoJSONEncoderTests(SimpleTestCase):
    """Tests for DjangoJSONEncoder beyond model serialization."""
    def test_lazy_string_encoding(self):
        # Lazy translation strings must be forced and encoded in the
        # active language (here: default, then French).
        self.assertEqual(
            json.dumps({'lang': ugettext_lazy("French")}, cls=DjangoJSONEncoder),
            '{"lang": "French"}'
        )
        with override('fr'):
            self.assertEqual(
                json.dumps({'lang': ugettext_lazy("French")}, cls=DjangoJSONEncoder),
                '{"lang": "Fran\\u00e7ais"}'
            )
|
bsd-3-clause
|
gam-phon/taiga-back
|
taiga/base/utils/urls.py
|
3
|
1794
|
# Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# Copyright (C) 2014-2016 Anler Hernández <hello@anler.me>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import django_sites as sites
from django.core.urlresolvers import reverse as django_reverse
URL_TEMPLATE = "{scheme}://{domain}/{path}"


def build_url(path, scheme="http", domain="localhost"):
    """Build an absolute url from its scheme, domain and path parts."""
    normalized_path = path.lstrip("/")
    return URL_TEMPLATE.format(scheme=scheme, domain=domain,
                               path=normalized_path)
def is_absolute_url(path):
    """Test whether or not `path` is an absolute url.

    NOTE(review): this is a plain prefix check, so any string starting
    with "http" (including e.g. "httpfoo" or "https") counts as
    absolute — presumably fine for the urls this project produces, but
    worth confirming.
    """
    return path.startswith("http")
def get_absolute_url(path):
    """Return `path` as an absolute url, using the current site's
    scheme and domain when `path` is relative."""
    if not is_absolute_url(path):
        current_site = sites.get_current()
        path = build_url(path, scheme=current_site.scheme,
                         domain=current_site.domain)
    return path
def reverse(viewname, *args, **kwargs):
    """Same behavior as django's reverse but uses django_sites to compute absolute url."""
    relative_url = django_reverse(viewname, *args, **kwargs)
    return get_absolute_url(relative_url)
|
agpl-3.0
|
vmindru/ansible
|
test/units/modules/network/nxos/test_nxos_pim.py
|
44
|
2195
|
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from ansible.modules.network.nxos import nxos_pim
from .nxos_module import TestNxosModule, load_fixture, set_module_args
class TestNxosPimModule(TestNxosModule):
    """Unit tests for the nxos_pim module.

    get_config/load_config are patched so no device connection is made;
    the "running config" comes from the nxos_pim/config.cfg fixture.
    """
    module = nxos_pim
    def setUp(self):
        super(TestNxosPimModule, self).setUp()
        # Patch out device I/O for the duration of each test.
        self.mock_get_config = patch('ansible.modules.network.nxos.nxos_pim.get_config')
        self.get_config = self.mock_get_config.start()
        self.mock_load_config = patch('ansible.modules.network.nxos.nxos_pim.load_config')
        self.load_config = self.mock_load_config.start()
    def tearDown(self):
        super(TestNxosPimModule, self).tearDown()
        self.mock_get_config.stop()
        self.mock_load_config.stop()
    def load_fixtures(self, commands=None, device=''):
        # Serve the canned running-config; pushing config is a no-op.
        self.get_config.return_value = load_fixture('nxos_pim', 'config.cfg')
        self.load_config.return_value = None
    def test_nxos_pim(self):
        # Changing the ssm range emits the corresponding command.
        set_module_args(dict(ssm_range='232.0.0.0/8'))
        self.execute_module(changed=True, commands=['ip pim ssm range 232.0.0.0/8'])
    def test_nxos_pim_none(self):
        # 'none' is accepted as an explicit ssm range value.
        set_module_args(dict(ssm_range='none'))
        self.execute_module(changed=True, commands=['ip pim ssm range none'])
    def test_nxos_pim_no_change(self):
        # Matching the fixture's existing range produces no commands.
        set_module_args(dict(ssm_range='127.0.0.0/31'))
        self.execute_module(changed=False, commands=[])
|
gpl-3.0
|
alkyl1978/gnuradio
|
gr-wxgui/python/wxgui/plotter/channel_plotter.py
|
57
|
7757
|
#
# Copyright 2008, 2009, 2010 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import wx
from grid_plotter_base import grid_plotter_base
from OpenGL import GL
import common
import numpy
import gltext
import math
LEGEND_TEXT_FONT_SIZE = 8  # point size of the legend channel labels
LEGEND_BOX_PADDING = 3  # pixels of padding around each legend entry
MIN_PADDING = 35, 10, 0, 0 #top, right, bottom, left
#constants for the waveform storage
# (keys of the per-channel dicts kept in channel_plotter._channels)
SAMPLES_KEY = 'samples'
COLOR_SPEC_KEY = 'color_spec'
MARKERY_KEY = 'marker'
TRIG_OFF_KEY = 'trig_off'
##################################################
# Channel Plotter for X Y Waveforms
##################################################
class channel_plotter(grid_plotter_base):
    """OpenGL plotter for X/Y channel waveforms with an optional legend.

    Waveforms are stored per-channel in self._channels (see the
    *_KEY constants) and scaled onto the grid with GL matrix
    operations; legend and waveform drawing are cached via gl caches.
    """

    def __init__(self, parent):
        """
        Create a new channel plotter.
        """
        #init
        grid_plotter_base.__init__(self, parent, MIN_PADDING)
        self.set_use_persistence(False)
        #setup legend cache
        self._legend_cache = self.new_gl_cache(self._draw_legend, 50)
        self.enable_legend(False)
        #setup waveform cache
        self._waveform_cache = self.new_gl_cache(self._draw_waveforms, 50)
        self._channels = dict()
        #init channel plotter
        self.register_init(self._init_channel_plotter)
        self.callback = None

    def _init_channel_plotter(self):
        """
        Run gl initialization tasks.
        """
        GL.glEnableClientState(GL.GL_VERTEX_ARRAY)

    def enable_legend(self, enable=None):
        """
        Enable/disable the legend.

        Args:
            enable: true to enable

        Returns:
            the enable state when None
        """
        if enable is None: return self._enable_legend
        self.lock()
        self._enable_legend = enable
        self._legend_cache.changed(True)
        self.unlock()

    def _draw_waveforms(self):
        """
        Draw the waveforms for each channel.
        Scale the waveform data to the grid using gl matrix operations.
        """
        #use scissor to prevent drawing outside grid
        GL.glEnable(GL.GL_SCISSOR_TEST)
        GL.glScissor(
            self.padding_left+1,
            self.padding_bottom+1,
            self.width-self.padding_left-self.padding_right-1,
            self.height-self.padding_top-self.padding_bottom-1,
        )
        for channel in reversed(sorted(self._channels.keys())):
            samples = self._channels[channel][SAMPLES_KEY]
            num_samps = len(samples)
            if not num_samps: continue
            #use opengl to scale the waveform
            GL.glPushMatrix()
            GL.glTranslatef(self.padding_left, self.padding_top, 0)
            GL.glScalef(
                (self.width-self.padding_left-self.padding_right),
                (self.height-self.padding_top-self.padding_bottom),
                1,
            )
            GL.glTranslatef(0, 1, 0)
            # tuple samples hold explicit (x, y) data; otherwise samples
            # are y values plotted against the sample index, shifted by
            # the channel's trigger offset.
            if isinstance(samples, tuple):
                x_scale, x_trans = 1.0/(self.x_max-self.x_min), -self.x_min
                points = zip(*samples)
            else:
                x_scale, x_trans = 1.0/(num_samps-1), -self._channels[channel][TRIG_OFF_KEY]
                points = zip(numpy.arange(0, num_samps), samples)
            GL.glScalef(x_scale, -1.0/(self.y_max-self.y_min), 1)
            GL.glTranslatef(x_trans, -self.y_min, 0)
            #draw the points/lines
            GL.glColor3f(*self._channels[channel][COLOR_SPEC_KEY])
            marker = self._channels[channel][MARKERY_KEY]
            if marker is None:
                GL.glVertexPointerf(points)
                GL.glDrawArrays(GL.GL_LINE_STRIP, 0, len(points))
            elif isinstance(marker, (int, float)) and marker > 0:
                GL.glPointSize(marker)
                GL.glVertexPointerf(points)
                GL.glDrawArrays(GL.GL_POINTS, 0, len(points))
            GL.glPopMatrix()
        GL.glDisable(GL.GL_SCISSOR_TEST)

    def _populate_point_label(self, x_val, y_val):
        """
        Get the text the will populate the point label.
        Give X and Y values for the current point.
        Give values for the channel at the X coordinate.

        Args:
            x_val: the current x value
            y_val: the current y value

        Returns:
            a string with newlines
        """
        #create text
        label_str = '%s: %s\n%s: %s'%(
            self.x_label, common.eng_format(x_val, self.x_units),
            self.y_label, common.eng_format(y_val, self.y_units),
        )
        for channel in sorted(self._channels.keys()):
            samples = self._channels[channel][SAMPLES_KEY]
            num_samps = len(samples)
            if not num_samps: continue
            if isinstance(samples, tuple): continue
            #linear interpolation
            x_index = (num_samps-1)*(x_val-self.x_min)/(self.x_max-self.x_min)
            x_index_low = int(math.floor(x_index))
            x_index_high = int(math.ceil(x_index))
            scale = x_index - x_index_low + self._channels[channel][TRIG_OFF_KEY]
            y_value = (samples[x_index_high] - samples[x_index_low])*scale + samples[x_index_low]
            if math.isnan(y_value): continue
            label_str += '\n%s: %s'%(channel, common.eng_format(y_value, self.y_units))
        return label_str

    def _call_callback (self, x_val, y_val):
        # Forward the point to the user callback, if one is registered.
        # Fixed: identity comparison with None ('is not' instead of '!=').
        if self.callback is not None:
            self.callback(x_val, y_val)

    def set_callback (self, callback):
        # Register a callable invoked with (x_val, y_val) on point events.
        self.callback = callback

    def _draw_legend(self):
        """
        Draw the legend in the upper right corner.
        For each channel, draw a rectangle out of the channel color,
        and overlay the channel text on top of the rectangle.
        """
        if not self.enable_legend(): return
        x_off = self.width - self.padding_right - LEGEND_BOX_PADDING
        for i, channel in enumerate(reversed(sorted(self._channels.keys()))):
            samples = self._channels[channel][SAMPLES_KEY]
            if not len(samples): continue
            color_spec = self._channels[channel][COLOR_SPEC_KEY]
            txt = gltext.Text(channel, font_size=LEGEND_TEXT_FONT_SIZE)
            w, h = txt.get_size()
            #draw rect + text
            GL.glColor3f(*color_spec)
            self._draw_rect(
                x_off - w - LEGEND_BOX_PADDING,
                self.padding_top/2 - h/2 - LEGEND_BOX_PADDING,
                w+2*LEGEND_BOX_PADDING,
                h+2*LEGEND_BOX_PADDING,
            )
            txt.draw_text(wx.Point(x_off - w, self.padding_top/2 - h/2))
            x_off -= w + 4*LEGEND_BOX_PADDING

    def clear_waveform(self, channel):
        """
        Remove a waveform from the list of waveforms.

        Args:
            channel: the channel key
        """
        self.lock()
        # membership test directly on the dict (was: in .keys())
        if channel in self._channels:
            self._channels.pop(channel)
            self._legend_cache.changed(True)
            self._waveform_cache.changed(True)
        self.unlock()

    def set_waveform(self, channel, samples=[], color_spec=(0, 0, 0), marker=None, trig_off=0):
        """
        Set the waveform for a given channel.

        Args:
            channel: the channel key
            samples: the waveform samples
            color_spec: the 3-tuple for line color
            marker: None for line
            trig_off: fraction of sample for trigger offset
        """
        self.lock()
        # a brand-new channel invalidates the legend
        if channel not in self._channels: self._legend_cache.changed(True)
        self._channels[channel] = {
            SAMPLES_KEY: samples,
            COLOR_SPEC_KEY: color_spec,
            MARKERY_KEY: marker,
            TRIG_OFF_KEY: trig_off,
        }
        self._waveform_cache.changed(True)
        self.unlock()
if __name__ == '__main__':
    # Demo: two independent channel plotters stacked vertically in a frame.
    app = wx.PySimpleApp()
    frame = wx.Frame(None, -1, 'Demo', wx.DefaultPosition)
    vbox = wx.BoxSizer(wx.VERTICAL)
    # First plotter with a fixed grid.
    plotter = channel_plotter(frame)
    plotter.set_x_grid(-1, 1, .2)
    plotter.set_y_grid(-1, 1, .4)
    vbox.Add(plotter, 1, wx.EXPAND)
    # Second plotter, same grid configuration.
    plotter = channel_plotter(frame)
    plotter.set_x_grid(-1, 1, .2)
    plotter.set_y_grid(-1, 1, .4)
    vbox.Add(plotter, 1, wx.EXPAND)
    frame.SetSizerAndFit(vbox)
    frame.SetSize(wx.Size(800, 600))
    frame.Show()
    # Blocks until the window is closed.
    app.MainLoop()
|
gpl-3.0
|
osvalr/odoo
|
openerp/addons/__init__.py
|
378
|
1537
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2010-2011 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" Addons module.
This module serves to contain all OpenERP addons, across all configured addons
paths. For the code to manage those addons, see openerp.modules.
Addons are made available under `openerp.addons` after
openerp.tools.config.parse_config() is called (so that the addons paths are
known).
This module also conveniently reexports some symbols from openerp.modules.
Importing them from here is deprecated.
"""
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
strahlc/exaile
|
xl/migrations/settings/rating.py
|
5
|
1797
|
# Copyright (C) 2008-2010 Adam Olsen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
# The developers of the Exaile media player hereby grant permission
# for non-GPL compatible GStreamer and Exaile plugins to be used and
# distributed together with GStreamer and Exaile. This permission is
# above and beyond the permissions granted by the GPL license by which
# Exaile is covered. If you modify this code, you may extend this
# exception to your version of the code, but you are not obligated to
# do so. If you do not wish to do so, delete this exception statement
# from your version.
from xl import settings
def migrate():
    """
    Copy the legacy 'miscellaneous/rating_*' options over to their
    new 'rating/*' keys, when the old options are present.
    """
    # (old key, new key, default used when reading the old value)
    renames = (
        ('miscellaneous/rating_steps', 'rating/maximum', 5),
        ('miscellaneous/rating_widget_tracks_limit', 'rating/tracks_limit', 100),
    )
    for old_key, new_key, fallback in renames:
        if settings.MANAGER.has_option(old_key):
            settings.set_option(new_key, settings.get_option(old_key, fallback))
|
gpl-2.0
|
webgeodatavore/django
|
django/contrib/gis/gdal/base.py
|
654
|
1179
|
from ctypes import c_void_p
from django.contrib.gis.gdal.error import GDALException
from django.utils import six
class GDALBase(object):
    """
    Base object for GDAL objects that has a pointer access property
    that controls access to the underlying C pointer.
    """
    # The pointer starts out NULL until explicitly assigned.
    _ptr = None

    # Default allowed ctypes pointer type.
    ptr_type = c_void_p

    def _get_ptr(self):
        # A NULL pointer means the underlying GDAL object is gone;
        # fail loudly rather than hand a NULL pointer to C routines.
        if not self._ptr:
            raise GDALException('GDAL %s pointer no longer valid.' % self.__class__.__name__)
        return self._ptr

    def _set_ptr(self, ptr):
        # Raw integers are wrapped into the expected ctypes pointer type.
        if isinstance(ptr, six.integer_types):
            self._ptr = self.ptr_type(ptr)
            return
        # Otherwise only None (NULL) or an already-typed pointer is allowed.
        if ptr is not None and not isinstance(ptr, self.ptr_type):
            raise TypeError('Incompatible pointer type')
        self._ptr = ptr

    # Public access goes through the property pair above.
    ptr = property(_get_ptr, _set_ptr)
|
bsd-3-clause
|
bikong2/django
|
tests/check_framework/test_templates.py
|
288
|
1403
|
from copy import deepcopy
from django.core.checks.templates import E001
from django.test import SimpleTestCase
from django.test.utils import override_settings
class CheckTemplateSettingsAppDirsTest(SimpleTestCase):
    # A deliberately invalid TEMPLATES setting: APP_DIRS and an explicit
    # 'loaders' option together, which the system check rejects (E001).
    TEMPLATES_APP_DIRS_AND_LOADERS = [
        {
            'BACKEND': 'django.template.backends.django.DjangoTemplates',
            'APP_DIRS': True,
            'OPTIONS': {
                'loaders': ['django.template.loaders.filesystem.Loader'],
            },
        },
    ]
    @property
    def func(self):
        # Imported lazily so the check function is resolved at test time.
        from django.core.checks.templates import check_setting_app_dirs_loaders
        return check_setting_app_dirs_loaders
    @override_settings(TEMPLATES=TEMPLATES_APP_DIRS_AND_LOADERS)
    def test_app_dirs_and_loaders(self):
        """
        Error if template loaders are specified and APP_DIRS is True.
        """
        self.assertEqual(self.func(None), [E001])
    def test_app_dirs_removed(self):
        # Dropping APP_DIRS resolves the conflict: no errors expected.
        TEMPLATES = deepcopy(self.TEMPLATES_APP_DIRS_AND_LOADERS)
        del TEMPLATES[0]['APP_DIRS']
        with self.settings(TEMPLATES=TEMPLATES):
            self.assertEqual(self.func(None), [])
    def test_loaders_removed(self):
        # Dropping the explicit loaders likewise resolves the conflict.
        TEMPLATES = deepcopy(self.TEMPLATES_APP_DIRS_AND_LOADERS)
        del TEMPLATES[0]['OPTIONS']['loaders']
        with self.settings(TEMPLATES=TEMPLATES):
            self.assertEqual(self.func(None), [])
|
bsd-3-clause
|
shipci/sympy
|
sympy/external/importtools.py
|
85
|
7294
|
"""Tools to assist importing optional external modules."""
from __future__ import print_function, division
import sys
# Override these in the module to change the default warning behavior.
# For example, you might set both to False before running the tests so that
# warnings are not printed to the console, or set both to True for debugging.
WARN_NOT_INSTALLED = None  # Default is False
WARN_OLD_VERSION = None  # Default is True


def __sympy_debug():
    """Return the SYMPY_DEBUG environment flag as a bool."""
    # helper function from sympy/__init__.py
    # We don't just import SYMPY_DEBUG from that file because we don't want to
    # import all of sympy just to use this module.
    import os
    debug_str = os.getenv('SYMPY_DEBUG', 'False')
    if debug_str in ('True', 'False'):
        # Direct comparison gives the same result as the old eval() for the
        # two accepted values, without executing environment-supplied text.
        return debug_str == 'True'
    else:
        raise RuntimeError("unrecognized value for SYMPY_DEBUG: %s" %
                           debug_str)

if __sympy_debug():
    WARN_OLD_VERSION = True
    WARN_NOT_INSTALLED = True


def import_module(module, min_module_version=None, min_python_version=None,
        warn_not_installed=None, warn_old_version=None,
        module_version_attr='__version__', module_version_attr_call_args=None,
        __import__kwargs={}, catch=()):
    """
    Import and return a module if it is installed.

    If the module is not installed, it returns None.

    A minimum version for the module can be given as the keyword argument
    min_module_version. This should be comparable against the module version.
    By default, module.__version__ is used to get the module version. To
    override this, set the module_version_attr keyword argument. If the
    attribute of the module to get the version should be called (e.g.,
    module.version()), then set module_version_attr_call_args to the args such
    that module.module_version_attr(*module_version_attr_call_args) returns the
    module's version.

    If the module version is less than min_module_version using the Python <
    comparison, None will be returned, even if the module is installed. You can
    use this to keep from importing an incompatible older version of a module.

    You can also specify a minimum Python version by using the
    min_python_version keyword argument. This should be comparable against
    sys.version_info.

    If the keyword argument warn_not_installed is set to True, the function will
    emit a UserWarning when the module is not installed.

    If the keyword argument warn_old_version is set to True, the function will
    emit a UserWarning when the library is installed, but cannot be imported
    because of the min_module_version or min_python_version options.

    Note that because of the way warnings are handled, a warning will be
    emitted for each module only once. You can change the default warning
    behavior by overriding the values of WARN_NOT_INSTALLED and WARN_OLD_VERSION
    in sympy.external.importtools. By default, WARN_NOT_INSTALLED is False and
    WARN_OLD_VERSION is True.

    This function uses __import__() to import the module. To pass additional
    options to __import__(), use the __import__kwargs keyword argument. For
    example, to import a submodule A.B, you must pass a nonempty fromlist option
    to __import__. See the docstring of __import__().

    This catches ImportError to determine if the module is not installed. To
    catch additional errors, pass them as a tuple to the catch keyword
    argument.

    Examples
    ========

    >>> from sympy.external import import_module

    >>> numpy = import_module('numpy')

    >>> numpy = import_module('numpy', min_python_version=(2, 7),
    ... warn_old_version=False)

    >>> numpy = import_module('numpy', min_module_version='1.5',
    ... warn_old_version=False) # numpy.__version__ is a string

    >>> # gmpy does not have __version__, but it does have gmpy.version()
    >>> gmpy = import_module('gmpy', min_module_version='1.14',
    ... module_version_attr='version', module_version_attr_call_args=(),
    ... warn_old_version=False)

    >>> # To import a submodule, you must pass a nonempty fromlist to
    >>> # __import__(). The values do not matter.
    >>> p3 = import_module('mpl_toolkits.mplot3d',
    ... __import__kwargs={'fromlist':['something']})

    >>> # matplotlib.pyplot can raise RuntimeError when the display cannot be opened
    >>> matplotlib = import_module('matplotlib',
    ... __import__kwargs={'fromlist':['pyplot']}, catch=(RuntimeError,))
    """
    # NOTE: ``__import__kwargs={}`` is a shared mutable default, but it is
    # only ever read (via .get), never mutated, so the sharing is harmless.
    # keyword argument overrides default, and global variable overrides
    # keyword argument.
    # NOTE(review): ``warn_old_version or True`` forces True whenever the
    # keyword is falsy; kept as-is to preserve the long-standing behavior.
    warn_old_version = (WARN_OLD_VERSION if WARN_OLD_VERSION is not None
        else warn_old_version or True)
    warn_not_installed = (WARN_NOT_INSTALLED if WARN_NOT_INSTALLED is not None
        else warn_not_installed or False)
    import warnings
    # Check Python first so we don't waste time importing a module we can't use
    if min_python_version:
        if sys.version_info < min_python_version:
            if warn_old_version:
                warnings.warn("Python version is too old to use %s "
                    "(%s or newer required)" % (
                        module, '.'.join(map(str, min_python_version))),
                    UserWarning)
            return
    # PyPy 1.6 has rudimentary NumPy support and importing it produces errors, so skip it
    if module == 'numpy' and '__pypy__' in sys.builtin_module_names:
        return
    try:
        mod = __import__(module, **__import__kwargs)
        ## there's something funny about imports with matplotlib and py3k. doing
        ##    from matplotlib import collections
        ## gives python's stdlib collections module. explicitly re-importing
        ## the module fixes this.
        from_list = __import__kwargs.get('fromlist', tuple())
        for submod in from_list:
            if submod == 'collections' and mod.__name__ == 'matplotlib':
                __import__(module + '.' + submod)
    except ImportError:
        if warn_not_installed:
            warnings.warn("%s module is not installed" % module, UserWarning)
        return
    except catch as e:
        if warn_not_installed:
            warnings.warn(
                "%s module could not be used (%s)" % (module, repr(e)))
        return
    if min_module_version:
        modversion = getattr(mod, module_version_attr)
        if module_version_attr_call_args is not None:
            modversion = modversion(*module_version_attr_call_args)
        if modversion < min_module_version:
            if warn_old_version:
                # Attempt to create a pretty string version of the version.
                # 'str' instead of the Python-2-only 'basestring': the old
                # check raised NameError on Python 3.
                if isinstance(min_module_version, str):
                    verstr = min_module_version
                elif isinstance(min_module_version, (tuple, list)):
                    verstr = '.'.join(map(str, min_module_version))
                else:
                    # Either don't know what this is. Hopefully
                    # it's something that has a nice str version, like an int.
                    verstr = str(min_module_version)
                warnings.warn("%s version is too old to use "
                    "(%s or newer required)" % (module, verstr),
                    UserWarning)
            return
    return mod
|
bsd-3-clause
|
lokkju/gr-smartnet
|
docs/doxygen/doxyxml/text.py
|
333
|
1832
|
#
# Copyright 2010 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
Utilities for extracting text from generated classes.
"""
def is_string(txt):
    """Return True when txt is a text string (str, or unicode on Python 2)."""
    try:
        # On Python 3 the name 'unicode' no longer exists; evaluating the
        # tuple raises NameError before isinstance is ever called.
        return isinstance(txt, (str, unicode))
    except NameError:
        return isinstance(txt, str)

# StandardError was removed in Python 3; fall back to Exception there so
# description_bit() can still raise on malformed input without NameError.
try:
    _BaseError = StandardError
except NameError:
    _BaseError = Exception

def description(obj):
    """Return the stripped text description of obj, or None when obj is None."""
    if obj is None:
        return None
    return description_bit(obj).strip()

def description_bit(obj):
    """
    Recursively pull the text out of a generated-class node.

    A node contributes the concatenated text of its ``content`` or
    ``content_`` children, the text of its ``value`` attribute, or itself
    when it is already a string. Paragraph nodes (``name == 'para'``) are
    followed by a blank line.

    Raises _BaseError (StandardError on Python 2, Exception on Python 3)
    for objects that carry no recognizable text.
    """
    if hasattr(obj, 'content'):
        result = ''.join(description_bit(item) for item in obj.content)
    elif hasattr(obj, 'content_'):
        result = ''.join(description_bit(item) for item in obj.content_)
    elif hasattr(obj, 'value'):
        result = description_bit(obj.value)
    elif is_string(obj):
        return obj
    else:
        raise _BaseError('Expecting a string or something with content, content_ or value attribute')
    # If this bit is a paragraph then add one some line breaks.
    if hasattr(obj, 'name') and obj.name == 'para':
        result += "\n\n"
    return result
|
gpl-3.0
|
jmt4/django-oscar
|
tests/integration/offer/fixed_price_benefit_tests.py
|
53
|
3007
|
from decimal import Decimal as D
from django.test import TestCase
import mock
from oscar.apps.offer import models
from oscar.test import factories
from oscar.test.basket import add_product, add_products
class TestAFixedPriceDiscountAppliedWithCountCondition(TestCase):
    """
    Tests for FixedPriceBenefit (bundle price 20.00) combined with a
    CountCondition requiring 3 items.
    """
    def setUp(self):
        # Named 'product_range' rather than 'range' so the builtin is not
        # shadowed inside setUp.
        product_range = models.Range.objects.create(
            name="All products", includes_all_products=True)
        self.condition = models.CountCondition.objects.create(
            range=product_range,
            type=models.Condition.COUNT,
            value=3)
        self.benefit = models.FixedPriceBenefit.objects.create(
            range=product_range,
            type=models.Benefit.FIXED_PRICE,
            value=D('20.00'))
        self.offer = mock.Mock()
        self.basket = factories.create_basket(empty=True)
    def test_applies_correctly_to_empty_basket(self):
        # No items: no discount, nothing affected.
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('0.00'), result.discount)
        self.assertEqual(0, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)
    def test_applies_correctly_to_basket_which_is_worth_less_than_value(self):
        # 3 x 6.00 = 18.00 < 20.00 fixed price: no discount to give.
        add_product(self.basket, D('6.00'), 3)
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('0.00'), result.discount)
        self.assertEqual(0, self.basket.num_items_with_discount)
        self.assertEqual(3, self.basket.num_items_without_discount)
    def test_applies_correctly_to_basket_which_is_worth_the_same_as_value(self):
        # 4 x 5.00 = 20.00 == fixed price: zero discount.
        add_product(self.basket, D('5.00'), 4)
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('0.00'), result.discount)
        self.assertEqual(0, self.basket.num_items_with_discount)
        self.assertEqual(4, self.basket.num_items_without_discount)
    def test_applies_correctly_to_basket_which_is_more_than_value(self):
        # 3 of the 4 items (3 x 8.00 = 24.00) are discounted down to 20.00.
        add_product(self.basket, D('8.00'), 4)
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('4.00'), result.discount)
        self.assertEqual(3, self.basket.num_items_with_discount)
        self.assertEqual(1, self.basket.num_items_without_discount)
    def test_rounding_error_for_multiple_products(self):
        add_products(self.basket,
            [(D('7.00'), 1), (D('7.00'), 1), (D('7.00'), 1)])
        result = self.benefit.apply(self.basket, self.condition, self.offer)
        self.assertEqual(D('1.00'), result.discount)
        # Make sure discount together is the same as final discount
        # Rounding error would return 0.99 instead 1.00
        cumulative_discount = sum(
            line.discount_value for line in self.basket.all_lines())
        self.assertEqual(result.discount, cumulative_discount)
        self.assertEqual(3, self.basket.num_items_with_discount)
        self.assertEqual(0, self.basket.num_items_without_discount)
|
bsd-3-clause
|
MwanzanFelipe/rockletonfortune
|
lib/django/middleware/security.py
|
510
|
1753
|
import re
from django.conf import settings
from django.http import HttpResponsePermanentRedirect
class SecurityMiddleware(object):
    """
    Add security-related response headers and optionally redirect plain
    HTTP requests to HTTPS, driven by the SECURE_* settings.
    """
    def __init__(self):
        self.sts_seconds = settings.SECURE_HSTS_SECONDS
        self.sts_include_subdomains = settings.SECURE_HSTS_INCLUDE_SUBDOMAINS
        self.content_type_nosniff = settings.SECURE_CONTENT_TYPE_NOSNIFF
        self.xss_filter = settings.SECURE_BROWSER_XSS_FILTER
        self.redirect = settings.SECURE_SSL_REDIRECT
        self.redirect_host = settings.SECURE_SSL_HOST
        # Pre-compile the exemption patterns once at startup.
        self.redirect_exempt = [re.compile(r) for r in settings.SECURE_REDIRECT_EXEMPT]

    def process_request(self, request):
        # Guard clauses: nothing to do unless redirecting an insecure request.
        if not self.redirect or request.is_secure():
            return
        path = request.path.lstrip("/")
        if any(pattern.search(path) for pattern in self.redirect_exempt):
            return
        target_host = self.redirect_host or request.get_host()
        return HttpResponsePermanentRedirect(
            "https://%s%s" % (target_host, request.get_full_path())
        )

    def process_response(self, request, response):
        # HSTS only over secure connections, never overwriting an existing header.
        if (self.sts_seconds and request.is_secure() and
                'strict-transport-security' not in response):
            value = "max-age=%s" % self.sts_seconds
            if self.sts_include_subdomains:
                value = value + "; includeSubDomains"
            response["strict-transport-security"] = value
        if self.content_type_nosniff and 'x-content-type-options' not in response:
            response["x-content-type-options"] = "nosniff"
        if self.xss_filter and 'x-xss-protection' not in response:
            response["x-xss-protection"] = "1; mode=block"
        return response
|
bsd-3-clause
|
dhimmel/networkx
|
examples/basic/properties.py
|
44
|
1071
|
#!/usr/bin/env python
"""
Compute some network properties for the lollipop graph.
"""
# Copyright (C) 2004 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
from networkx import *
# Lollipop graph: a complete graph K4 joined to a 6-node path.
G = lollipop_graph(4,6)
pathlengths=[]
print("source vertex {target:length, }")
# Collect every shortest-path length from every source vertex.
for v in G.nodes():
    spl=single_source_shortest_path_length(G,v)
    print('%s %s' % (v,spl))
    for p in spl.values():
        pathlengths.append(p)
print('')
print("average shortest path length %s" % (sum(pathlengths)/len(pathlengths)))
# histogram of path lengths
dist={}
for p in pathlengths:
    if p in dist:
        dist[p]+=1
    else:
        dist[p]=1
print('')
print("length #paths")
verts=dist.keys()
for d in sorted(verts):
    print('%s %d' % (d,dist[d]))
# Global graph properties computed by networkx.
print("radius: %d" % radius(G))
print("diameter: %d" % diameter(G))
print("eccentricity: %s" % eccentricity(G))
print("center: %s" % center(G))
print("periphery: %s" % periphery(G))
print("density: %s" % density(G))
|
bsd-3-clause
|
DMLoy/ECommerceBasic
|
lib/python2.7/site-packages/django/templatetags/i18n.py
|
107
|
16719
|
from __future__ import unicode_literals
import re
from django.template import (Node, Variable, TemplateSyntaxError,
TokenParser, Library, TOKEN_TEXT, TOKEN_VAR)
from django.template.base import _render_value_in_context
from django.template.defaulttags import token_kwargs
from django.utils import six
from django.utils import translation
register = Library()
class GetAvailableLanguagesNode(Node):
    """Store settings.LANGUAGES (with translated display names) in a context variable."""
    def __init__(self, variable):
        self.variable = variable
    def render(self, context):
        from django.conf import settings
        # Translate each language's display name at render time.
        context[self.variable] = [(k, translation.ugettext(v)) for k, v in settings.LANGUAGES]
        return ''
class GetLanguageInfoNode(Node):
    """Store the language-info dict for a single language code in the context."""
    def __init__(self, lang_code, variable):
        self.lang_code = Variable(lang_code)
        self.variable = variable
    def render(self, context):
        lang_code = self.lang_code.resolve(context)
        context[self.variable] = translation.get_language_info(lang_code)
        return ''
class GetLanguageInfoListNode(Node):
    """Store a list of language-info dicts for a sequence of language codes."""
    def __init__(self, languages, variable):
        self.languages = Variable(languages)
        self.variable = variable
    def get_language_info(self, language):
        # ``language`` is either a language code string or a sequence
        # with the language code as its first item
        if len(language[0]) > 1:
            return translation.get_language_info(language[0])
        else:
            return translation.get_language_info(str(language))
    def render(self, context):
        langs = self.languages.resolve(context)
        context[self.variable] = [self.get_language_info(lang) for lang in langs]
        return ''
class GetCurrentLanguageNode(Node):
    """Store the currently active language code in a context variable."""
    def __init__(self, variable):
        self.variable = variable
    def render(self, context):
        context[self.variable] = translation.get_language()
        return ''
class GetCurrentLanguageBidiNode(Node):
    """Store the current language's bidi flag (True = right-to-left) in the context."""
    def __init__(self, variable):
        self.variable = variable
    def render(self, context):
        context[self.variable] = translation.get_language_bidi()
        return ''
class TranslateNode(Node):
    """
    Renders a single translated string ({% trans %}), optionally with a
    message context, a noop flag, or assignment to a context variable.
    """
    def __init__(self, filter_expression, noop, asvar=None,
            message_context=None):
        self.noop = noop
        self.asvar = asvar
        self.message_context = message_context
        self.filter_expression = filter_expression
        # Bare string literals are wrapped into a quoted Variable so the
        # filter-expression machinery can resolve them uniformly.
        if isinstance(self.filter_expression.var, six.string_types):
            self.filter_expression.var = Variable("'%s'" %
                self.filter_expression.var)
    def render(self, context):
        # 'noop' marks the string for extraction without translating it.
        self.filter_expression.var.translate = not self.noop
        if self.message_context:
            self.filter_expression.var.message_context = (
                self.message_context.resolve(context))
        output = self.filter_expression.resolve(context)
        value = _render_value_in_context(output, context)
        if self.asvar:
            # Store into the named context variable instead of emitting.
            context[self.asvar] = value
            return ''
        else:
            return value
class BlockTranslateNode(Node):
    """
    Renders a {% blocktrans %} block: a translatable chunk of template text
    with interpolated variables and optional pluralization / message context.
    """
    def __init__(self, extra_context, singular, plural=None, countervar=None,
            counter=None, message_context=None):
        self.extra_context = extra_context
        self.singular = singular
        self.plural = plural
        self.countervar = countervar
        self.counter = counter
        self.message_context = message_context
    def render_token_list(self, tokens):
        # Turn a token list into a %-format string plus the names of the
        # variables it references; literal '%' is escaped so it survives
        # the later string formatting.
        result = []
        vars = []
        for token in tokens:
            if token.token_type == TOKEN_TEXT:
                result.append(token.contents.replace('%', '%%'))
            elif token.token_type == TOKEN_VAR:
                result.append('%%(%s)s' % token.contents)
                vars.append(token.contents)
        return ''.join(result), vars
    def render(self, context, nested=False):
        if self.message_context:
            message_context = self.message_context.resolve(context)
        else:
            message_context = None
        tmp_context = {}
        for var, val in self.extra_context.items():
            tmp_context[var] = val.resolve(context)
        # Update() works like a push(), so corresponding context.pop() is at
        # the end of function
        context.update(tmp_context)
        singular, vars = self.render_token_list(self.singular)
        if self.plural and self.countervar and self.counter:
            # Pluralized form: pick singular/plural via (n)pgettext.
            count = self.counter.resolve(context)
            context[self.countervar] = count
            plural, plural_vars = self.render_token_list(self.plural)
            if message_context:
                result = translation.npgettext(message_context, singular,
                    plural, count)
            else:
                result = translation.ungettext(singular, plural, count)
            vars.extend(plural_vars)
        else:
            if message_context:
                result = translation.pgettext(message_context, singular)
            else:
                result = translation.ugettext(singular)
        data = dict([(v, _render_value_in_context(context.get(v, ''), context)) for v in vars])
        context.pop()
        try:
            result = result % data
        except (KeyError, ValueError):
            if nested:
                # Either string is malformed, or it's a bug
                raise TemplateSyntaxError("'blocktrans' is unable to format "
                    "string returned by gettext: %r using %r" % (result, data))
            # Retry once with translation disabled; if the untranslated
            # string fails too, the nested call above raises.
            with translation.override(None):
                result = self.render(context, nested=True)
        return result
class LanguageNode(Node):
    """Render the wrapped nodelist with a specific language activated."""
    def __init__(self, nodelist, language):
        self.nodelist = nodelist
        self.language = language
    def render(self, context):
        # Resolve the language at render time, then activate it only for
        # the duration of rendering the enclosed nodes.
        active_language = self.language.resolve(context)
        with translation.override(active_language):
            return self.nodelist.render(context)
@register.tag("get_available_languages")
def do_get_available_languages(parser, token):
    """
    Store a list of available languages in the context.

    Usage::

        {% get_available_languages as languages %}

    Pulls the LANGUAGES setting from your setting file (or the default
    settings) and puts it into the named variable.
    """
    args = token.contents.split()
    # Expected form: get_available_languages as <variable>
    if len(args) != 3 or args[1] != 'as':
        raise TemplateSyntaxError("'get_available_languages' requires 'as variable' (got %r)" % args)
    return GetAvailableLanguagesNode(args[2])
@register.tag("get_language_info")
def do_get_language_info(parser, token):
    """
    Store the language information dictionary for the given language code
    in a context variable.

    Usage::

        {% get_language_info for LANGUAGE_CODE as l %}
        {{ l.code }} / {{ l.name }} / {{ l.name_local }} / {{ l.bidi }}
    """
    args = token.contents.split()
    # Expected form: get_language_info for <code> as <variable>
    if len(args) != 5 or args[1] != 'for' or args[3] != 'as':
        raise TemplateSyntaxError("'%s' requires 'for string as variable' (got %r)" % (args[0], args[1:]))
    return GetLanguageInfoNode(args[2], args[4])
@register.tag("get_language_info_list")
def do_get_language_info_list(parser, token):
    """
    Store a list of language information dictionaries for the given
    language codes in a context variable. The language codes can be
    specified either as a list of strings or a settings.LANGUAGES style
    tuple (or any sequence of sequences whose first items are language
    codes).

    Usage::

        {% get_language_info_list for LANGUAGES as langs %}
    """
    args = token.contents.split()
    # Expected form: get_language_info_list for <sequence> as <variable>
    if len(args) != 5 or args[1] != 'for' or args[3] != 'as':
        raise TemplateSyntaxError("'%s' requires 'for sequence as variable' (got %r)" % (args[0], args[1:]))
    return GetLanguageInfoListNode(args[2], args[4])
@register.filter
def language_name(lang_code):
    # English display name of the language, e.g. 'German'.
    return translation.get_language_info(lang_code)['name']
@register.filter
def language_name_local(lang_code):
    # Display name in the language itself, e.g. 'Deutsch'.
    return translation.get_language_info(lang_code)['name_local']
@register.filter
def language_bidi(lang_code):
    # True when the language is written right-to-left.
    return translation.get_language_info(lang_code)['bidi']
@register.tag("get_current_language")
def do_get_current_language(parser, token):
    """
    Store the currently active language code in the context.

    Usage::

        {% get_current_language as language %}
    """
    args = token.contents.split()
    if len(args) != 3 or args[1] != 'as':
        raise TemplateSyntaxError("'get_current_language' requires 'as variable' (got %r)" % args)
    return GetCurrentLanguageNode(args[2])
@register.tag("get_current_language_bidi")
def do_get_current_language_bidi(parser, token):
    """
    Store the current language's layout direction in the context.

    Usage::

        {% get_current_language_bidi as bidi %}

    True indicates right-to-left layout, otherwise left-to-right.
    """
    args = token.contents.split()
    if len(args) != 3 or args[1] != 'as':
        raise TemplateSyntaxError("'get_current_language_bidi' requires 'as variable' (got %r)" % args)
    return GetCurrentLanguageBidiNode(args[2])
@register.tag("trans")
def do_translate(parser, token):
    """
    Mark a string for translation and translate it for the current language.

    Usage::

        {% trans "this is a test" %}
        {% trans "this is a test" noop %}
        {% trans variable %}
        {% trans "this is a test" as var %}
        {% trans "this is a test" context "greeting" %}

    ``noop`` only marks the string for extraction and returns it unchanged;
    ``as`` stores the result in a context variable instead of emitting it;
    ``context`` makes the lookup use pgettext instead of (u)gettext.
    """
    class TranslateParser(TokenParser):
        def top(self):
            value = self.value()

            # Backwards Compatiblity fix:
            # FilterExpression does not support single-quoted strings,
            # so we make a cheap localized fix in order to maintain
            # backwards compatibility with existing uses of ``trans``
            # where single quote use is supported.
            if value[0] == "'":
                # Single-quoted literal, possibly followed by |filters:
                # rewrite it as a double-quoted literal, escaping any
                # embedded double quotes.
                m = re.match("^'([^']+)'(\|.*$)", value)
                if m:
                    value = '"%s"%s' % (m.group(1).replace('"','\\"'), m.group(2))
                elif value[-1] == "'":
                    value = '"%s"' % value[1:-1].replace('"','\\"')

            noop = False
            asvar = None
            message_context = None

            # Consume the trailing options: noop / context "..." / as VAR.
            while self.more():
                tag = self.tag()
                if tag == 'noop':
                    noop = True
                elif tag == 'context':
                    message_context = parser.compile_filter(self.value())
                elif tag == 'as':
                    asvar = self.tag()
                else:
                    raise TemplateSyntaxError(
                        "Only options for 'trans' are 'noop', " \
                        "'context \"xxx\"', and 'as VAR'.")
            return value, noop, asvar, message_context
    value, noop, asvar, message_context = TranslateParser(token.contents).top()
    return TranslateNode(parser.compile_filter(value), noop, asvar,
        message_context)
@register.tag("blocktrans")
def do_block_translate(parser, token):
    """
    This will translate a block of text with parameters.
    Usage::
        {% blocktrans with bar=foo|filter boo=baz|filter %}
        This is {{ bar }} and {{ boo }}.
        {% endblocktrans %}
    Additionally, this supports pluralization::
        {% blocktrans count count=var|length %}
        There is {{ count }} object.
        {% plural %}
        There are {{ count }} objects.
        {% endblocktrans %}
    This is much like ngettext, only in template syntax.
    The "var as value" legacy format is still supported::
        {% blocktrans with foo|filter as bar and baz|filter as boo %}
        {% blocktrans count var|length as count %}
    Contextual translations are supported::
        {% blocktrans with bar=foo|filter context "greeting" %}
        This is {{ bar }}.
        {% endblocktrans %}
    This is equivalent to calling pgettext/npgettext instead of
    (u)gettext/(u)ngettext.
    """
    bits = token.split_contents()
    # Collect the tag's options ('with', 'count', 'context'); each option may
    # be given at most once.
    options = {}
    remaining_bits = bits[1:]
    while remaining_bits:
        option = remaining_bits.pop(0)
        if option in options:
            raise TemplateSyntaxError('The %r option was specified more '
                                      'than once.' % option)
        if option == 'with':
            # support_legacy=True also accepts the old "expr as name" spelling.
            value = token_kwargs(remaining_bits, parser, support_legacy=True)
            if not value:
                raise TemplateSyntaxError('"with" in %r tag needs at least '
                                          'one keyword argument.' % bits[0])
        elif option == 'count':
            value = token_kwargs(remaining_bits, parser, support_legacy=True)
            if len(value) != 1:
                raise TemplateSyntaxError('"count" in %r tag expected exactly '
                                          'one keyword argument.' % bits[0])
        elif option == "context":
            try:
                value = remaining_bits.pop(0)
                value = parser.compile_filter(value)
            except Exception:
                raise TemplateSyntaxError('"context" in %r tag expected '
                                          'exactly one argument.' % bits[0])
        else:
            raise TemplateSyntaxError('Unknown argument for %r tag: %r.' %
                                      (bits[0], option))
        options[option] = value
    # The single 'count' entry maps the counter variable name to its
    # compiled expression.
    if 'count' in options:
        countervar, counter = list(six.iteritems(options['count']))[0]
    else:
        countervar, counter = None, None
    if 'context' in options:
        message_context = options['context']
    else:
        message_context = None
    extra_context = options.get('with', {})
    # Gather the singular form: text/variable tokens up to the first block
    # tag ({% plural %} or {% endblocktrans %}).
    # NOTE: `token` is rebound here, shadowing the parameter.
    singular = []
    plural = []
    while parser.tokens:
        token = parser.next_token()
        if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
            singular.append(token)
        else:
            break
    if countervar and counter:
        # When a counter is given, a {% plural %} section must follow.
        if token.contents.strip() != 'plural':
            raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags inside it")
        while parser.tokens:
            token = parser.next_token()
            if token.token_type in (TOKEN_VAR, TOKEN_TEXT):
                plural.append(token)
            else:
                break
    if token.contents.strip() != 'endblocktrans':
        raise TemplateSyntaxError("'blocktrans' doesn't allow other block tags (seen %r) inside it" % token.contents)
    return BlockTranslateNode(extra_context, singular, plural, countervar,
                              counter, message_context)
@register.tag
def language(parser, token):
    """
    Enable the given language just for this block.
    Usage::
        {% language "de" %}
        This is {{ bar }} and {{ boo }}.
        {% endlanguage %}
    """
    bits = token.split_contents()
    # Exactly one argument (the language code or an expression) is required.
    if len(bits) != 2:
        raise TemplateSyntaxError("'%s' takes one argument (language)" % bits[0])
    lang_expr = parser.compile_filter(bits[1])
    body = parser.parse(('endlanguage',))
    # Consume the {% endlanguage %} token itself.
    parser.delete_first_token()
    return LanguageNode(body, lang_expr)
|
mit
|
willhardy/django
|
django/shortcuts.py
|
117
|
5429
|
"""
This module collects helper functions and classes that "span" multiple levels
of MVC. In other words, these functions/classes introduce controlled coupling
for convenience's sake.
"""
from django.http import (
Http404, HttpResponse, HttpResponsePermanentRedirect, HttpResponseRedirect,
)
from django.template import loader
from django.urls import NoReverseMatch, reverse
from django.utils import six
from django.utils.encoding import force_text
from django.utils.functional import Promise
def render_to_response(template_name, context=None, content_type=None, status=None, using=None):
    """
    Render *template_name* via django.template.loader.render_to_string()
    with the passed arguments and wrap the result in an HttpResponse.
    """
    rendered = loader.render_to_string(template_name, context, using=using)
    return HttpResponse(rendered, content_type, status)
def render(request, template_name, context=None, content_type=None, status=None, using=None):
    """
    Render *template_name* for *request* via
    django.template.loader.render_to_string() and return an HttpResponse.
    """
    body = loader.render_to_string(template_name, context, request, using=using)
    return HttpResponse(body, content_type, status)
def redirect(to, *args, **kwargs):
    """
    Return an HttpResponseRedirect to the appropriate URL for the arguments
    passed.
    The arguments could be:
    * A model: the model's `get_absolute_url()` function will be called.
    * A view name, possibly with arguments: `urls.reverse()` will be used
      to reverse-resolve the name.
    * A URL, which will be used as-is for the redirect location.
    By default issues a temporary redirect; pass permanent=True to issue a
    permanent redirect.
    """
    # 'permanent' is popped so it is not forwarded to resolve_url().
    permanent = kwargs.pop('permanent', False)
    response_class = (HttpResponsePermanentRedirect if permanent
                      else HttpResponseRedirect)
    return response_class(resolve_url(to, *args, **kwargs))
def _get_queryset(klass):
"""
Return a QuerySet or a Manager.
Duck typing in action: any class with a `get()` method (for
get_object_or_404) or a `filter()` method (for get_list_or_404) might do
the job.
"""
# If it is a model class or anything else with ._default_manager
if hasattr(klass, '_default_manager'):
return klass._default_manager.all()
return klass
def get_object_or_404(klass, *args, **kwargs):
    """
    Use get() to return an object, or raise a Http404 exception if the object
    does not exist.
    klass may be a Model, Manager, or QuerySet object. All other passed
    arguments and keyword arguments are used in the get() query.
    Note: Like with get(), an MultipleObjectsReturned will be raised if more than one
    object is found.
    """
    queryset = _get_queryset(klass)
    try:
        return queryset.get(*args, **kwargs)
    except AttributeError:
        # No .get() -- the caller passed something that is not query-like.
        if isinstance(klass, type):
            klass__name = klass.__name__
        else:
            klass__name = klass.__class__.__name__
        raise ValueError(
            "First argument to get_object_or_404() must be a Model, Manager, "
            "or QuerySet, not '%s'." % klass__name
        )
    except queryset.model.DoesNotExist:
        raise Http404('No %s matches the given query.' % queryset.model._meta.object_name)
def get_list_or_404(klass, *args, **kwargs):
    """
    Use filter() to return a list of objects, or raise a Http404 exception if
    the list is empty.
    klass may be a Model, Manager, or QuerySet object. All other passed
    arguments and keyword arguments are used in the filter() query.
    """
    queryset = _get_queryset(klass)
    try:
        obj_list = list(queryset.filter(*args, **kwargs))
    except AttributeError:
        # No .filter() -- the caller passed something that is not query-like.
        if isinstance(klass, type):
            klass__name = klass.__name__
        else:
            klass__name = klass.__class__.__name__
        raise ValueError(
            "First argument to get_list_or_404() must be a Model, Manager, or "
            "QuerySet, not '%s'." % klass__name
        )
    if not obj_list:
        raise Http404('No %s matches the given query.' % queryset.model._meta.object_name)
    return obj_list
def resolve_url(to, *args, **kwargs):
    """
    Return a URL appropriate for the arguments passed.
    The arguments could be:
    * A model: the model's `get_absolute_url()` function will be called.
    * A view name, possibly with arguments: `urls.reverse()` will be used
      to reverse-resolve the name.
    * A URL, which will be returned as-is.
    """
    # Model-like objects know their own URL.
    if hasattr(to, 'get_absolute_url'):
        return to.get_absolute_url()
    if isinstance(to, Promise):
        # Expand the lazy instance, as it can cause issues when it is passed
        # further to some Python functions like urlparse.
        to = force_text(to)
    # Relative URLs pass through untouched.
    if isinstance(to, six.string_types) and to.startswith(('./', '../')):
        return to
    # Next try a reverse URL resolution.
    try:
        return reverse(to, args=args, kwargs=kwargs)
    except NoReverseMatch:
        # If this is a callable, re-raise.
        if callable(to):
            raise
        # If this doesn't "feel" like a URL, re-raise.
        if '/' not in to and '.' not in to:
            raise
    # Finally, fall back and assume it's a URL.
    return to
|
bsd-3-clause
|
hwu25/AppPkg
|
Applications/Python/Python-2.7.2/Lib/distutils/tests/test_cmd.py
|
10
|
4028
|
"""Tests for distutils.cmd."""
import unittest
import os
from test.test_support import captured_stdout, run_unittest
from distutils.cmd import Command
from distutils.dist import Distribution
from distutils.errors import DistutilsOptionError
from distutils import debug
class MyCmd(Command):
    # Minimal concrete Command subclass for the tests below;
    # initialize_options is overridden as a no-op (presumably because the
    # base class requires subclasses to supply it -- verify against
    # distutils.cmd.Command).
    def initialize_options(self):
        pass
class CommandTestCase(unittest.TestCase):
    """Tests for the option-handling helpers on distutils.cmd.Command."""
    def setUp(self):
        # Fresh Distribution/Command pair for each test.
        dist = Distribution()
        self.cmd = MyCmd(dist)
    def test_ensure_string_list_raises(self):
        # BUG FIX: this method was originally also named
        # ``test_ensure_string_list``; the later definition of that name in
        # this class body silently shadowed it, so these raise-checks never
        # ran. Renamed so that both tests execute.
        cmd = self.cmd
        cmd.not_string_list = ['one', 2, 'three']
        cmd.yes_string_list = ['one', 'two', 'three']
        cmd.not_string_list2 = object()
        cmd.yes_string_list2 = 'ok'
        cmd.ensure_string_list('yes_string_list')
        cmd.ensure_string_list('yes_string_list2')
        self.assertRaises(DistutilsOptionError,
                          cmd.ensure_string_list, 'not_string_list')
        self.assertRaises(DistutilsOptionError,
                          cmd.ensure_string_list, 'not_string_list2')
    def test_make_file(self):
        cmd = self.cmd
        # making sure it raises when infiles is not a string or a list/tuple
        self.assertRaises(TypeError, cmd.make_file,
                          infiles=1, outfile='', func='func', args=())
        # making sure execute gets called properly
        def _execute(func, args, exec_msg, level):
            self.assertEqual(exec_msg, 'generating out from in')
        cmd.force = True
        cmd.execute = _execute
        cmd.make_file(infiles='in', outfile='out', func='func', args=())
    def test_dump_options(self):
        msgs = []
        def _announce(msg, level):
            msgs.append(msg)
        cmd = self.cmd
        cmd.announce = _announce
        cmd.option1 = 1
        cmd.option2 = 1
        cmd.user_options = [('option1', '', ''), ('option2', '', '')]
        cmd.dump_options()
        wanted = ["command options for 'MyCmd':", ' option1 = 1',
                  ' option2 = 1']
        self.assertEqual(msgs, wanted)
    def test_ensure_string(self):
        cmd = self.cmd
        cmd.option1 = 'ok'
        cmd.ensure_string('option1')
        # A None value is replaced by the supplied default.
        cmd.option2 = None
        cmd.ensure_string('option2', 'xxx')
        self.assertTrue(hasattr(cmd, 'option2'))
        cmd.option3 = 1
        self.assertRaises(DistutilsOptionError, cmd.ensure_string, 'option3')
    def test_ensure_string_list(self):
        cmd = self.cmd
        # A comma-separated string is split into a list of strings.
        cmd.option1 = 'ok,dok'
        cmd.ensure_string_list('option1')
        self.assertEqual(cmd.option1, ['ok', 'dok'])
        cmd.option2 = ['xxx', 'www']
        cmd.ensure_string_list('option2')
        cmd.option3 = ['ok', 2]
        self.assertRaises(DistutilsOptionError, cmd.ensure_string_list,
                          'option3')
    def test_ensure_filename(self):
        cmd = self.cmd
        cmd.option1 = __file__
        cmd.ensure_filename('option1')
        cmd.option2 = 'xxx'
        self.assertRaises(DistutilsOptionError, cmd.ensure_filename, 'option2')
    def test_ensure_dirname(self):
        cmd = self.cmd
        cmd.option1 = os.path.dirname(__file__) or os.curdir
        cmd.ensure_dirname('option1')
        cmd.option2 = 'xxx'
        self.assertRaises(DistutilsOptionError, cmd.ensure_dirname, 'option2')
    def test_debug_print(self):
        cmd = self.cmd
        # debug_print is silent unless distutils.debug.DEBUG is set.
        with captured_stdout() as stdout:
            cmd.debug_print('xxx')
        stdout.seek(0)
        self.assertEqual(stdout.read(), '')
        debug.DEBUG = True
        try:
            with captured_stdout() as stdout:
                cmd.debug_print('xxx')
            stdout.seek(0)
            self.assertEqual(stdout.read(), 'xxx\n')
        finally:
            debug.DEBUG = False
def test_suite():
    """Build the suite of all CommandTestCase tests."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(CommandTestCase)
if __name__ == '__main__':
    run_unittest(test_suite())
|
bsd-2-clause
|
Venturi/oldcms
|
env/lib/python2.7/site-packages/pip/req/req_file.py
|
239
|
9670
|
"""
Requirements file parsing
"""
from __future__ import absolute_import
import os
import re
import shlex
import optparse
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves import filterfalse
import pip
from pip.download import get_file_content
from pip.req.req_install import InstallRequirement
from pip.exceptions import (RequirementsFileParseError)
from pip.utils import normalize_name
from pip import cmdoptions
__all__ = ['parse_requirements']
# Matches absolute http(s)/file URLs; used to distinguish URL-based
# requirement files/paths from local filesystem paths.
SCHEME_RE = re.compile(r'^(http|https|file):', re.I)
# Matches a '#' comment (at line start or preceded by whitespace) through
# end of line.
COMMENT_RE = re.compile(r'(^|\s)+#.*$')
# Options that may appear on a requirements-file line and affect global
# state (index configuration, nested files, wheel/external handling).
SUPPORTED_OPTIONS = [
    cmdoptions.constraints,
    cmdoptions.editable,
    cmdoptions.requirements,
    cmdoptions.no_index,
    cmdoptions.index_url,
    cmdoptions.find_links,
    cmdoptions.extra_index_url,
    cmdoptions.allow_external,
    cmdoptions.allow_all_external,
    cmdoptions.no_allow_external,
    cmdoptions.allow_unsafe,
    cmdoptions.no_allow_unsafe,
    cmdoptions.use_wheel,
    cmdoptions.no_use_wheel,
    cmdoptions.always_unzip,
    cmdoptions.no_binary,
    cmdoptions.only_binary,
]
# options to be passed to requirements (scoped per-requirement, not global)
SUPPORTED_OPTIONS_REQ = [
    cmdoptions.install_options,
    cmdoptions.global_options
]
# the 'dest' string values of the per-requirement options above
SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]
def parse_requirements(filename, finder=None, comes_from=None, options=None,
                       session=None, constraint=False, wheel_cache=None):
    """Parse a requirements file and yield InstallRequirement instances.
    :param filename: Path or url of requirements file.
    :param finder: Instance of pip.index.PackageFinder.
    :param comes_from: Origin description of requirements.
    :param options: Global options.
    :param session: Instance of pip.download.PipSession.
    :param constraint: If true, parsing a constraint file rather than
        requirements file.
    :param wheel_cache: Instance of pip.wheel.WheelCache
    """
    if session is None:
        raise TypeError(
            "parse_requirements() missing 1 required keyword argument: "
            "'session'"
        )
    _, content = get_file_content(
        filename, comes_from=comes_from, session=session
    )
    # Normalize the raw text: drop comments and blanks, splice continuation
    # lines, then apply --skip-requirements-regex filtering.
    logical_lines = skip_regex(
        join_lines(ignore_comments(content.splitlines())), options)
    for line_number, line in enumerate(logical_lines, start=1):
        for req in process_line(line, filename, line_number, finder,
                                comes_from, options, session, wheel_cache,
                                constraint=constraint):
            yield req
def process_line(line, filename, line_number, finder=None, comes_from=None,
                 options=None, session=None, wheel_cache=None,
                 constraint=False):
    """Process a single requirements line; This can result in creating/yielding
    requirements, or updating the finder.
    For lines that contain requirements, the only options that have an effect
    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
    ignored.
    For lines that do not contain requirements, the only options that have an
    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
    be present, but are ignored. These lines may contain multiple options
    (although our docs imply only one is supported), and all are parsed and
    affect the finder.
    :param constraint: If True, parsing a constraints file.
    """
    # Build a fresh optparse parser and seed its defaults.
    parser = build_parser()
    defaults = parser.get_default_values()
    defaults.index_url = None
    if finder:
        # `finder.format_control` will be updated during parsing
        defaults.format_control = finder.format_control
    # Split the line so only the option part goes through shlex/optparse;
    # the args part may contain markers that shlex would corrupt.
    args_str, options_str = break_args_options(line)
    opts, _ = parser.parse_args(shlex.split(options_str), defaults)
    # preserve for the nested code path
    line_comes_from = '%s %s (line %s)' % (
        '-c' if constraint else '-r', filename, line_number)
    # yield a line requirement
    if args_str:
        isolated = options.isolated_mode if options else False
        if options:
            cmdoptions.check_install_build_global(options, opts)
        # get the options that apply to requirements
        req_options = {}
        for dest in SUPPORTED_OPTIONS_REQ_DEST:
            if dest in opts.__dict__ and opts.__dict__[dest]:
                req_options[dest] = opts.__dict__[dest]
        yield InstallRequirement.from_line(
            args_str, line_comes_from, constraint=constraint,
            isolated=isolated, options=req_options, wheel_cache=wheel_cache
        )
    # yield an editable requirement
    elif opts.editables:
        isolated = options.isolated_mode if options else False
        default_vcs = options.default_vcs if options else None
        yield InstallRequirement.from_editable(
            opts.editables[0], comes_from=line_comes_from,
            constraint=constraint, default_vcs=default_vcs, isolated=isolated,
            wheel_cache=wheel_cache
        )
    # parse a nested requirements file (recurses via parse_requirements)
    elif opts.requirements or opts.constraints:
        if opts.requirements:
            req_path = opts.requirements[0]
            nested_constraint = False
        else:
            req_path = opts.constraints[0]
            nested_constraint = True
        # original file is over http
        if SCHEME_RE.search(filename):
            # do a url join so relative paths work
            req_path = urllib_parse.urljoin(filename, req_path)
        # original file and nested file are paths
        elif not SCHEME_RE.search(req_path):
            # do a join so relative paths work
            # NOTE(review): req_dir is assigned but unused here.
            req_dir = os.path.dirname(filename)
            req_path = os.path.join(os.path.dirname(filename), req_path)
        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
        parser = parse_requirements(
            req_path, finder, comes_from, options, session,
            constraint=nested_constraint, wheel_cache=wheel_cache
        )
        for req in parser:
            yield req
    # set finder options (option-only line; nothing is yielded)
    elif finder:
        if opts.index_url:
            finder.index_urls = [opts.index_url]
        if opts.use_wheel is False:
            finder.use_wheel = False
            pip.index.fmt_ctl_no_use_wheel(finder.format_control)
        if opts.no_index is True:
            finder.index_urls = []
        if opts.allow_all_external:
            finder.allow_all_external = opts.allow_all_external
        if opts.extra_index_urls:
            finder.index_urls.extend(opts.extra_index_urls)
        if opts.allow_external:
            finder.allow_external |= set(
                [normalize_name(v).lower() for v in opts.allow_external])
        if opts.allow_unverified:
            # Remove after 7.0
            finder.allow_unverified |= set(
                [normalize_name(v).lower() for v in opts.allow_unverified])
        if opts.find_links:
            # FIXME: it would be nice to keep track of the source
            # of the find_links: support a find-links local path
            # relative to a requirements file.
            value = opts.find_links[0]
            req_dir = os.path.dirname(os.path.abspath(filename))
            relative_to_reqs_file = os.path.join(req_dir, value)
            if os.path.exists(relative_to_reqs_file):
                value = relative_to_reqs_file
            finder.find_links.append(value)
def break_args_options(line):
    """Break up the line into an args and options string. We only want to shlex
    (and then optparse) the options, not the args. args can contain markers
    which are corrupted by shlex.

    :param line: a single logical requirements-file line.
    :returns: ``(args, options)`` pair of space-joined strings, where ``args``
        is everything before the first ``-``/``--`` token and ``options`` is
        that token plus the remainder of the line.
    """
    tokens = line.split(' ')
    args = []
    options = tokens[:]
    for token in tokens:
        # The first token starting with '-' begins the options part.
        # (A '--' prefix is already covered by the '-' check, so the old
        # redundant `or token.startswith('--')` clause was dropped.)
        if token.startswith('-'):
            break
        else:
            args.append(token)
            options.pop(0)
    return ' '.join(args), ' '.join(options)
def build_parser():
    """
    Return a parser for parsing requirement lines
    """
    parser = optparse.OptionParser(add_help_option=False)
    # Register every option a requirements file may legally contain.
    for make_option in SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ:
        parser.add_option(make_option())
    # By default optparse sys.exits on parsing errors. We want to wrap
    # that in our own exception.
    def parser_exit(self, msg):
        raise RequirementsFileParseError(msg)
    parser.exit = parser_exit
    return parser
def join_lines(iterator):
    """
    Joins a line ending in '\\' with the following line(s).

    Yields logical lines: physical lines ending with a backslash are buffered
    and concatenated with their continuation lines.

    :param iterator: iterable of physical lines.
    """
    buffered = []
    for line in iterator:
        if line.endswith('\\'):
            # Continuation: drop the backslash and keep accumulating.
            buffered.append(line.strip('\\'))
        elif buffered:
            # End of a continued sequence: emit the joined logical line.
            buffered.append(line)
            yield ''.join(buffered)
            buffered = []
        else:
            yield line
    # BUG FIX: a '\' on the very last line used to be dropped silently
    # (the old "TODO: handle '\' on last line"); flush any remainder.
    if buffered:
        yield ''.join(buffered)
    # TODO: handle space after '\'.
def ignore_comments(iterator):
    """
    Strips and filters empty or commented lines.
    """
    for raw in iterator:
        # Drop the '#'-comment portion, then surrounding whitespace;
        # skip anything left empty.
        stripped = COMMENT_RE.sub('', raw).strip()
        if stripped:
            yield stripped
def skip_regex(lines, options):
    """
    Optionally exclude lines that match '--skip-requirements-regex'
    """
    pattern = options.skip_requirements_regex if options else None
    if not pattern:
        return lines
    # Keep only the lines the skip pattern does NOT match.
    return filterfalse(re.compile(pattern).search, lines)
|
apache-2.0
|
ebukoz/thrive
|
erpnext/patches/v8_0/update_production_orders.py
|
16
|
1606
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
    # Migration patch: rebuild the "required_items" table of every
    # non-cancelled Work Order and retire the deprecated 'Unstopped' status.
    # reload schema
    for doctype in ("Work Order", "Work Order Item", "Work Order Operation",
        "BOM Item", "BOM Explosion Item", "BOM"):
        frappe.reload_doctype(doctype)
    frappe.reload_doc("stock", "doctype", "item")
    frappe.reload_doc("stock", "doctype", "item_default")
    # fetch all draft and submitted work orders (docstatus != 2 excludes
    # cancelled documents)
    fields = ["name"]
    # Only select source_warehouse if the column already exists in the table.
    if "source_warehouse" in frappe.db.get_table_columns("Work Order"):
        fields.append("source_warehouse")
    wo_orders = frappe.get_all("Work Order", filters={"docstatus": ["!=", 2]}, fields=fields)
    count = 0
    for p in wo_orders:
        wo_order = frappe.get_doc("Work Order", p.name)
        count += 1
        # set required items table
        wo_order.set_required_items()
        for item in wo_order.get("required_items"):
            # set source warehouse based on parent
            if not item.source_warehouse and "source_warehouse" in fields:
                item.source_warehouse = wo_order.get("source_warehouse")
            item.db_update()
        if wo_order.docstatus == 1:
            # update transferred qty based on Stock Entry, it also updates db
            # NOTE(review): the method name is misspelled ("transaferred");
            # presumably that is the actual API name on Work Order -- do not
            # "fix" the call here without renaming the method itself.
            wo_order.update_transaferred_qty_for_required_items()
            # Set status where it was 'Unstopped', as it is deprecated
            if wo_order.status == "Unstopped":
                status = wo_order.get_status()
                wo_order.db_set("status", status)
            elif wo_order.status == "Stopped":
                wo_order.update_reserved_qty_for_production()
        # Commit in batches of 200 to keep transactions small.
        if count % 200 == 0:
            frappe.db.commit()
|
gpl-3.0
|
tensorflow/tensorflow
|
tensorflow/python/kernel_tests/conv2d_transpose_test.py
|
9
|
12650
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for convolution related functionality in tensorflow.ops.nn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
class Conv2DTransposeTest(test.TestCase):
  """Functional tests for nn_ops.conv2d_transpose (NHWC and NCHW layouts).

  The tests feed all-ones inputs and filters, so every output cell equals the
  number of (input cell, filter tap) pairs contributing to it times the input
  depth; the loops below recompute that count analytically.
  """

  def testConv2DTransposeSingleStride(self):
    """NHWC, stride 1, SAME padding, float32 and int32."""
    with self.cached_session():
      for dtype in (dtypes.float32, dtypes.int32):
        strides = [1, 1, 1, 1]
        # Input, output: [batch, height, width, depth]
        x_shape = [2, 6, 4, 3]
        y_shape = [2, 6, 4, 2]
        # Filter: [kernel_height, kernel_width, output_depth, input_depth]
        f_shape = [3, 3, 2, 3]
        x = constant_op.constant(1, shape=x_shape, name="x", dtype=dtype)
        f = constant_op.constant(1, shape=f_shape, name="filter", dtype=dtype)
        output = nn_ops.conv2d_transpose(
            x, f, y_shape, strides=strides, padding="SAME")
        value = self.evaluate(output)
        # We count the number of cells being added at the locations in the
        # output.
        # At the center, #cells=kernel_height * kernel_width
        # At the corners, #cells=ceil(kernel_height/2) * ceil(kernel_width/2)
        # At the borders, #cells=ceil(kernel_height/2)*kernel_width or
        # kernel_height * ceil(kernel_width/2)
        for n in xrange(x_shape[0]):
          for k in xrange(f_shape[2]):
            for w in xrange(y_shape[2]):
              for h in xrange(y_shape[1]):
                target = 4 * 3
                h_in = h > 0 and h < y_shape[1] - 1
                w_in = w > 0 and w < y_shape[2] - 1
                if h_in and w_in:
                  target += 5 * 3
                elif h_in or w_in:
                  target += 2 * 3
                if dtype.is_integer:
                  self.assertAllEqual(target, value[n, h, w, k])
                else:
                  self.assertAllClose(target, value[n, h, w, k])

  def testConv2DTransposeSame(self):
    """NHWC, stride 2, SAME padding, float32 and int32."""
    with self.cached_session():
      for dtype in (dtypes.float32, dtypes.int32):
        strides = [1, 2, 2, 1]
        # Input, output: [batch, height, width, depth]
        x_shape = [2, 6, 4, 3]
        y_shape = [2, 12, 8, 2]
        # Filter: [kernel_height, kernel_width, output_depth, input_depth]
        f_shape = [3, 3, 2, 3]
        x = constant_op.constant(1, shape=x_shape, name="x", dtype=dtype)
        f = constant_op.constant(1, shape=f_shape, name="filter", dtype=dtype)
        output = nn_ops.conv2d_transpose(
            x, f, y_shape, strides=strides, padding="SAME")
        value = self.evaluate(output)
        for n in xrange(x_shape[0]):
          for k in xrange(f_shape[2]):
            for w in xrange(y_shape[2]):
              for h in xrange(y_shape[1]):
                target = 3
                # We add a case for locations divisible by the stride.
                h_in = h % strides[1] == 0 and h > 0 and h < y_shape[1] - 1
                w_in = w % strides[2] == 0 and w > 0 and w < y_shape[2] - 1
                if h_in and w_in:
                  target += 9
                elif h_in or w_in:
                  target += 3
                if dtype.is_integer:
                  self.assertAllEqual(target, value[n, h, w, k])
                else:
                  self.assertAllClose(target, value[n, h, w, k])

  def testConv2DTransposeValid(self):
    """NHWC, stride 2, VALID padding, float32 and int32."""
    with self.cached_session():
      for dtype in (dtypes.float32, dtypes.int32):
        strides = [1, 2, 2, 1]
        # Input, output: [batch, height, width, depth]
        x_shape = [2, 6, 4, 3]
        y_shape = [2, 13, 9, 2]
        # Filter: [kernel_height, kernel_width, output_depth, input_depth]
        f_shape = [3, 3, 2, 3]
        x = constant_op.constant(1, shape=x_shape, name="x", dtype=dtype)
        f = constant_op.constant(1, shape=f_shape, name="filter", dtype=dtype)
        output = nn_ops.conv2d_transpose(
            x, f, y_shape, strides=strides, padding="VALID")
        value = self.evaluate(output)
        cache_values = np.zeros(y_shape, dtype=np.float32)
        # The amount of padding added
        pad = 1
        for n in xrange(x_shape[0]):
          for k in xrange(f_shape[2]):
            for w in xrange(pad, y_shape[2] - pad):
              for h in xrange(pad, y_shape[1] - pad):
                target = 3
                # We add a case for locations divisible by the stride.
                h_in = h % strides[1] == 0 and h > pad and h < y_shape[
                    1] - 1 - pad
                w_in = w % strides[2] == 0 and w > pad and w < y_shape[
                    2] - 1 - pad
                if h_in and w_in:
                  target += 9
                elif h_in or w_in:
                  target += 3
                cache_values[n, h, w, k] = target
            # copy values in the border
            cache_values[n, :, 0, k] = cache_values[n, :, 1, k]
            cache_values[n, :, -1, k] = cache_values[n, :, -2, k]
            cache_values[n, 0, :, k] = cache_values[n, 1, :, k]
            cache_values[n, -1, :, k] = cache_values[n, -2, :, k]
        if dtype.is_integer:
          self.assertAllEqual(cache_values, value)
        else:
          self.assertAllClose(cache_values, value)

  @test_util.run_deprecated_v1
  def testGradient(self):
    """Numerical gradient check for conv2d_transpose (stride 2, SAME)."""
    x_shape = [2, 6, 4, 3]
    f_shape = [3, 3, 2, 3]
    y_shape = [2, 12, 8, 2]
    strides = [1, 2, 2, 1]
    np.random.seed(1)  # Make it reproducible.
    x_val = np.random.random_sample(x_shape).astype(np.float64)
    f_val = np.random.random_sample(f_shape).astype(np.float64)
    with self.cached_session():
      x = constant_op.constant(x_val, name="x", dtype=dtypes.float32)
      f = constant_op.constant(f_val, name="f", dtype=dtypes.float32)
      output = nn_ops.conv2d_transpose(
          x, f, y_shape, strides=strides, padding="SAME")
      err = gradient_checker.compute_gradient_error([x, f], [x_shape, f_shape],
                                                    output, y_shape)
    print("conv2d_transpose gradient err = %g " % err)
    err_tolerance = 0.0005
    self.assertLess(err, err_tolerance)

  def testConv2DTransposeSingleStrideNCHW(self):
    """NCHW, stride 1, SAME padding (GPU only)."""
    # `NCHW` data format is only supported for CUDA device.
    if test.is_gpu_available(cuda_only=True):
      with self.session():
        strides = [1, 1, 1, 1]
        # Input, output: [batch, depth, height, width, depth]
        x_shape = [2, 3, 6, 4]
        y_shape = [2, 2, 6, 4]
        # Filter: [kernel_height, kernel_width, output_depth, input_depth]
        f_shape = [3, 3, 2, 3]
        x = constant_op.constant(
            1.0, shape=x_shape, name="x", dtype=dtypes.float32)
        f = constant_op.constant(
            1.0, shape=f_shape, name="filter", dtype=dtypes.float32)
        output = nn_ops.conv2d_transpose(
            x, f, y_shape, strides=strides, padding="SAME", data_format="NCHW")
        value = self.evaluate(output)
        for n in xrange(x_shape[0]):
          for k in xrange(f_shape[2]):
            for w in xrange(y_shape[3]):
              for h in xrange(y_shape[2]):
                target = 4 * 3.0
                h_in = h > 0 and h < y_shape[2] - 1
                w_in = w > 0 and w < y_shape[3] - 1
                if h_in and w_in:
                  target += 5 * 3.0
                elif h_in or w_in:
                  target += 2 * 3.0
                self.assertAllClose(target, value[n, k, h, w])

  def testConv2DTransposeSameNCHW(self):
    """NCHW, stride 2, SAME padding (GPU only)."""
    # `NCHW` data format is only supported for CUDA device.
    if test.is_gpu_available(cuda_only=True):
      with self.session():
        strides = [1, 1, 2, 2]
        # Input, output: [batch, depth, height, width]
        x_shape = [2, 3, 6, 4]
        y_shape = [2, 2, 12, 8]
        # Filter: [kernel_height, kernel_width, output_depth, input_depth]
        f_shape = [3, 3, 2, 3]
        x = constant_op.constant(
            1.0, shape=x_shape, name="x", dtype=dtypes.float32)
        f = constant_op.constant(
            1.0, shape=f_shape, name="filter", dtype=dtypes.float32)
        output = nn_ops.conv2d_transpose(
            x, f, y_shape, strides=strides, padding="SAME", data_format="NCHW")
        value = self.evaluate(output)
        for n in xrange(x_shape[0]):
          for k in xrange(f_shape[2]):
            for w in xrange(y_shape[3]):
              for h in xrange(y_shape[2]):
                target = 3.0
                # We add a case for locations divisible by the stride.
                h_in = h % strides[2] == 0 and h > 0 and h < y_shape[2] - 1
                w_in = w % strides[3] == 0 and w > 0 and w < y_shape[3] - 1
                if h_in and w_in:
                  target += 9.0
                elif h_in or w_in:
                  target += 3.0
                self.assertAllClose(target, value[n, k, h, w])

  def testConv2DTransposeValidNCHW(self):
    """NCHW, stride 2, VALID padding (GPU only)."""
    # `NCHW` data format is only supported for CUDA device.
    if test.is_gpu_available(cuda_only=True):
      with self.session():
        strides = [1, 1, 2, 2]
        # Input, output: [batch, depth, height, width]
        x_shape = [2, 3, 6, 4]
        y_shape = [2, 2, 13, 9]
        # Filter: [kernel_height, kernel_width, output_depth, input_depth]
        f_shape = [3, 3, 2, 3]
        x = constant_op.constant(
            1.0, shape=x_shape, name="x", dtype=dtypes.float32)
        f = constant_op.constant(
            1.0, shape=f_shape, name="filter", dtype=dtypes.float32)
        output = nn_ops.conv2d_transpose(
            x, f, y_shape, strides=strides, padding="VALID", data_format="NCHW")
        value = self.evaluate(output)
        cache_values = np.zeros(y_shape, dtype=np.float32)
        # The amount of padding added
        pad = 1
        for n in xrange(x_shape[0]):
          for k in xrange(f_shape[2]):
            for w in xrange(pad, y_shape[3] - pad):
              for h in xrange(pad, y_shape[2] - pad):
                target = 3.0
                # We add a case for locations divisible by the stride.
                h_in = h % strides[2] == 0 and h > pad and h < y_shape[
                    2] - 1 - pad
                w_in = w % strides[3] == 0 and w > pad and w < y_shape[
                    3] - 1 - pad
                if h_in and w_in:
                  target += 9.0
                elif h_in or w_in:
                  target += 3.0
                cache_values[n, k, h, w] = target
            # copy values in the border
            cache_values[n, k, :, 0] = cache_values[n, k, :, 1]
            cache_values[n, k, :, -1] = cache_values[n, k, :, -2]
            cache_values[n, k, 0, :] = cache_values[n, k, 1, :]
            cache_values[n, k, -1, :] = cache_values[n, k, -2, :]
        self.assertAllClose(cache_values, value)

  def testConv2DTransposeShapeInference(self):
    """Static shape inference with a partially dynamic output_shape."""
    # Test case for 8972
    initializer = random_ops.truncated_normal(
        [3, 3, 5, 1], mean=0.0, stddev=0.01, dtype=dtypes.float32)
    x = variables.Variable(random_ops.random_normal([3, 10, 5, 1]))
    f = variable_scope.get_variable("f", initializer=initializer)
    f_shape = array_ops.stack([array_ops.shape(x)[0], 10, 5, 5])
    output = nn_ops.conv2d_transpose(
        x, f, f_shape, strides=[1, 1, 1, 1], padding="SAME")
    self.assertEqual(output.get_shape().as_list(), [3, 10, 5, 5])
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
  test.main()
|
apache-2.0
|
tuomassalo/vr-no-flash
|
parse-coachmap.py
|
1
|
2371
|
#!/usr/bin/env python
"""
Read coachmap.swf and export all coaches to dist/coaches/*.{svg,js}.

NOTE: this is a Python 2 script (print statement syntax); it depends on the
third-party `swf` package and the svgo node module for optimization.
"""
from swf.movie import SWF
from swf.export import SVGExporter, SingleShapeSVGExporterMixin, FrameSVGExporterMixin, NamesSVGExporterMixin, SVGExporter
from swf.tag import TagPlaceObject, TagDefineShape, TagDefineSprite, TagFrameLabel
from subprocess import call
import json
print "Parsing..."
swf = SWF(open('coachmap.swf'))
# NB: the order of these mixins matter.
class CoachExporter(SingleShapeSVGExporterMixin, FrameSVGExporterMixin, NamesSVGExporterMixin, SVGExporter):
    pass
exporter = CoachExporter()
# 1) Find the PlaceObject tag "floorplan".
placeobject = [x for x in swf.all_tags_of_type(TagPlaceObject) if x.instanceName == 'floorplan'][0]
# 2) Find corresponding DefineSprite.
sprite = [x for x in swf.all_tags_of_type((TagDefineShape, TagDefineSprite)) if x.characterId == placeobject.characterId][0]
# 3) Remove background (id=362) to get a tight viewbox for each coach type.
#    NOTE(review): 362 is presumably the background shape's character id in
#    this particular SWF -- re-check if the asset is regenerated.
sprite.tags = [t for t in sprite.tags if not hasattr(t, 'characterId') or t.characterId != 362]
# 4) Remove filter from placeobject so there's something to see.
placeobject.colorTransform = None
placeobject.hasColorTransform = False
coaches = list(sprite.all_tags_of_type(TagFrameLabel))
# 5) For all coaches:
# - export to dist/coaches/*.svg
# - optimize (in-place)
# - wrap in dist/coaches/*.js
for (frame_idx, coach_type) in enumerate([l.frameName for l in coaches]):
    # To test a single coach type:
    # if coach_type != 'A40':
    # continue
    print "Converting frame %d/%d: %s" % (frame_idx, len(coaches)-1, coach_type)
    svg = exporter.export(swf, shape=sprite, frame=frame_idx)
    svg_filename = "dist/coaches/%s.svg" % coach_type
    out = open(svg_filename, "w")
    out.write(svg.read())
    out.close()
    # optimize:
    # convertPathData would halve the filesize, but is buggy, see https://github.com/svg/svgo/issues/483
    # call(["node_modules/svgo/bin/svgo", "--disable", "convertPathData", "--quiet", svg_filename])
    call(["node_modules/svgo/bin/svgo", "--quiet", svg_filename])
    # Wrap the optimized SVG in a JS file that registers the coach.
    with open(svg_filename, 'r') as final_svg:
        js_contents = "nfDefineCoach(%s)" % json.dumps(dict(type=coach_type, svg=final_svg.read()))
    js_filename = svg_filename.replace('.svg', '.js')
    out_js = open(js_filename, "w")
    out_js.write(js_contents)
    out_js.close()
|
mit
|
perezg/infoxchange
|
BASE/lib/python2.7/site-packages/django/contrib/admin/options.py
|
24
|
65679
|
import copy
from functools import update_wrapper, partial
import warnings
from django import forms
from django.conf import settings
from django.forms.formsets import all_valid
from django.forms.models import (modelform_factory, modelformset_factory,
inlineformset_factory, BaseInlineFormSet)
from django.contrib.contenttypes.models import ContentType
from django.contrib.admin import widgets, helpers
from django.contrib.admin.util import unquote, flatten_fieldsets, get_deleted_objects, model_format_dict
from django.contrib.admin.templatetags.admin_static import static
from django.contrib import messages
from django.views.decorators.csrf import csrf_protect
from django.core.exceptions import PermissionDenied, ValidationError
from django.core.paginator import Paginator
from django.core.urlresolvers import reverse
from django.db import models, transaction, router
from django.db.models.constants import LOOKUP_SEP
from django.db.models.related import RelatedObject
from django.db.models.fields import BLANK_CHOICE_DASH, FieldDoesNotExist
from django.db.models.sql.constants import QUERY_TERMS
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.template.response import SimpleTemplateResponse, TemplateResponse
from django.utils.decorators import method_decorator
from django.utils.datastructures import SortedDict
from django.utils.html import escape, escapejs
from django.utils.safestring import mark_safe
from django.utils import six
from django.utils.text import capfirst, get_text_list
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext
from django.utils.encoding import force_text
# Layout constants for ModelAdmin.radio_fields (radio-button list direction).
HORIZONTAL, VERTICAL = 1, 2

def get_ul_class(x):
    """Return the <ul> CSS class for a radio_admin field.

    ``x`` is HORIZONTAL or VERTICAL; horizontal lists get the extra
    ``inline`` class. Replaces the fragile ``cond and a or b`` idiom with a
    conditional expression (identical output for both inputs).
    """
    return 'radiolist inline' if x == HORIZONTAL else 'radiolist'
class IncorrectLookupParameters(Exception):
    """Raised when changelist query-string lookup parameters are invalid."""
    pass
# Defaults for formfield_overrides. ModelAdmin subclasses can change this
# by adding to ModelAdmin.formfield_overrides.
# Maps model field classes to default kwargs for their form fields; merged
# (caller kwargs win) in BaseModelAdmin.formfield_for_dbfield.
FORMFIELD_FOR_DBFIELD_DEFAULTS = {
    models.DateTimeField: {
        'form_class': forms.SplitDateTimeField,
        'widget': widgets.AdminSplitDateTime
    },
    models.DateField: {'widget': widgets.AdminDateWidget},
    models.TimeField: {'widget': widgets.AdminTimeWidget},
    models.TextField: {'widget': widgets.AdminTextareaWidget},
    models.URLField: {'widget': widgets.AdminURLFieldWidget},
    models.IntegerField: {'widget': widgets.AdminIntegerFieldWidget},
    models.BigIntegerField: {'widget': widgets.AdminBigIntegerFieldWidget},
    models.CharField: {'widget': widgets.AdminTextInputWidget},
    models.ImageField: {'widget': widgets.AdminFileWidget},
    models.FileField: {'widget': widgets.AdminFileWidget},
}
# Shorthand for applying csrf_protect to instance methods of class-based views.
csrf_protect_m = method_decorator(csrf_protect)
class BaseModelAdmin(six.with_metaclass(forms.MediaDefiningClass)):
    """Functionality common to both ModelAdmin and InlineAdmin."""
    # Declarative options -- subclasses override these as class attributes.
    raw_id_fields = ()  # FK/M2M fields rendered as raw primary-key inputs
    fields = None  # flat list of form fields (alternative to fieldsets)
    exclude = None  # fields to omit from the generated form
    fieldsets = None  # grouped field layout; takes precedence over `fields`
    form = forms.ModelForm  # base form class passed to modelform_factory
    filter_vertical = ()  # M2M fields using the vertical filter widget
    filter_horizontal = ()  # M2M fields using the horizontal filter widget
    radio_fields = {}  # field name -> HORIZONTAL/VERTICAL radio rendering
    prepopulated_fields = {}  # field name -> source fields for JS prefill
    formfield_overrides = {}  # model field class -> form-field kwargs
    readonly_fields = ()  # fields rendered read-only
    ordering = None  # default queryset ordering
    def __init__(self):
        # Layer the subclass's overrides on top of the global defaults, so
        # entries in self.formfield_overrides win over
        # FORMFIELD_FOR_DBFIELD_DEFAULTS.
        overrides = FORMFIELD_FOR_DBFIELD_DEFAULTS.copy()
        overrides.update(self.formfield_overrides)
        self.formfield_overrides = overrides
    def formfield_for_dbfield(self, db_field, **kwargs):
        """
        Hook for specifying the form Field instance for a given database Field
        instance.
        If kwargs are given, they're passed to the form Field's constructor.
        """
        request = kwargs.pop("request", None)
        # If the field specifies choices, we don't need to look for special
        # admin widgets - we just need to use a select widget of some kind.
        if db_field.choices:
            return self.formfield_for_choice_field(db_field, request, **kwargs)
        # ForeignKey or ManyToManyFields
        if isinstance(db_field, (models.ForeignKey, models.ManyToManyField)):
            # Combine the field kwargs with any options for formfield_overrides.
            # Make sure the passed in **kwargs override anything in
            # formfield_overrides because **kwargs is more specific, and should
            # always win.
            if db_field.__class__ in self.formfield_overrides:
                kwargs = dict(self.formfield_overrides[db_field.__class__], **kwargs)
            # Get the correct formfield.
            if isinstance(db_field, models.ForeignKey):
                formfield = self.formfield_for_foreignkey(db_field, request, **kwargs)
            elif isinstance(db_field, models.ManyToManyField):
                formfield = self.formfield_for_manytomany(db_field, request, **kwargs)
            # For non-raw_id fields, wrap the widget with a wrapper that adds
            # extra HTML -- the "add other" interface -- to the end of the
            # rendered output. formfield can be None if it came from a
            # OneToOneField with parent_link=True or a M2M intermediary.
            if formfield and db_field.name not in self.raw_id_fields:
                related_modeladmin = self.admin_site._registry.get(
                    db_field.rel.to)
                can_add_related = bool(related_modeladmin and
                    related_modeladmin.has_add_permission(request))
                formfield.widget = widgets.RelatedFieldWidgetWrapper(
                    formfield.widget, db_field.rel, self.admin_site,
                    can_add_related=can_add_related)
            return formfield
        # If we've got overrides for the formfield defined, use 'em. **kwargs
        # passed to formfield_for_dbfield override the defaults.
        for klass in db_field.__class__.mro():
            if klass in self.formfield_overrides:
                # deepcopy so widget instances in the override dict are not
                # shared between different fields/forms.
                kwargs = dict(copy.deepcopy(self.formfield_overrides[klass]), **kwargs)
                return db_field.formfield(**kwargs)
        # For any other type of field, just call its formfield() method.
        return db_field.formfield(**kwargs)
    def formfield_for_choice_field(self, db_field, request=None, **kwargs):
        """
        Get a form Field for a database Field that has declared choices.
        """
        # If the field is named as a radio_field, use a RadioSelect
        if db_field.name in self.radio_fields:
            # Avoid stomping on custom widget/choices arguments.
            if 'widget' not in kwargs:
                kwargs['widget'] = widgets.AdminRadioSelect(attrs={
                    'class': get_ul_class(self.radio_fields[db_field.name]),
                })
            if 'choices' not in kwargs:
                kwargs['choices'] = db_field.get_choices(
                    include_blank=db_field.blank,
                    blank_choice=[('', _('None'))]
                )
        return db_field.formfield(**kwargs)
    def formfield_for_foreignkey(self, db_field, request=None, **kwargs):
        """
        Get a form Field for a ForeignKey.
        """
        db = kwargs.get('using')
        if db_field.name in self.raw_id_fields:
            kwargs['widget'] = widgets.ForeignKeyRawIdWidget(db_field.rel,
                self.admin_site, using=db)
        elif db_field.name in self.radio_fields:
            kwargs['widget'] = widgets.AdminRadioSelect(attrs={
                'class': get_ul_class(self.radio_fields[db_field.name]),
            })
            kwargs['empty_label'] = db_field.blank and _('None') or None
        return db_field.formfield(**kwargs)
    def formfield_for_manytomany(self, db_field, request=None, **kwargs):
        """
        Get a form Field for a ManyToManyField.
        """
        # If it uses an intermediary model that isn't auto created, don't show
        # a field in admin.
        if not db_field.rel.through._meta.auto_created:
            return None
        db = kwargs.get('using')
        if db_field.name in self.raw_id_fields:
            kwargs['widget'] = widgets.ManyToManyRawIdWidget(db_field.rel,
                self.admin_site, using=db)
            kwargs['help_text'] = ''
        elif db_field.name in (list(self.filter_vertical) + list(self.filter_horizontal)):
            kwargs['widget'] = widgets.FilteredSelectMultiple(db_field.verbose_name, (db_field.name in self.filter_vertical))
        return db_field.formfield(**kwargs)
    def _declared_fieldsets(self):
        # Explicit fieldsets win; a bare `fields` list is wrapped in a single
        # unnamed fieldset; otherwise None means "derive from the form".
        if self.fieldsets:
            return self.fieldsets
        elif self.fields:
            return [(None, {'fields': self.fields})]
        return None
    declared_fieldsets = property(_declared_fieldsets)
    def get_ordering(self, request):
        """
        Hook for specifying field ordering.
        """
        return self.ordering or ()  # otherwise we might try to *None, which is bad ;)
    def get_readonly_fields(self, request, obj=None):
        """
        Hook for specifying custom readonly fields.
        """
        return self.readonly_fields
    def get_prepopulated_fields(self, request, obj=None):
        """
        Hook for specifying custom prepopulated fields.
        """
        return self.prepopulated_fields
    def queryset(self, request):
        """
        Returns a QuerySet of all model instances that can be edited by the
        admin site. This is used by changelist_view.
        """
        qs = self.model._default_manager.get_query_set()
        # TODO: this should be handled by some parameter to the ChangeList.
        ordering = self.get_ordering(request)
        if ordering:
            qs = qs.order_by(*ordering)
        return qs
    def lookup_allowed(self, lookup, value):
        # NOTE(review): relies on self.list_filter and self.date_hierarchy,
        # which are declared on ModelAdmin subclasses, not on this base class.
        model = self.model
        # Check FKey lookups that are allowed, so that popups produced by
        # ForeignKeyRawIdWidget, on the basis of ForeignKey.limit_choices_to,
        # are allowed to work.
        for l in model._meta.related_fkey_lookups:
            for k, v in widgets.url_params_from_lookup_dict(l).items():
                if k == lookup and v == value:
                    return True
        parts = lookup.split(LOOKUP_SEP)
        # Last term in lookup is a query term (__exact, __startswith etc)
        # This term can be ignored.
        if len(parts) > 1 and parts[-1] in QUERY_TERMS:
            parts.pop()
        # Special case -- foo__id__exact and foo__id queries are implied
        # if foo has been specifically included in the lookup list; so
        # drop __id if it is the last part. However, first we need to find
        # the pk attribute name.
        rel_name = None
        for part in parts[:-1]:
            try:
                field, _, _, _ = model._meta.get_field_by_name(part)
            except FieldDoesNotExist:
                # Lookups on non-existent fields are ok, since they're ignored
                # later.
                return True
            if hasattr(field, 'rel'):
                model = field.rel.to
                rel_name = field.rel.get_related_field().name
            elif isinstance(field, RelatedObject):
                model = field.model
                rel_name = model._meta.pk.name
            else:
                rel_name = None
        if rel_name and len(parts) > 1 and parts[-1] == rel_name:
            parts.pop()
        if len(parts) == 1:
            return True
        clean_lookup = LOOKUP_SEP.join(parts)
        return clean_lookup in self.list_filter or clean_lookup == self.date_hierarchy
    def has_add_permission(self, request):
        """
        Returns True if the given request has permission to add an object.
        Can be overridden by the user in subclasses.
        """
        opts = self.opts
        return request.user.has_perm(opts.app_label + '.' + opts.get_add_permission())
    def has_change_permission(self, request, obj=None):
        """
        Returns True if the given request has permission to change the given
        Django model instance, the default implementation doesn't examine the
        `obj` parameter.
        Can be overridden by the user in subclasses. In such case it should
        return True if the given request has permission to change the `obj`
        model instance. If `obj` is None, this should return True if the given
        request has permission to change *any* object of the given type.
        """
        opts = self.opts
        return request.user.has_perm(opts.app_label + '.' + opts.get_change_permission())
    def has_delete_permission(self, request, obj=None):
        """
        Returns True if the given request has permission to change the given
        Django model instance, the default implementation doesn't examine the
        `obj` parameter.
        Can be overridden by the user in subclasses. In such case it should
        return True if the given request has permission to delete the `obj`
        model instance. If `obj` is None, this should return True if the given
        request has permission to delete *any* object of the given type.
        """
        opts = self.opts
        return request.user.has_perm(opts.app_label + '.' + opts.get_delete_permission())
class ModelAdmin(BaseModelAdmin):
    "Encapsulates all admin options and functionality for a given model."
    # Changelist display/behavior options.
    list_display = ('__str__',)
    list_display_links = ()
    list_filter = ()
    list_select_related = False
    list_per_page = 100
    list_max_show_all = 200
    list_editable = ()
    search_fields = ()
    date_hierarchy = None
    save_as = False
    save_on_top = False
    paginator = Paginator  # paginator class, instantiated in get_paginator()
    inlines = []
    # Custom templates (designed to be over-ridden in subclasses)
    add_form_template = None
    change_form_template = None
    change_list_template = None
    delete_confirmation_template = None
    delete_selected_confirmation_template = None
    object_history_template = None
    # Actions
    actions = []  # None disables the action UI entirely (see get_actions)
    action_form = helpers.ActionForm
    actions_on_top = True
    actions_on_bottom = False
    actions_selection_counter = True
    def __init__(self, model, admin_site):
        # Bind this admin to a concrete model and the site that registered it.
        self.model = model
        self.opts = model._meta
        self.admin_site = admin_site
        super(ModelAdmin, self).__init__()
    def get_inline_instances(self, request, obj=None):
        """Instantiate the configured inline classes for this request.

        Inlines the user has no permission at all for are dropped; lacking
        only add permission pins max_num to 0 (existing rows still shown).
        """
        inline_instances = []
        for inline_class in self.inlines:
            inline = inline_class(self.model, self.admin_site)
            if request:
                if not (inline.has_add_permission(request) or
                        inline.has_change_permission(request, obj) or
                        inline.has_delete_permission(request, obj)):
                    continue
                if not inline.has_add_permission(request):
                    inline.max_num = 0
            inline_instances.append(inline)
        return inline_instances
    def get_urls(self):
        """Build the urlpatterns for this admin's five standard views."""
        from django.conf.urls import patterns, url
        def wrap(view):
            # Route every view through admin_view (auth + cache headers).
            def wrapper(*args, **kwargs):
                return self.admin_site.admin_view(view)(*args, **kwargs)
            return update_wrapper(wrapper, view)
        info = self.model._meta.app_label, self.model._meta.module_name
        urlpatterns = patterns('',
            url(r'^$',
                wrap(self.changelist_view),
                name='%s_%s_changelist' % info),
            url(r'^add/$',
                wrap(self.add_view),
                name='%s_%s_add' % info),
            url(r'^(.+)/history/$',
                wrap(self.history_view),
                name='%s_%s_history' % info),
            url(r'^(.+)/delete/$',
                wrap(self.delete_view),
                name='%s_%s_delete' % info),
            # Catch-all change URL must stay last or it shadows the others.
            url(r'^(.+)/$',
                wrap(self.change_view),
                name='%s_%s_change' % info),
        )
        return urlpatterns
    def urls(self):
        # Exposed below as a property; kept as a named method so subclasses
        # can override get_urls() instead.
        return self.get_urls()
    urls = property(urls)
    @property
    def media(self):
        """Collect the static JS the change form/changelist needs."""
        extra = '' if settings.DEBUG else '.min'
        js = [
            'core.js',
            'admin/RelatedObjectLookups.js',
            'jquery%s.js' % extra,
            'jquery.init.js'
        ]
        if self.actions is not None:
            js.append('actions%s.js' % extra)
        if self.prepopulated_fields:
            js.extend(['urlify.js', 'prepopulate%s.js' % extra])
        if self.opts.get_ordered_objects():
            js.extend(['getElementsBySelector.js', 'dom-drag.js' , 'admin/ordering.js'])
        return forms.Media(js=[static('admin/js/%s' % url) for url in js])
    def get_model_perms(self, request):
        """
        Returns a dict of all perms for this model. This dict has the keys
        ``add``, ``change``, and ``delete`` mapping to the True/False for each
        of those actions.
        """
        return {
            'add': self.has_add_permission(request),
            'change': self.has_change_permission(request),
            'delete': self.has_delete_permission(request),
        }
    def get_fieldsets(self, request, obj=None):
        "Hook for specifying fieldsets for the add form."
        if self.declared_fieldsets:
            return self.declared_fieldsets
        # No declared layout: one unnamed fieldset with all form fields plus
        # the readonly ones (which are not in base_fields).
        form = self.get_form(request, obj)
        fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
        return [(None, {'fields': fields})]
    def get_form(self, request, obj=None, **kwargs):
        """
        Returns a Form class for use in the admin add view. This is used by
        add_view and change_view.
        """
        if self.declared_fieldsets:
            fields = flatten_fieldsets(self.declared_fieldsets)
        else:
            fields = None
        if self.exclude is None:
            exclude = []
        else:
            exclude = list(self.exclude)
        # Readonly fields are always excluded from the editable form.
        exclude.extend(self.get_readonly_fields(request, obj))
        if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
            # Take the custom ModelForm's Meta.exclude into account only if the
            # ModelAdmin doesn't define its own.
            exclude.extend(self.form._meta.exclude)
        # if exclude is an empty list we pass None to be consistent with the
        # default on modelform_factory
        exclude = exclude or None
        defaults = {
            "form": self.form,
            "fields": fields,
            "exclude": exclude,
            "formfield_callback": partial(self.formfield_for_dbfield, request=request),
        }
        defaults.update(kwargs)
        return modelform_factory(self.model, **defaults)
    def get_changelist(self, request, **kwargs):
        """
        Returns the ChangeList class for use on the changelist page.
        """
        # Imported lazily to avoid a circular import at module load time.
        from django.contrib.admin.views.main import ChangeList
        return ChangeList
    def get_object(self, request, object_id):
        """
        Returns an instance matching the primary key provided. ``None`` is
        returned if no match is found (or the object_id failed validation
        against the primary key field).
        """
        queryset = self.queryset(request)
        model = queryset.model
        try:
            object_id = model._meta.pk.to_python(object_id)
            return queryset.get(pk=object_id)
        except (model.DoesNotExist, ValidationError):
            return None
    def get_changelist_form(self, request, **kwargs):
        """
        Returns a Form class for use in the Formset on the changelist page.
        """
        defaults = {
            "formfield_callback": partial(self.formfield_for_dbfield, request=request),
        }
        defaults.update(kwargs)
        return modelform_factory(self.model, **defaults)
    def get_changelist_formset(self, request, **kwargs):
        """
        Returns a FormSet class for use on the changelist page if list_editable
        is used.
        """
        defaults = {
            "formfield_callback": partial(self.formfield_for_dbfield, request=request),
        }
        defaults.update(kwargs)
        return modelformset_factory(self.model,
            self.get_changelist_form(request), extra=0,
            fields=self.list_editable, **defaults)
    def get_formsets(self, request, obj=None):
        # Generator: yields one formset class per visible inline.
        for inline in self.get_inline_instances(request, obj):
            yield inline.get_formset(request, obj)
    def get_paginator(self, request, queryset, per_page, orphans=0, allow_empty_first_page=True):
        # Hook point: instantiate whatever paginator class is configured.
        return self.paginator(queryset, per_page, orphans, allow_empty_first_page)
    def log_addition(self, request, object):
        """
        Log that an object has been successfully added.
        The default implementation creates an admin LogEntry object.
        """
        from django.contrib.admin.models import LogEntry, ADDITION
        LogEntry.objects.log_action(
            user_id = request.user.pk,
            content_type_id = ContentType.objects.get_for_model(object).pk,
            object_id = object.pk,
            object_repr = force_text(object),
            action_flag = ADDITION
        )
    def log_change(self, request, object, message):
        """
        Log that an object has been successfully changed.
        The default implementation creates an admin LogEntry object.
        """
        from django.contrib.admin.models import LogEntry, CHANGE
        LogEntry.objects.log_action(
            user_id = request.user.pk,
            content_type_id = ContentType.objects.get_for_model(object).pk,
            object_id = object.pk,
            object_repr = force_text(object),
            action_flag = CHANGE,
            change_message = message
        )
    def log_deletion(self, request, object, object_repr):
        """
        Log that an object will be deleted. Note that this method is called
        before the deletion.
        The default implementation creates an admin LogEntry object.
        """
        from django.contrib.admin.models import LogEntry, DELETION
        LogEntry.objects.log_action(
            user_id = request.user.pk,
            # Keyed on self.model (not the object) since the row is going away.
            content_type_id = ContentType.objects.get_for_model(self.model).pk,
            object_id = object.pk,
            object_repr = object_repr,
            action_flag = DELETION
        )
    def action_checkbox(self, obj):
        """
        A list_display column containing a checkbox widget.
        """
        return helpers.checkbox.render(helpers.ACTION_CHECKBOX_NAME, force_text(obj.pk))
    action_checkbox.short_description = mark_safe('<input type="checkbox" id="action-toggle" />')
    action_checkbox.allow_tags = True
    def get_actions(self, request):
        """
        Return a dictionary mapping the names of all actions for this
        ModelAdmin to a tuple of (callable, name, description) for each action.
        """
        # If self.actions is explicitly set to None that means that we don't
        # want *any* actions enabled on this page.
        from django.contrib.admin.views.main import IS_POPUP_VAR
        if self.actions is None or IS_POPUP_VAR in request.GET:
            return SortedDict()
        actions = []
        # Gather actions from the admin site first
        for (name, func) in self.admin_site.actions:
            description = getattr(func, 'short_description', name.replace('_', ' '))
            actions.append((func, name, description))
        # Then gather them from the model admin and all parent classes,
        # starting with self and working back up.
        for klass in self.__class__.mro()[::-1]:
            class_actions = getattr(klass, 'actions', [])
            # Avoid trying to iterate over None
            if not class_actions:
                continue
            actions.extend([self.get_action(action) for action in class_actions])
        # get_action might have returned None, so filter any of those out.
        actions = filter(None, actions)
        # Convert the actions into a SortedDict keyed by name.
        # Later entries win, so subclass actions override inherited ones.
        actions = SortedDict([
            (name, (func, name, desc))
            for func, name, desc in actions
        ])
        return actions
    def get_action_choices(self, request, default_choices=BLANK_CHOICE_DASH):
        """
        Return a list of choices for use in a form object. Each choice is a
        tuple (name, description).
        """
        # Copy default_choices so the (shared) default list is never mutated.
        choices = [] + default_choices
        for func, name, description in six.itervalues(self.get_actions(request)):
            choice = (name, description % model_format_dict(self.opts))
            choices.append(choice)
        return choices
    def get_action(self, action):
        """
        Return a given action from a parameter, which can either be a callable,
        or the name of a method on the ModelAdmin. Return is a tuple of
        (callable, name, description).
        """
        # If the action is a callable, just use it.
        if callable(action):
            func = action
            action = action.__name__
        # Next, look for a method. Grab it off self.__class__ to get an unbound
        # method instead of a bound one; this ensures that the calling
        # conventions are the same for functions and methods.
        elif hasattr(self.__class__, action):
            func = getattr(self.__class__, action)
        # Finally, look for a named method on the admin site
        else:
            try:
                func = self.admin_site.get_action(action)
            except KeyError:
                return None
        if hasattr(func, 'short_description'):
            description = func.short_description
        else:
            description = capfirst(action.replace('_', ' '))
        return func, action, description
    def get_list_display(self, request):
        """
        Return a sequence containing the fields to be displayed on the
        changelist.
        """
        return self.list_display
    def get_list_display_links(self, request, list_display):
        """
        Return a sequence containing the fields to be displayed as links
        on the changelist. The list_display parameter is the list of fields
        returned by get_list_display().
        """
        if self.list_display_links or not list_display:
            return self.list_display_links
        else:
            # Use only the first item in list_display as link
            return list(list_display)[:1]
    def get_list_filter(self, request):
        """
        Returns a sequence containing the fields to be displayed as filters in
        the right sidebar of the changelist page.
        """
        return self.list_filter
    def construct_change_message(self, request, form, formsets):
        """
        Construct a change message from a changed object.

        Builds one sentence per change (main form diff, then added/changed/
        deleted inline objects) and joins them with spaces.
        """
        change_message = []
        if form.changed_data:
            change_message.append(_('Changed %s.') % get_text_list(form.changed_data, _('and')))
        if formsets:
            for formset in formsets:
                for added_object in formset.new_objects:
                    change_message.append(_('Added %(name)s "%(object)s".')
                                          % {'name': force_text(added_object._meta.verbose_name),
                                             'object': force_text(added_object)})
                for changed_object, changed_fields in formset.changed_objects:
                    change_message.append(_('Changed %(list)s for %(name)s "%(object)s".')
                                          % {'list': get_text_list(changed_fields, _('and')),
                                             'name': force_text(changed_object._meta.verbose_name),
                                             'object': force_text(changed_object)})
                for deleted_object in formset.deleted_objects:
                    change_message.append(_('Deleted %(name)s "%(object)s".')
                                          % {'name': force_text(deleted_object._meta.verbose_name),
                                             'object': force_text(deleted_object)})
        change_message = ' '.join(change_message)
        return change_message or _('No fields changed.')
    def message_user(self, request, message, level=messages.INFO, extra_tags='',
                     fail_silently=False):
        """
        Send a message to the user. The default implementation
        posts a message using the django.contrib.messages backend.
        Exposes almost the same API as messages.add_message(), but accepts the
        positional arguments in a different order to maintain backwards
        compatibility. For convenience, it accepts the `level` argument as
        a string rather than the usual level number.
        """
        if not isinstance(level, int):
            # attempt to get the level if passed a string
            try:
                level = getattr(messages.constants, level.upper())
            except AttributeError:
                levels = messages.constants.DEFAULT_TAGS.values()
                levels_repr = ', '.join('`%s`' % l for l in levels)
                raise ValueError('Bad message level string: `%s`. '
                        'Possible values are: %s' % (level, levels_repr))
        messages.add_message(request, level, message, extra_tags=extra_tags,
                fail_silently=fail_silently)
    def save_form(self, request, form, change):
        """
        Given a ModelForm return an unsaved instance. ``change`` is True if
        the object is being changed, and False if it's being added.
        """
        return form.save(commit=False)
    def save_model(self, request, obj, form, change):
        """
        Given a model instance save it to the database.
        """
        obj.save()
    def delete_model(self, request, obj):
        """
        Given a model instance delete it from the database.
        """
        obj.delete()
    def save_formset(self, request, form, formset, change):
        """
        Given an inline formset save it to the database.
        """
        formset.save()
    def save_related(self, request, form, formsets, change):
        """
        Given the ``HttpRequest``, the parent ``ModelForm`` instance, the
        list of inline formsets and a boolean value based on whether the
        parent is being added or changed, save the related objects to the
        database. Note that at this point save_form() and save_model() have
        already been called.
        """
        form.save_m2m()
        for formset in formsets:
            self.save_formset(request, form, formset, change=change)
    def render_change_form(self, request, context, add=False, change=False, form_url='', obj=None):
        """Render the add/change form with the standard admin context."""
        opts = self.model._meta
        app_label = opts.app_label
        ordered_objects = opts.get_ordered_objects()
        context.update({
            'add': add,
            'change': change,
            'has_add_permission': self.has_add_permission(request),
            'has_change_permission': self.has_change_permission(request, obj),
            'has_delete_permission': self.has_delete_permission(request, obj),
            'has_file_field': True, # FIXME - this should check if form or formsets have a FileField,
            'has_absolute_url': hasattr(self.model, 'get_absolute_url'),
            'ordered_objects': ordered_objects,
            'form_url': form_url,
            'opts': opts,
            'content_type_id': ContentType.objects.get_for_model(self.model).id,
            'save_as': self.save_as,
            'save_on_top': self.save_on_top,
        })
        if add and self.add_form_template is not None:
            form_template = self.add_form_template
        else:
            form_template = self.change_form_template
        # Template resolution falls back from model-specific to app-wide to
        # the global admin template.
        return TemplateResponse(request, form_template or [
            "admin/%s/%s/change_form.html" % (app_label, opts.object_name.lower()),
            "admin/%s/change_form.html" % app_label,
            "admin/change_form.html"
        ], context, current_app=self.admin_site.name)
    def response_add(self, request, obj, post_url_continue=None):
        """
        Determines the HttpResponse for the add_view stage.
        """
        opts = obj._meta
        pk_value = obj._get_pk_val()
        msg_dict = {'name': force_text(opts.verbose_name), 'obj': force_text(obj)}
        # Here, we distinguish between different save types by checking for
        # the presence of keys in request.POST.
        if "_continue" in request.POST:
            msg = _('The %(name)s "%(obj)s" was added successfully. You may edit it again below.') % msg_dict
            self.message_user(request, msg)
            if post_url_continue is None:
                post_url_continue = reverse('admin:%s_%s_change' %
                                            (opts.app_label, opts.module_name),
                                            args=(pk_value,),
                                            current_app=self.admin_site.name)
            else:
                try:
                    post_url_continue = post_url_continue % pk_value
                    warnings.warn(
                        "The use of string formats for post_url_continue "
                        "in ModelAdmin.response_add() is deprecated. Provide "
                        "a pre-formatted url instead.",
                        DeprecationWarning, stacklevel=2)
                except TypeError:
                    pass
            if "_popup" in request.POST:
                post_url_continue += "?_popup=1"
            return HttpResponseRedirect(post_url_continue)
        if "_popup" in request.POST:
            # Popup add: hand the new pk back to the opener window via JS.
            return HttpResponse(
                '<!DOCTYPE html><html><head><title></title></head><body>'
                '<script type="text/javascript">opener.dismissAddAnotherPopup(window, "%s", "%s");</script></body></html>' % \
                # escape() calls force_text.
                (escape(pk_value), escapejs(obj)))
        elif "_addanother" in request.POST:
            msg = _('The %(name)s "%(obj)s" was added successfully. You may add another %(name)s below.') % msg_dict
            self.message_user(request, msg)
            return HttpResponseRedirect(request.path)
        else:
            msg = _('The %(name)s "%(obj)s" was added successfully.') % msg_dict
            self.message_user(request, msg)
            return self.response_post_save_add(request, obj)
    def response_change(self, request, obj):
        """
        Determines the HttpResponse for the change_view stage.
        """
        opts = self.model._meta
        pk_value = obj._get_pk_val()
        msg_dict = {'name': force_text(opts.verbose_name), 'obj': force_text(obj)}
        if "_continue" in request.POST:
            msg = _('The %(name)s "%(obj)s" was changed successfully. You may edit it again below.') % msg_dict
            self.message_user(request, msg)
            if "_popup" in request.REQUEST:
                return HttpResponseRedirect(request.path + "?_popup=1")
            else:
                return HttpResponseRedirect(request.path)
        elif "_saveasnew" in request.POST:
            msg = _('The %(name)s "%(obj)s" was added successfully. You may edit it again below.') % msg_dict
            self.message_user(request, msg)
            return HttpResponseRedirect(reverse('admin:%s_%s_change' %
                                        (opts.app_label, opts.module_name),
                                        args=(pk_value,),
                                        current_app=self.admin_site.name))
        elif "_addanother" in request.POST:
            msg = _('The %(name)s "%(obj)s" was changed successfully. You may add another %(name)s below.') % msg_dict
            self.message_user(request, msg)
            return HttpResponseRedirect(reverse('admin:%s_%s_add' %
                                        (opts.app_label, opts.module_name),
                                        current_app=self.admin_site.name))
        else:
            msg = _('The %(name)s "%(obj)s" was changed successfully.') % msg_dict
            self.message_user(request, msg)
            return self.response_post_save_change(request, obj)
    def response_post_save_add(self, request, obj):
        """
        Figure out where to redirect after the 'Save' button has been pressed
        when adding a new object.
        """
        opts = self.model._meta
        # Changelist if the user may see it, otherwise the admin index.
        if self.has_change_permission(request, None):
            post_url = reverse('admin:%s_%s_changelist' %
                               (opts.app_label, opts.module_name),
                               current_app=self.admin_site.name)
        else:
            post_url = reverse('admin:index',
                               current_app=self.admin_site.name)
        return HttpResponseRedirect(post_url)
    def response_post_save_change(self, request, obj):
        """
        Figure out where to redirect after the 'Save' button has been pressed
        when editing an existing object.
        """
        opts = self.model._meta
        # Same fallback logic as response_post_save_add.
        if self.has_change_permission(request, None):
            post_url = reverse('admin:%s_%s_changelist' %
                               (opts.app_label, opts.module_name),
                               current_app=self.admin_site.name)
        else:
            post_url = reverse('admin:index',
                               current_app=self.admin_site.name)
        return HttpResponseRedirect(post_url)
    def response_action(self, request, queryset):
        """
        Handle an admin action. This is called if a request is POSTed to the
        changelist; it returns an HttpResponse if the action was handled, and
        None otherwise.

        :param request: the current HttpRequest (must be a POST).
        :param queryset: base queryset of changelist objects; narrowed to the
            checked rows unless "select across" was requested.
        """
        # There can be multiple action forms on the page (at the top
        # and bottom of the change list, for example). Get the action
        # whose button was pushed.
        try:
            action_index = int(request.POST.get('index', 0))
        except ValueError:
            action_index = 0
        # Construct the action form.
        data = request.POST.copy()
        data.pop(helpers.ACTION_CHECKBOX_NAME, None)
        data.pop("index", None)
        # Use the action whose button was pushed
        try:
            data.update({'action': data.getlist('action')[action_index]})
        except IndexError:
            # If we didn't get an action from the chosen form that's invalid
            # POST data, so by deleting action it'll fail the validation check
            # below. So no need to do anything here
            pass
        action_form = self.action_form(data, auto_id=None)
        action_form.fields['action'].choices = self.get_action_choices(request)
        # If the form's valid we can handle the action.
        if action_form.is_valid():
            action = action_form.cleaned_data['action']
            select_across = action_form.cleaned_data['select_across']
            # get_actions() maps action name -> (callable, name, description).
            func, name, description = self.get_actions(request)[action]
            # Get the list of selected PKs. If nothing's selected, we can't
            # perform an action on it, so bail. Except we want to perform
            # the action explicitly on all objects.
            selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
            if not selected and not select_across:
                # Reminder that something needs to be selected or nothing will happen
                msg = _("Items must be selected in order to perform "
                        "actions on them. No items have been changed.")
                self.message_user(request, msg)
                return None
            if not select_across:
                # Perform the action only on the selected objects
                queryset = queryset.filter(pk__in=selected)
            response = func(self, request, queryset)
            # Actions may return an HttpResponse, which will be used as the
            # response from the POST. If not, we'll be a good little HTTP
            # citizen and redirect back to the changelist page.
            if isinstance(response, HttpResponse):
                return response
            else:
                return HttpResponseRedirect(request.get_full_path())
        else:
            msg = _("No action selected.")
            self.message_user(request, msg)
            return None
    @csrf_protect_m
    @transaction.commit_on_success
    def add_view(self, request, form_url='', extra_context=None):
        "The 'add' admin view for this model."
        # Runs inside a transaction (commit_on_success): if any save below
        # raises, the whole add is rolled back.
        model = self.model
        opts = model._meta
        if not self.has_add_permission(request):
            raise PermissionDenied
        ModelForm = self.get_form(request)
        formsets = []
        inline_instances = self.get_inline_instances(request, None)
        if request.method == 'POST':
            form = ModelForm(request.POST, request.FILES)
            if form.is_valid():
                new_object = self.save_form(request, form, change=False)
                form_validated = True
            else:
                form_validated = False
                # Use an unsaved blank instance so the inline formsets can
                # still be bound and re-rendered with their errors.
                new_object = self.model()
            # Disambiguate repeated/empty formset prefixes by appending a
            # running count per prefix.
            prefixes = {}
            for FormSet, inline in zip(self.get_formsets(request), inline_instances):
                prefix = FormSet.get_default_prefix()
                prefixes[prefix] = prefixes.get(prefix, 0) + 1
                if prefixes[prefix] != 1 or not prefix:
                    prefix = "%s-%s" % (prefix, prefixes[prefix])
                formset = FormSet(data=request.POST, files=request.FILES,
                                  instance=new_object,
                                  save_as_new="_saveasnew" in request.POST,
                                  prefix=prefix, queryset=inline.queryset(request))
                formsets.append(formset)
            # Only persist when the main form AND every inline validated.
            if all_valid(formsets) and form_validated:
                self.save_model(request, new_object, form, False)
                self.save_related(request, form, formsets, False)
                self.log_addition(request, new_object)
                return self.response_add(request, new_object)
        else:
            # Prepare the dict of initial data from the request.
            # We have to special-case M2Ms as a list of comma-separated PKs.
            initial = dict(request.GET.items())
            for k in initial:
                try:
                    f = opts.get_field(k)
                except models.FieldDoesNotExist:
                    continue
                if isinstance(f, models.ManyToManyField):
                    initial[k] = initial[k].split(",")
            form = ModelForm(initial=initial)
            prefixes = {}
            for FormSet, inline in zip(self.get_formsets(request), inline_instances):
                prefix = FormSet.get_default_prefix()
                prefixes[prefix] = prefixes.get(prefix, 0) + 1
                if prefixes[prefix] != 1 or not prefix:
                    prefix = "%s-%s" % (prefix, prefixes[prefix])
                formset = FormSet(instance=self.model(), prefix=prefix,
                                  queryset=inline.queryset(request))
                formsets.append(formset)
        # From here on: render (either a fresh form or a bound one with errors).
        adminForm = helpers.AdminForm(form, list(self.get_fieldsets(request)),
            self.get_prepopulated_fields(request),
            self.get_readonly_fields(request),
            model_admin=self)
        media = self.media + adminForm.media
        inline_admin_formsets = []
        for inline, formset in zip(inline_instances, formsets):
            fieldsets = list(inline.get_fieldsets(request))
            readonly = list(inline.get_readonly_fields(request))
            prepopulated = dict(inline.get_prepopulated_fields(request))
            inline_admin_formset = helpers.InlineAdminFormSet(inline, formset,
                fieldsets, prepopulated, readonly, model_admin=self)
            inline_admin_formsets.append(inline_admin_formset)
            media = media + inline_admin_formset.media
        context = {
            'title': _('Add %s') % force_text(opts.verbose_name),
            'adminform': adminForm,
            'is_popup': "_popup" in request.REQUEST,
            'media': media,
            'inline_admin_formsets': inline_admin_formsets,
            'errors': helpers.AdminErrorList(form, formsets),
            'app_label': opts.app_label,
        }
        context.update(extra_context or {})
        return self.render_change_form(request, context, form_url=form_url, add=True)
    @csrf_protect_m
    @transaction.commit_on_success
    def change_view(self, request, object_id, form_url='', extra_context=None):
        "The 'change' admin view for this model."
        # Runs inside a transaction (commit_on_success); mirrors add_view but
        # bound to an existing instance.
        model = self.model
        opts = model._meta
        obj = self.get_object(request, unquote(object_id))
        # NOTE: permission is checked before the None test, so unauthorized
        # users get 403 rather than learning whether the object exists.
        if not self.has_change_permission(request, obj):
            raise PermissionDenied
        if obj is None:
            raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {'name': force_text(opts.verbose_name), 'key': escape(object_id)})
        # "Save as new" is handled by delegating to add_view.
        if request.method == 'POST' and "_saveasnew" in request.POST:
            return self.add_view(request, form_url=reverse('admin:%s_%s_add' %
                                    (opts.app_label, opts.module_name),
                                    current_app=self.admin_site.name))
        ModelForm = self.get_form(request, obj)
        formsets = []
        inline_instances = self.get_inline_instances(request, obj)
        if request.method == 'POST':
            form = ModelForm(request.POST, request.FILES, instance=obj)
            if form.is_valid():
                form_validated = True
                new_object = self.save_form(request, form, change=True)
            else:
                form_validated = False
                new_object = obj
            # Disambiguate repeated/empty formset prefixes (see add_view).
            prefixes = {}
            for FormSet, inline in zip(self.get_formsets(request, new_object), inline_instances):
                prefix = FormSet.get_default_prefix()
                prefixes[prefix] = prefixes.get(prefix, 0) + 1
                if prefixes[prefix] != 1 or not prefix:
                    prefix = "%s-%s" % (prefix, prefixes[prefix])
                formset = FormSet(request.POST, request.FILES,
                                  instance=new_object, prefix=prefix,
                                  queryset=inline.queryset(request))
                formsets.append(formset)
            # Only persist when the main form AND every inline validated.
            if all_valid(formsets) and form_validated:
                self.save_model(request, new_object, form, True)
                self.save_related(request, form, formsets, True)
                change_message = self.construct_change_message(request, form, formsets)
                self.log_change(request, new_object, change_message)
                return self.response_change(request, new_object)
        else:
            form = ModelForm(instance=obj)
            prefixes = {}
            for FormSet, inline in zip(self.get_formsets(request, obj), inline_instances):
                prefix = FormSet.get_default_prefix()
                prefixes[prefix] = prefixes.get(prefix, 0) + 1
                if prefixes[prefix] != 1 or not prefix:
                    prefix = "%s-%s" % (prefix, prefixes[prefix])
                formset = FormSet(instance=obj, prefix=prefix,
                                  queryset=inline.queryset(request))
                formsets.append(formset)
        # Render (fresh form, or bound form with validation errors).
        adminForm = helpers.AdminForm(form, self.get_fieldsets(request, obj),
            self.get_prepopulated_fields(request, obj),
            self.get_readonly_fields(request, obj),
            model_admin=self)
        media = self.media + adminForm.media
        inline_admin_formsets = []
        for inline, formset in zip(inline_instances, formsets):
            fieldsets = list(inline.get_fieldsets(request, obj))
            readonly = list(inline.get_readonly_fields(request, obj))
            prepopulated = dict(inline.get_prepopulated_fields(request, obj))
            inline_admin_formset = helpers.InlineAdminFormSet(inline, formset,
                fieldsets, prepopulated, readonly, model_admin=self)
            inline_admin_formsets.append(inline_admin_formset)
            media = media + inline_admin_formset.media
        context = {
            'title': _('Change %s') % force_text(opts.verbose_name),
            'adminform': adminForm,
            'object_id': object_id,
            'original': obj,
            'is_popup': "_popup" in request.REQUEST,
            'media': media,
            'inline_admin_formsets': inline_admin_formsets,
            'errors': helpers.AdminErrorList(form, formsets),
            'app_label': opts.app_label,
        }
        context.update(extra_context or {})
        return self.render_change_form(request, context, change=True, obj=obj, form_url=form_url)
    @csrf_protect_m
    def changelist_view(self, request, extra_context=None):
        """
        The 'change list' admin view for this model.

        Serves three request kinds from one URL: plain listing (GET),
        bulk actions (POST with an action selected), and bulk editing of
        list_editable fields (POST with '_save').
        """
        # Imported locally, presumably to avoid an import cycle with
        # views.main -- TODO confirm.
        from django.contrib.admin.views.main import ERROR_FLAG
        opts = self.model._meta
        app_label = opts.app_label
        if not self.has_change_permission(request, None):
            raise PermissionDenied
        list_display = self.get_list_display(request)
        list_display_links = self.get_list_display_links(request, list_display)
        list_filter = self.get_list_filter(request)
        # Check actions to see if any are available on this changelist
        actions = self.get_actions(request)
        if actions:
            # Add the action checkboxes if there are any actions available.
            list_display = ['action_checkbox'] + list(list_display)
        ChangeList = self.get_changelist(request)
        try:
            cl = ChangeList(request, self.model, list_display,
                list_display_links, list_filter, self.date_hierarchy,
                self.search_fields, self.list_select_related,
                self.list_per_page, self.list_max_show_all, self.list_editable,
                self)
        except IncorrectLookupParameters:
            # Wacky lookup parameters were given, so redirect to the main
            # changelist page, without parameters, and pass an 'invalid=1'
            # parameter via the query string. If wacky parameters were given
            # and the 'invalid=1' parameter was already in the query string,
            # something is screwed up with the database, so display an error
            # page.
            if ERROR_FLAG in request.GET.keys():
                return SimpleTemplateResponse('admin/invalid_setup.html', {
                    'title': _('Database error'),
                })
            return HttpResponseRedirect(request.path + '?' + ERROR_FLAG + '=1')
        # If the request was POSTed, this might be a bulk action or a bulk
        # edit. Try to look up an action or confirmation first, but if this
        # isn't an action the POST will fall through to the bulk edit check,
        # below.
        action_failed = False
        selected = request.POST.getlist(helpers.ACTION_CHECKBOX_NAME)
        # Actions with no confirmation
        if (actions and request.method == 'POST' and
                'index' in request.POST and '_save' not in request.POST):
            if selected:
                response = self.response_action(request, queryset=cl.get_query_set(request))
                if response:
                    return response
                else:
                    action_failed = True
            else:
                msg = _("Items must be selected in order to perform "
                        "actions on them. No items have been changed.")
                self.message_user(request, msg)
                action_failed = True
        # Actions with confirmation
        if (actions and request.method == 'POST' and
                helpers.ACTION_CHECKBOX_NAME in request.POST and
                'index' not in request.POST and '_save' not in request.POST):
            if selected:
                response = self.response_action(request, queryset=cl.get_query_set(request))
                if response:
                    return response
                else:
                    action_failed = True
        # If we're allowing changelist editing, we need to construct a formset
        # for the changelist given all the fields to be edited. Then we'll
        # use the formset to validate/process POSTed data.
        formset = cl.formset = None
        # Handle POSTed bulk-edit data.
        if (request.method == "POST" and cl.list_editable and
                '_save' in request.POST and not action_failed):
            FormSet = self.get_changelist_formset(request)
            formset = cl.formset = FormSet(request.POST, request.FILES, queryset=cl.result_list)
            if formset.is_valid():
                changecount = 0
                for form in formset.forms:
                    if form.has_changed():
                        obj = self.save_form(request, form, change=True)
                        self.save_model(request, obj, form, change=True)
                        self.save_related(request, form, formsets=[], change=True)
                        change_msg = self.construct_change_message(request, form, None)
                        self.log_change(request, obj, change_msg)
                        changecount += 1
                if changecount:
                    if changecount == 1:
                        name = force_text(opts.verbose_name)
                    else:
                        name = force_text(opts.verbose_name_plural)
                    msg = ungettext("%(count)s %(name)s was changed successfully.",
                                    "%(count)s %(name)s were changed successfully.",
                                    changecount) % {'count': changecount,
                                                    'name': name,
                                                    'obj': force_text(obj)}
                    self.message_user(request, msg)
                # Redirect-after-POST, preserving the current filters.
                return HttpResponseRedirect(request.get_full_path())
        # Handle GET -- construct a formset for display.
        elif cl.list_editable:
            FormSet = self.get_changelist_formset(request)
            formset = cl.formset = FormSet(queryset=cl.result_list)
        # Build the list of media to be used by the formset.
        if formset:
            media = self.media + formset.media
        else:
            media = self.media
        # Build the action form and populate it with available actions.
        if actions:
            action_form = self.action_form(auto_id=None)
            action_form.fields['action'].choices = self.get_action_choices(request)
        else:
            action_form = None
        selection_note_all = ungettext('%(total_count)s selected',
            'All %(total_count)s selected', cl.result_count)
        context = {
            'module_name': force_text(opts.verbose_name_plural),
            'selection_note': _('0 of %(cnt)s selected') % {'cnt': len(cl.result_list)},
            'selection_note_all': selection_note_all % {'total_count': cl.result_count},
            'title': cl.title,
            'is_popup': cl.is_popup,
            'cl': cl,
            'media': media,
            'has_add_permission': self.has_add_permission(request),
            'app_label': app_label,
            'action_form': action_form,
            'actions_on_top': self.actions_on_top,
            'actions_on_bottom': self.actions_on_bottom,
            'actions_selection_counter': self.actions_selection_counter,
        }
        context.update(extra_context or {})
        # Template lookup falls back from model-specific to app-wide to global.
        return TemplateResponse(request, self.change_list_template or [
            'admin/%s/%s/change_list.html' % (app_label, opts.object_name.lower()),
            'admin/%s/change_list.html' % app_label,
            'admin/change_list.html'
        ], context, current_app=self.admin_site.name)
    @csrf_protect_m
    @transaction.commit_on_success
    def delete_view(self, request, object_id, extra_context=None):
        "The 'delete' admin view for this model."
        # GET renders a confirmation page listing everything that would be
        # cascaded; POST performs the deletion.
        opts = self.model._meta
        app_label = opts.app_label
        obj = self.get_object(request, unquote(object_id))
        # Permission is checked before the existence test so unauthorized
        # users get 403 rather than learning whether the object exists.
        if not self.has_delete_permission(request, obj):
            raise PermissionDenied
        if obj is None:
            raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {'name': force_text(opts.verbose_name), 'key': escape(object_id)})
        using = router.db_for_write(self.model)
        # Populate deleted_objects, a data structure of all related objects that
        # will also be deleted.
        (deleted_objects, perms_needed, protected) = get_deleted_objects(
            [obj], opts, request.user, self.admin_site, using)
        if request.POST: # The user has already confirmed the deletion.
            if perms_needed:
                # The cascade would touch objects the user may not delete.
                raise PermissionDenied
            # Capture the display string before deletion invalidates the obj.
            obj_display = force_text(obj)
            self.log_deletion(request, obj, obj_display)
            self.delete_model(request, obj)
            self.message_user(request, _('The %(name)s "%(obj)s" was deleted successfully.') % {'name': force_text(opts.verbose_name), 'obj': force_text(obj_display)})
            if not self.has_change_permission(request, None):
                return HttpResponseRedirect(reverse('admin:index',
                                                    current_app=self.admin_site.name))
            return HttpResponseRedirect(reverse('admin:%s_%s_changelist' %
                                        (opts.app_label, opts.module_name),
                                        current_app=self.admin_site.name))
        object_name = force_text(opts.verbose_name)
        if perms_needed or protected:
            title = _("Cannot delete %(name)s") % {"name": object_name}
        else:
            title = _("Are you sure?")
        context = {
            "title": title,
            "object_name": object_name,
            "object": obj,
            "deleted_objects": deleted_objects,
            "perms_lacking": perms_needed,
            "protected": protected,
            "opts": opts,
            "app_label": app_label,
        }
        context.update(extra_context or {})
        return TemplateResponse(request, self.delete_confirmation_template or [
            "admin/%s/%s/delete_confirmation.html" % (app_label, opts.object_name.lower()),
            "admin/%s/delete_confirmation.html" % app_label,
            "admin/delete_confirmation.html"
        ], context, current_app=self.admin_site.name)
    def history_view(self, request, object_id, extra_context=None):
        "The 'history' admin view for this model."
        from django.contrib.admin.models import LogEntry
        # First check if the user can see this history.
        model = self.model
        obj = get_object_or_404(model, pk=unquote(object_id))
        if not self.has_change_permission(request, obj):
            raise PermissionDenied
        # Then get the history for this object.
        opts = model._meta
        app_label = opts.app_label
        # LogEntry stores object_id as text, so the unquoted id is matched
        # against it directly; ordered oldest-first.
        action_list = LogEntry.objects.filter(
            object_id=unquote(object_id),
            content_type__id__exact=ContentType.objects.get_for_model(model).id
        ).select_related().order_by('action_time')
        context = {
            'title': _('Change history: %s') % force_text(obj),
            'action_list': action_list,
            'module_name': capfirst(force_text(opts.verbose_name_plural)),
            'object': obj,
            'app_label': app_label,
            'opts': opts,
        }
        context.update(extra_context or {})
        return TemplateResponse(request, self.object_history_template or [
            "admin/%s/%s/object_history.html" % (app_label, opts.object_name.lower()),
            "admin/%s/object_history.html" % app_label,
            "admin/object_history.html"
        ], context, current_app=self.admin_site.name)
class InlineModelAdmin(BaseModelAdmin):
    """
    Options for inline editing of ``model`` instances.
    Provide ``name`` to specify the attribute name of the ``ForeignKey`` from
    ``model`` to its parent. This is required if ``model`` has more than one
    ``ForeignKey`` to its parent.
    """
    # The inlined model (required) and the FK back to the parent model.
    model = None
    fk_name = None
    # Formset class, number of extra blank forms, and optional cap.
    formset = BaseInlineFormSet
    extra = 3
    max_num = None
    # Rendering template; set by the Stacked/Tabular subclasses.
    template = None
    verbose_name = None
    verbose_name_plural = None
    can_delete = True
    def __init__(self, parent_model, admin_site):
        self.admin_site = admin_site
        self.parent_model = parent_model
        self.opts = self.model._meta
        super(InlineModelAdmin, self).__init__()
        # Default the display names from the inlined model's meta.
        if self.verbose_name is None:
            self.verbose_name = self.model._meta.verbose_name
        if self.verbose_name_plural is None:
            self.verbose_name_plural = self.model._meta.verbose_name_plural
    @property
    def media(self):
        """JS assets required by this inline (minified unless DEBUG)."""
        extra = '' if settings.DEBUG else '.min'
        js = ['jquery%s.js' % extra, 'jquery.init.js', 'inlines%s.js' % extra]
        if self.prepopulated_fields:
            js.extend(['urlify.js', 'prepopulate%s.js' % extra])
        if self.filter_vertical or self.filter_horizontal:
            js.extend(['SelectBox.js', 'SelectFilter2.js'])
        return forms.Media(js=[static('admin/js/%s' % url) for url in js])
    def get_formset(self, request, obj=None, **kwargs):
        """Returns a BaseInlineFormSet class for use in admin add/change views."""
        if self.declared_fieldsets:
            fields = flatten_fieldsets(self.declared_fieldsets)
        else:
            fields = None
        if self.exclude is None:
            exclude = []
        else:
            exclude = list(self.exclude)
        # Read-only fields are excluded from the generated form.
        exclude.extend(self.get_readonly_fields(request, obj))
        if self.exclude is None and hasattr(self.form, '_meta') and self.form._meta.exclude:
            # Take the custom ModelForm's Meta.exclude into account only if the
            # InlineModelAdmin doesn't define its own.
            exclude.extend(self.form._meta.exclude)
        # if exclude is an empty list we use None, since that's the actual
        # default
        exclude = exclude or None
        can_delete = self.can_delete and self.has_delete_permission(request, obj)
        defaults = {
            "form": self.form,
            "formset": self.formset,
            "fk_name": self.fk_name,
            "fields": fields,
            "exclude": exclude,
            "formfield_callback": partial(self.formfield_for_dbfield, request=request),
            "extra": self.extra,
            "max_num": self.max_num,
            "can_delete": can_delete,
        }
        defaults.update(kwargs)
        return inlineformset_factory(self.parent_model, self.model, **defaults)
    def get_fieldsets(self, request, obj=None):
        """Explicit fieldsets if declared, else one default fieldset."""
        if self.declared_fieldsets:
            return self.declared_fieldsets
        form = self.get_formset(request, obj).form
        fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
        return [(None, {'fields': fields})]
    def queryset(self, request):
        """Hide all rows from users without change permission."""
        queryset = super(InlineModelAdmin, self).queryset(request)
        if not self.has_change_permission(request):
            queryset = queryset.none()
        return queryset
    def has_add_permission(self, request):
        if self.opts.auto_created:
            # We're checking the rights to an auto-created intermediate model,
            # which doesn't have its own individual permissions. The user needs
            # to have the change permission for the related model in order to
            # be able to do anything with the intermediate model.
            return self.has_change_permission(request)
        return request.user.has_perm(
            self.opts.app_label + '.' + self.opts.get_add_permission())
    def has_change_permission(self, request, obj=None):
        opts = self.opts
        if opts.auto_created:
            # The model was auto-created as intermediary for a
            # ManyToMany-relationship, find the target model
            for field in opts.fields:
                if field.rel and field.rel.to != self.parent_model:
                    opts = field.rel.to._meta
                    break
        return request.user.has_perm(
            opts.app_label + '.' + opts.get_change_permission())
    def has_delete_permission(self, request, obj=None):
        if self.opts.auto_created:
            # We're checking the rights to an auto-created intermediate model,
            # which doesn't have its own individual permissions. The user needs
            # to have the change permission for the related model in order to
            # be able to do anything with the intermediate model.
            return self.has_change_permission(request, obj)
        return request.user.has_perm(
            self.opts.app_label + '.' + self.opts.get_delete_permission())
class StackedInline(InlineModelAdmin):
    # Renders each inline form as a full, vertically stacked fieldset.
    template = 'admin/edit_inline/stacked.html'
class TabularInline(InlineModelAdmin):
    # Renders the inline forms as compact table rows.
    template = 'admin/edit_inline/tabular.html'
|
apache-2.0
|
malkavi/Flexget
|
flexget/tests/test_tmdb.py
|
3
|
1857
|
import pytest
from flexget.components.tmdb.api_tmdb import TMDBSearchResult
from flexget.manager import Session
@pytest.mark.online
class TestTmdbLookup:
    # One entry resolvable through its IMDB url, one by bare title only.
    config = """
        tasks:
          test:
            mock:
              - {title: '[Group] Taken 720p', imdb_url: 'http://www.imdb.com/title/tt0936501/'}
              - {title: 'The Matrix'}
            tmdb_lookup: yes
    """
    def test_tmdb_lookup(self, execute_task):
        """Both entries should get tmdb_name/tmdb_year populated."""
        task = execute_task('test')
        # check that these were created
        assert task.find_entry(
            tmdb_name='Taken', tmdb_year=2008
        ), 'Didn\'t populate tmdb info for Taken'
        assert task.find_entry(
            tmdb_name='The Matrix', tmdb_year=1999
        ), 'Didn\'t populate tmdb info for The Matrix'
@pytest.mark.online
class TestTmdbUnicodeLookup:
    config = """
        templates:
          global:
            tmdb_lookup: yes
        tasks:
          test_unicode:
            disable: seen
            mock:
                - {'title': '\u0417\u0435\u0440\u043a\u0430\u043b\u0430 Mirrors 2008', 'url': 'mock://whatever'}
            if:
                - tmdb_year > now.year - 1: reject
    """
    @pytest.mark.xfail(reason='VCR attempts to compare str to unicode')
    def test_unicode(self, execute_task):
        """Non-ASCII titles should be searchable and cached exactly once."""
        execute_task('test_unicode')
        with Session() as session:
            r = session.query(TMDBSearchResult).all()
            assert len(r) == 1, 'Should have added a search result'
            assert (
                r[0].search == '\u0437\u0435\u0440\u043a\u0430\u043b\u0430 mirrors (2008)'
            ), 'The search result should be lower case'
        # A second run must reuse the cached search result, not add a row.
        execute_task('test_unicode')
        with Session() as session:
            r = session.query(TMDBSearchResult).all()
            assert len(r) == 1, 'Should not have added a new row'
|
mit
|
freelan-developers/chromalog
|
chromalog/log.py
|
1
|
5905
|
"""
Log-related functions and structures.
"""
from builtins import map
import sys
import logging
from colorama import AnsiToWin32
from functools import partial
from contextlib import contextmanager
from .colorizer import Colorizer
from .mark.objects import Mark
from .stream import stream_has_color_support
class ColorizingFormatter(logging.Formatter, object):
    """
    A formatter that colorizes its output.
    """
    @contextmanager
    def _patch_record(self, record, colorizer, message_color_tag):
        """
        Temporarily replace the attributes of *record* with colorized
        versions; the original ``__dict__`` is restored on exit so other
        handlers see the record unmodified.
        """
        save_dict = record.__dict__.copy()
        if colorizer:
            # Colorize the format arguments, preserving their container
            # type (dict for %(name)s-style args, tuple otherwise).
            if isinstance(record.args, dict):
                record.args = dict(
                    (
                        k, colorizer.colorize(
                            v, context_color_tag=message_color_tag
                        )
                    ) for k, v in record.args.items()
                )
            else:
                record.args = tuple(map(
                    partial(
                        colorizer.colorize,
                        context_color_tag=message_color_tag,
                    ),
                    record.args,
                ))
            # Colorize the standard LogRecord display attributes.
            record.filename = colorizer.colorize(record.filename)
            record.funcName = colorizer.colorize(record.funcName)
            record.levelname = colorizer.colorize(record.levelname)
            record.module = colorizer.colorize(record.module)
            record.name = colorizer.colorize(record.name)
            record.pathname = colorizer.colorize(record.pathname)
            record.processName = colorizer.colorize(record.processName)
            record.threadName = colorizer.colorize(record.threadName)
            if message_color_tag:
                # Pre-render the message with its color tag and shadow
                # getMessage so the formatter picks up the colored text.
                message = colorizer.colorize(Mark(
                    record.getMessage(),
                    color_tag=message_color_tag,
                ))
                record.getMessage = lambda: message
        try:
            yield
        finally:
            # Restore the record so downstream handlers are unaffected.
            record.__dict__ = save_dict
    def format(self, record):
        """
        Colorize the arguments of a record.
        :record: A `LogRecord` instance.
        :returns: The colorized formatted string.
        .. note:: The `record` object must have a `colorizer` attribute to be
            use for colorizing the formatted string. If no such attribute is
            found, the default non-colorized behaviour is used instead.
        """
        colorizer = getattr(record, 'colorizer', None)
        message_color_tag = getattr(record, 'message_color_tag', None)
        with self._patch_record(record, colorizer, message_color_tag):
            return super(ColorizingFormatter, self).format(record)
class ColorizingStreamHandler(logging.StreamHandler, object):
    """
    A stream handler that colorizes its output.
    """
    # Attribute name used to hand the active colorizer to the formatter.
    _RECORD_ATTRIBUTE_NAME = 'colorizer'
    # Maps LogRecord attributes to a color tag, or to a callable that
    # derives the tag from the record.
    default_attributes_map = {
        'name': 'important',
        'levelname': lambda record: str(record.levelname).lower(),
        'message': lambda record: str(record.levelname).lower(),
    }
    def __init__(
        self,
        stream=None,
        colorizer=None,
        highlighter=None,
        attributes_map=None,
    ):
        """
        Initializes a colorizing stream handler.
        :param stream: The stream to use for output.
        :param colorizer: The colorizer to use for colorizing the output. If
            not specified, a :class:`chromalog.colorizer.Colorizer` is
            instantiated.
        :param highlighter: The colorizer to use for highlighting the output
            when color is not supported.
        :param attributes_map: A map of LogRecord attributes/color tags.
        """
        if not stream:
            stream = sys.stderr
        self.has_color_support = stream_has_color_support(stream)
        self.color_disabled = False
        self.attributes_map = attributes_map or self.default_attributes_map
        if self.has_color_support:
            # Wrap the stream so ANSI sequences render on Windows consoles.
            stream = AnsiToWin32(stream).stream
        super(ColorizingStreamHandler, self).__init__(
            stream
        )
        self.colorizer = colorizer or Colorizer()
        self.highlighter = highlighter
        self.setFormatter(ColorizingFormatter())
    @property
    def active_colorizer(self):
        """
        The active colorizer or highlighter depending on whether color is
        supported.
        """
        if (
            self.has_color_support and
            not self.color_disabled and
            self.colorizer
        ):
            return self.colorizer
        return self.highlighter
    @contextmanager
    def __bind_to_record(self, record):
        # Attach the active colorizer for the duration of formatting only,
        # so the record stays clean for other handlers.
        setattr(record, self._RECORD_ATTRIBUTE_NAME, self.active_colorizer)
        try:
            yield
        finally:
            delattr(record, self._RECORD_ATTRIBUTE_NAME)
    def _color_tag_from_record(self, color_tag, record):
        # A callable tag is computed from the record; a string tag may
        # interpolate record attributes via str.format.
        if hasattr(color_tag, '__call__'):
            return color_tag(record)
        else:
            return color_tag.format(**record.__dict__)
    def format(self, record):
        """
        Format a `LogRecord` and prints it to the associated stream.
        """
        with self.__bind_to_record(record):
            for attribute, color_tag in self.attributes_map.items():
                if attribute == 'message':
                    # The message itself is handled by the formatter via
                    # message_color_tag rather than marked directly.
                    record.message_color_tag = self._color_tag_from_record(
                        color_tag,
                        record,
                    )
                else:
                    setattr(record, attribute, Mark(
                        getattr(record, attribute),
                        color_tag=self._color_tag_from_record(
                            color_tag,
                            record,
                        ),
                    ))
            return super(ColorizingStreamHandler, self).format(record)
|
mit
|
jody-frankowski/ansible-modules-core
|
system/mount.py
|
29
|
10793
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Red Hat, inc
# Written by Seth Vidal
# based on the mount modules from salt and puppet
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: mount
short_description: Control active and configured mount points
description:
- This module controls active and configured mount points in C(/etc/fstab).
version_added: "0.6"
options:
name:
description:
- "path to the mount point, eg: C(/mnt/files)"
required: true
default: null
aliases: []
src:
description:
- device to be mounted on I(name).
required: true
default: null
fstype:
description:
- file-system type
required: true
default: null
opts:
description:
- mount options (see fstab(8))
required: false
default: null
dump:
description:
- "dump (see fstab(8)), Note that if nulled, C(state=present) will cease to work and duplicate entries will be made with subsequent runs."
required: false
default: 0
passno:
description:
- "passno (see fstab(8)), Note that if nulled, C(state=present) will cease to work and duplicate entries will be made with subsequent runs."
required: false
default: 0
state:
description:
- If C(mounted) or C(unmounted), the device will be actively mounted or unmounted
as needed and appropriately configured in I(fstab).
C(absent) and C(present) only deal with
I(fstab) but will not affect current mounting. If specifying C(mounted) and the mount
point is not present, the mount point will be created. Similarly, specifying C(absent) will remove the mount point directory.
required: true
choices: [ "present", "absent", "mounted", "unmounted" ]
default: null
fstab:
description:
- file to use instead of C(/etc/fstab). You shouldn't use that option
unless you really know what you are doing. This might be useful if
you need to configure mountpoints in a chroot environment.
required: false
default: /etc/fstab
notes: []
requirements: []
author: Seth Vidal
'''
EXAMPLES = '''
# Mount DVD read-only
- mount: name=/mnt/dvd src=/dev/sr0 fstype=iso9660 opts=ro state=present
# Mount up device by label
- mount: name=/srv/disk src='LABEL=SOME_LABEL' fstype=ext4 state=present
# Mount up device by UUID
- mount: name=/home src='UUID=b3e48f45-f933-4c8e-a700-22a159ec9077' fstype=xfs opts=noatime state=present
'''
def write_fstab(lines, dest):
    """Overwrite *dest* with the given list of fstab lines.

    :param lines: iterable of already-terminated text lines.
    :param dest: path of the fstab file to (re)write.

    Uses a context manager so the handle is flushed and closed even if a
    write raises (the original leaked the open handle on error).
    """
    with open(dest, 'w') as fs_w:
        fs_w.writelines(lines)
def set_mount(**kwargs):
    """Set or change a mount point entry in fstab.

    :param kwargs: name, src, fstype, opts, dump, passno, fstab (defaults
        to /etc/fstab).  'name' is the key used to locate an existing line.
    :returns: tuple ``(name, changed)`` where *changed* is True if the
        file was rewritten.
    """
    args = dict(
        opts = 'defaults',
        dump = '0',
        passno = '0',
        fstab = '/etc/fstab'
    )
    args.update(kwargs)
    new_line = '%(src)s %(name)s %(fstype)s %(opts)s %(dump)s %(passno)s\n'
    to_write = []
    exists = False
    changed = False
    # Read the whole file up front; 'with' closes the handle (the original
    # left the file object open).
    with open(args['fstab'], 'r') as fstab_file:
        fstab_lines = fstab_file.readlines()
    for line in fstab_lines:
        # Preserve blank lines and comments untouched.
        if not line.strip():
            to_write.append(line)
            continue
        if line.strip().startswith('#'):
            to_write.append(line)
            continue
        fields = line.split()
        if len(fields) != 6:
            # Not a recognizable 6-field fstab entry; not ours to touch,
            # so pass it through unchanged.
            to_write.append(line)
            continue
        ld = dict(zip(('src', 'name', 'fstype', 'opts', 'dump', 'passno'),
                      fields))
        if ld['name'] != args['name']:
            to_write.append(line)
            continue
        # Found the mount point -- update any fields that differ.
        exists = True
        for t in ('src', 'fstype', 'opts', 'dump', 'passno'):
            if ld[t] != args[t]:
                changed = True
                ld[t] = args[t]
        if changed:
            to_write.append(new_line % ld)
        else:
            to_write.append(line)
    if not exists:
        # No entry for this mount point yet: append a fresh line.
        to_write.append(new_line % args)
        changed = True
    if changed:
        write_fstab(to_write, args['fstab'])
    return (args['name'], changed)
def unset_mount(**kwargs):
    """Remove a mount point entry from fstab.

    :param kwargs: name, fstab (defaults to /etc/fstab); other mount
        kwargs are accepted but only 'name' is used for matching.
    :returns: tuple ``(name, changed)`` where *changed* is True if any
        matching line was removed and the file rewritten.
    """
    args = dict(
        # 'defaults' (not 'default') to stay consistent with set_mount;
        # the value is never consulted here, only 'name' matters.
        opts = 'defaults',
        dump = '0',
        passno = '0',
        fstab = '/etc/fstab'
    )
    args.update(kwargs)
    to_write = []
    changed = False
    # Read the whole file up front; 'with' closes the handle (the original
    # left the file object open).
    with open(args['fstab'], 'r') as fstab_file:
        fstab_lines = fstab_file.readlines()
    for line in fstab_lines:
        stripped = line.strip()
        # Preserve blank lines and comments untouched.
        if not stripped or stripped.startswith('#'):
            to_write.append(line)
            continue
        fields = line.split()
        if len(fields) != 6:
            # Not a recognizable 6-field fstab entry; leave it alone.
            to_write.append(line)
            continue
        if fields[1] != args['name']:
            to_write.append(line)
            continue
        # Matching mount point: skip (drop) the line and mark changed.
        changed = True
    if changed:
        write_fstab(to_write, args['fstab'])
    return (args['name'], changed)
def mount(module, **kwargs):
    """ mount up a path or remount if needed

    Returns (0, '') on success, otherwise (rc, combined stdout+stderr).
    """
    path = kwargs['name']
    mount_bin = module.get_bin_path('mount')
    # Remount in place when the path is already an active mount point,
    # otherwise do a plain mount of the fstab entry.
    if os.path.ismount(path):
        cmd = [mount_bin, '-o', 'remount', path]
    else:
        cmd = [mount_bin, path]
    rc, stdout, stderr = module.run_command(cmd)
    if rc != 0:
        return rc, stdout + stderr
    return 0, ''
def umount(module, **kwargs):
    """ unmount a path

    Returns (0, '') on success, otherwise (rc, combined stdout+stderr).
    """
    target = kwargs['name']
    umount_bin = module.get_bin_path('umount')
    rc, stdout, stderr = module.run_command([umount_bin, target])
    if rc != 0:
        return rc, stdout + stderr
    return 0, ''
def main():
    """Module entry point: converge fstab and mount state per *state*."""
    module = AnsibleModule(
        argument_spec = dict(
            state = dict(required=True, choices=['present', 'absent', 'mounted', 'unmounted']),
            name = dict(required=True),
            opts = dict(default=None),
            passno = dict(default=None),
            dump = dict(default=None),
            src = dict(required=True),
            fstype = dict(required=True),
            fstab = dict(default='/etc/fstab')
        )
    )

    changed = False
    rc = 0
    # Required parameters always go into the kwargs handed to the helpers.
    args = {
        'name': module.params['name'],
        'src': module.params['src'],
        'fstype': module.params['fstype']
    }
    # Optional parameters are only forwarded when the user supplied them, so
    # set_mount/unset_mount can apply their own defaults otherwise.
    if module.params['passno'] is not None:
        args['passno'] = module.params['passno']
    if module.params['opts'] is not None:
        args['opts'] = module.params['opts']
        # A space would corrupt the whitespace-delimited fstab line.
        if ' ' in args['opts']:
            module.fail_json(msg="unexpected space in 'opts' parameter")
    if module.params['dump'] is not None:
        args['dump'] = module.params['dump']
    if module.params['fstab'] is not None:
        args['fstab'] = module.params['fstab']

    # if fstab file does not exist, we first need to create it. This mainly
    # happens when fstab optin is passed to the module.
    if not os.path.exists(args['fstab']):
        if not os.path.exists(os.path.dirname(args['fstab'])):
            os.makedirs(os.path.dirname(args['fstab']))
        open(args['fstab'],'a').close()

    # absent == remove from fstab and unmounted
    # unmounted == do not change fstab state, but unmount
    # present == add to fstab, do not change mount state
    # mounted == add to fstab if not there and make sure it is mounted, if it has changed in fstab then remount it

    state = module.params['state']
    name = module.params['name']
    if state == 'absent':
        name, changed = unset_mount(**args)
        if changed:
            if os.path.ismount(name):
                res,msg = umount(module, **args)
                if res:
                    module.fail_json(msg="Error unmounting %s: %s" % (name, msg))
            # Also remove the now-unused mount point directory.
            if os.path.exists(name):
                try:
                    os.rmdir(name)
                except (OSError, IOError), e:
                    module.fail_json(msg="Error rmdir %s: %s" % (name, str(e)))
        module.exit_json(changed=changed, **args)

    if state == 'unmounted':
        if os.path.ismount(name):
            res,msg = umount(module, **args)
            if res:
                module.fail_json(msg="Error unmounting %s: %s" % (name, msg))
            changed = True
        module.exit_json(changed=changed, **args)

    if state in ['mounted', 'present']:
        if state == 'mounted':
            # Ensure the mount point directory exists before mounting.
            if not os.path.exists(name):
                try:
                    os.makedirs(name)
                except (OSError, IOError), e:
                    module.fail_json(msg="Error making dir %s: %s" % (name, str(e)))

        name, changed = set_mount(**args)
        if state == 'mounted':
            res = 0
            if os.path.ismount(name):
                # fstab entry changed under an active mount -> remount.
                if changed:
                    res,msg = mount(module, **args)
            elif 'bind' in args.get('opts', []):
                # Bind mounts may not register with os.path.ismount; scan
                # 'mount -l' output for a matching src/name/fstype entry.
                changed = True
                cmd = 'mount -l'
                rc, out, err = module.run_command(cmd)
                allmounts = out.split('\n')
                for mounts in allmounts[:-1]:
                    arguments = mounts.split()
                    if arguments[0] == args['src'] and arguments[2] == args['name'] and arguments[4] == args['fstype']:
                        # Already bind-mounted exactly as requested.
                        changed = False
                if changed:
                    res,msg = mount(module, **args)
            else:
                # Not mounted yet at all.
                changed = True
                res,msg = mount(module, **args)
            if res:
                module.fail_json(msg="Error mounting %s: %s" % (name, msg))

        module.exit_json(changed=changed, **args)

    module.fail_json(msg='Unexpected position reached')
    # Defensive: fail_json/exit_json terminate the process, so this line is
    # normally unreachable.
    sys.exit(0)
# import module snippets
from ansible.module_utils.basic import *
main()
|
gpl-3.0
|
red-hood/calendarserver
|
contrib/performance/report_principals.py
|
1
|
2743
|
##
# Copyright (c) 2010-2015 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Benchmark a server's response to a simple displayname startswith
report.
"""
from urllib2 import HTTPDigestAuthHandler
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.internet import reactor
from twisted.web.client import Agent
from twisted.web.http_headers import Headers
from httpauth import AuthHandlerAgent
from httpclient import StringProducer
from benchlib import initialize, sample
body = """\
<?xml version="1.0" encoding="utf-8" ?>
<x0:principal-property-search xmlns:x2="urn:ietf:params:xml:ns:caldav" xmlns:x0="DAV:" xmlns:x1="http://calendarserver.org/ns/" test="anyof"><x0:property-search><x0:prop><x0:displayname/></x0:prop><x0:match match-type="starts-with">user</x0:match></x0:property-search><x0:property-search><x0:prop><x1:email-address-set/></x0:prop><x0:match match-type="starts-with">user</x0:match></x0:property-search><x0:property-search><x0:prop><x1:first-name/></x0:prop><x0:match match-type="starts-with">user</x0:match></x0:property-search><x0:property-search><x0:prop><x1:last-name/></x0:prop><x0:match match-type="starts-with">user</x0:match></x0:property-search><x0:prop><x1:email-address-set/><x2:calendar-user-address-set/><x2:calendar-user-type/><x0:displayname/><x1:last-name/><x1:first-name/><x1:record-type/><x0:principal-URL/></x0:prop></x0:principal-property-search>"""
@inlineCallbacks
def measure(host, port, dtrace, attendeeCount, samples):
    """Benchmark a principal-property-search REPORT against the server.

    Provisions a calendar, then fires repeated REPORT requests at the
    /principals/ collection and yields the collected samples.
    """
    user = password = "user01"
    root = principal = "/"
    calendar = "report-principal"

    # Digest-authenticated HTTP agent for all requests.
    digest = HTTPDigestAuthHandler()
    digest.add_password(
        realm="Test Realm",
        uri="http://%s:%d/" % (host, port),
        user=user,
        passwd=password)
    agent = AuthHandlerAgent(Agent(reactor), digest)

    # Provision the calendar before measuring anything.
    yield initialize(agent, host, port, user, password, root, principal, calendar)

    url = 'http://%s:%d/principals/' % (host, port)
    headers = Headers({"content-type": ["text/xml"]})
    samples = yield sample(
        dtrace, samples, agent,
        lambda: ('REPORT', url, headers, StringProducer(body)))
    returnValue(samples)
|
apache-2.0
|
scottw13/BET-1
|
doc/conf.py
|
1
|
8589
|
# Copyright (C) 2014-2015 The BET Development Team
# -*- coding: utf-8 -*-
#
# BET documentation build configuration file, created by
# sphinx-quickstart on Tue Apr 15 14:33:13 2014.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage',
'sphinx.ext.mathjax', 'sphinx.ext.intersphinx']
intersphinx_cache_limit = 10 #days to keep cached inventories
intersphinx_mapping = {'python': ('https://docs.python.org/2.7', None),
'polyadcirc' : ('http://ut-chg.github.io/PolyADCIRC', None),
'matplotlib':('http://matplotlib.sourceforge.net', None),
'numpy':('http://docs.scipy.org/doc/numpy',None),
'np':('http://docs.scipy.org/doc/numpy',None),
'scipy':('http://docs.scipy.org/doc/scipy',None)
}
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'BET'
copyright = u'2014, The BET Development Team (Lindley Graham, Steven Mattis, Troy Butler)'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'BETdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'BET.tex', u'BET Documentation',
u'The BET Development Team (Lindley Graham, Steven Mattis, Troy Butler)', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'bet', u'BET Documentation',
[u'The BET Development Team (Lindley Graham, Steven Mattis, Troy Butler)'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'BET', u'BET Documentation',
u'The BET Development Team (Lindley Graham, Steven Mattis, Troy Butler)', 'BET', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls= 'footnote'
|
gpl-3.0
|
Pencroff/ai-hackathon-2017
|
Backend/venv/lib/python3.6/site-packages/requests/packages/urllib3/packages/backports/makefile.py
|
339
|
1461
|
# -*- coding: utf-8 -*-
"""
backports.makefile
~~~~~~~~~~~~~~~~~~
Backports the Python 3 ``socket.makefile`` method for use with anything that
wants to create a "fake" socket object.
"""
import io
from socket import SocketIO
def backport_makefile(self, mode="r", buffering=None, encoding=None,
                      errors=None, newline=None):
    """
    Backport of ``socket.makefile`` from Python 3.5.

    Wraps the socket in a raw ``SocketIO`` stream, layers buffering on top,
    and for text modes adds a ``TextIOWrapper`` — mirroring CPython's own
    ``socket.makefile``.
    """
    if not set(mode) <= set(["r", "w", "b"]):
        raise ValueError(
            "invalid mode %r (only r, w, b allowed)" % (mode,)
        )

    writing = "w" in mode
    # A mode with neither 'r' nor 'w' defaults to reading.
    reading = "r" in mode or not writing
    assert reading or writing
    binary = "b" in mode

    # Raw mode string reflects the requested direction(s).
    rawmode = ("r" if reading else "") + ("w" if writing else "")
    raw = SocketIO(self, rawmode)
    self._makefile_refs += 1

    # None or any negative value means "use the default buffer size".
    if buffering is None or buffering < 0:
        buffering = io.DEFAULT_BUFFER_SIZE
    if buffering == 0:
        if not binary:
            raise ValueError("unbuffered streams must be binary")
        return raw

    if reading and writing:
        buffered = io.BufferedRWPair(raw, raw, buffering)
    elif reading:
        buffered = io.BufferedReader(raw, buffering)
    else:
        assert writing
        buffered = io.BufferedWriter(raw, buffering)
    if binary:
        return buffered

    text = io.TextIOWrapper(buffered, encoding, errors, newline)
    text.mode = mode
    return text
|
mit
|
batermj/algorithm-challenger
|
code-analysis/programming_anguage/python/source_codes/Python3.8.0/Python-3.8.0/Lib/lib2to3/fixes/fix_apply.py
|
22
|
2430
|
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for apply().
This converts apply(func, v, k) into (func)(*v, **k)."""
# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Call, Comma, parenthesize
class FixApply(fixer_base.BaseFix):
    """Rewrite ``apply(func, v[, k])`` calls as ``(func)(*v, **k)``."""

    # Participates in lib2to3's bottom-up pattern matching.
    BM_compatible = True

    PATTERN = """
    power< 'apply'
        trailer<
            '('
            arglist<
                (not argument<NAME '=' any>) func=any ','
                (not argument<NAME '=' any>) args=any [','
                (not argument<NAME '=' any>) kwds=any] [',']
            >
            ')'
        >
    >
    """

    def transform(self, node, results):
        """Return the replacement Call node, or None to leave *node* alone."""
        syms = self.syms
        assert results
        func = results["func"]
        args = results["args"]
        kwds = results.get("kwds")
        # I feel like we should be able to express this logic in the
        # PATTERN above but I don't know how to do it so...
        if args:
            if args.type == self.syms.star_expr:
                return  # Make no change.
            if (args.type == self.syms.argument and
                    args.children[0].value == '**'):
                return  # Make no change.
        if kwds and (kwds.type == self.syms.argument and
                     kwds.children[0].value == '**'):
            return  # Make no change.
        # Preserve the original node's leading whitespace/comments.
        prefix = node.prefix
        func = func.clone()
        if (func.type not in (token.NAME, syms.atom) and
                (func.type != syms.power or
                 func.children[-2].type == token.DOUBLESTAR)):
            # Need to parenthesize
            func = parenthesize(func)
        func.prefix = ""
        args = args.clone()
        args.prefix = ""
        if kwds is not None:
            kwds = kwds.clone()
            kwds.prefix = ""
        # Build the new argument list: *args [, **kwds].
        l_newargs = [pytree.Leaf(token.STAR, "*"), args]
        if kwds is not None:
            l_newargs.extend([Comma(),
                              pytree.Leaf(token.DOUBLESTAR, "**"),
                              kwds])
            l_newargs[-2].prefix = " " # that's the ** token
        # XXX Sometimes we could be cleverer, e.g. apply(f, (x, y) + t)
        # can be translated into f(x, y, *t) instead of f(*(x, y) + t)
        #new = pytree.Node(syms.power, (func, ArgList(l_newargs)))
        return Call(func, l_newargs, prefix=prefix)
|
apache-2.0
|
MatthewShao/mitmproxy
|
test/mitmproxy/tools/web/test_static_viewer.py
|
3
|
2522
|
import json
from unittest import mock
from mitmproxy.test import taddons
from mitmproxy.test import tflow
from mitmproxy import flowfilter
from mitmproxy.tools.web.app import flow_to_json
from mitmproxy.tools.web import static_viewer
from mitmproxy.addons import save
def test_save_static(tmpdir):
    """save_static must emit index.html plus the static JS stub."""
    tmpdir.mkdir('static')
    static_viewer.save_static(tmpdir)
    entries = tmpdir.listdir()
    assert len(entries) == 2
    assert tmpdir.join('index.html').check(file=1)
    assert tmpdir.join('static/static.js').read() == 'MITMWEB_STATIC = true;'
def test_save_filter_help(tmpdir):
    """The filter-help dump must mirror flowfilter.help as JSON."""
    static_viewer.save_filter_help(tmpdir)
    help_file = tmpdir.join('/filter-help.json')
    assert help_file.check(file=1)
    expected = json.dumps(dict(commands=flowfilter.help))
    assert help_file.read() == expected
def test_save_settings(tmpdir):
    """save_settings must write a settings.json file."""
    static_viewer.save_settings(tmpdir)
    settings_file = tmpdir.join('/settings.json')
    assert settings_file.check(file=1)
def test_save_flows(tmpdir):
    """flows.json must contain the JSON rendering of every saved flow."""
    flows = [tflow.tflow(req=True, resp=None), tflow.tflow(req=True, resp=True)]
    static_viewer.save_flows(tmpdir, flows)
    out = tmpdir.join('flows.json')
    assert out.check(file=1)
    assert out.read() == json.dumps([flow_to_json(f) for f in flows])
@mock.patch('mitmproxy.ctx.log')
def test_save_flows_content(ctx, tmpdir):
    """Per-flow request/response content dirs and files must be written."""
    # One flow without a response and one with, to cover both layouts.
    flows = [tflow.tflow(req=True, resp=None), tflow.tflow(req=True, resp=True)]
    # Patch time.time so generated timestamps are deterministic; the
    # side_effect sequence presumably matches the number of calls
    # save_flows_content makes per flow — TODO confirm against its source.
    with mock.patch('time.time', mock.Mock(side_effect=[1, 2, 2] * 4)):
        static_viewer.save_flows_content(tmpdir, flows)
    flows_path = tmpdir.join('flows')
    # One directory per flow.
    assert len(flows_path.listdir()) == len(flows)
    for p in flows_path.listdir():
        assert p.join('request').check(dir=1)
        assert p.join('response').check(dir=1)
        assert p.join('request/content.data').check(file=1)
        assert p.join('request/content').check(dir=1)
        assert p.join('response/content.data').check(file=1)
        assert p.join('response/content').check(dir=1)
        assert p.join('request/content/Auto.json').check(file=1)
        assert p.join('response/content/Auto.json').check(file=1)
def test_static_viewer(tmpdir):
    """End-to-end: configuring StaticViewer renders a full static site."""
    s = static_viewer.StaticViewer()
    sa = save.Save()
    with taddons.context() as tctx:
        # Record a flow file first so the viewer has something to render.
        sa.save([tflow.tflow(resp=True)], str(tmpdir.join('foo')))
        tctx.master.addons.add(s)
        # Configuring web_static_viewer + rfile triggers the static export.
        tctx.configure(s, web_static_viewer=str(tmpdir), rfile=str(tmpdir.join('foo')))
        assert tmpdir.join('index.html').check(file=1)
        assert tmpdir.join('static').check(dir=1)
        assert tmpdir.join('flows').check(dir=1)
|
mit
|
ramsateesh/designate
|
designate/storage/impl_sqlalchemy/migrate_repo/versions/060_placeholder.py
|
140
|
1035
|
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Author: Kiall Mac Innes <kiall@hp.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# This is a placeholder for Kilo backports.
# Do not use this number for new Liberty work. New Liberty work starts after
# all the placeholders.
#
# See https://blueprints.launchpad.net/nova/+spec/backportable-db-migrations
# http://lists.openstack.org/pipermail/openstack-dev/2013-March/006827.html
def upgrade(migrate_engine):
    """Intentional no-op: this revision number is reserved as a Kilo
    backport placeholder and performs no schema changes."""
    pass
def downgrade(migration_engine):
    """Intentional no-op: nothing to undo for a placeholder revision."""
    pass
|
apache-2.0
|
OmnInfinity/volatility
|
volatility/plugins/malware/cmdhistory.py
|
50
|
34078
|
# Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# Authors:
# Michael Hale Ligh <michael.ligh@mnin.org>
#
# Contributors/References:
# Richard Stevens and Eoghan Casey
# Extracting Windows Cmd Line Details from Physical Memory.
# http://ww.dfrws.org/2010/proceedings/stevens.pdf
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
import volatility.obj as obj
import volatility.plugins.common as common
import volatility.utils as utils
import volatility.win32.tasks as tasks
import volatility.debug as debug
MAX_HISTORY_DEFAULT = 50
#--------------------------------------------------------------------------------
# VTypes
#--------------------------------------------------------------------------------
# Windows 7 Types from conhost.exe
conhost_types_x86 = {
'_COMMAND': [ None, {
'CmdLength': [ 0x00, ['unsigned short']],
'Cmd' : [ 0x02, ['String', dict(encoding = 'utf16', length = lambda x : x.CmdLength)]],
}],
'_COMMAND_HISTORY': [ None, {
'ListEntry': [ 0x00, ['_LIST_ENTRY']],
'Flags' : [ 0x08, ['Flags', {'bitmap': {'Allocated': 0, 'Reset': 1}}]],
'Application': [ 0x0C, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]],
'CommandCount': [ 0x10, ['short']],
'LastAdded': [ 0x12, ['short']],
'LastDisplayed': [ 0x14, ['short']],
'FirstCommand': [ 0x16, ['short']],
'CommandCountMax': [ 0x18, ['short']],
'ProcessHandle': [ 0x1C, ['unsigned int']],
'PopupList': [ 0x20, ['_LIST_ENTRY']],
'CommandBucket': [ 0x28, ['array', lambda x : x.CommandCount, ['pointer', ['_COMMAND']]]],
}],
'_ALIAS': [ None, {
'ListEntry': [ 0x00, ['_LIST_ENTRY']],
'SourceLength': [ 0x08, ['unsigned short']],
'TargetLength': [ 0x0A, ['unsigned short']],
'Source': [ 0x0C, ['pointer', ['String', dict(encoding = 'utf16', length = lambda x : x.SourceLength)]]],
'Target': [ 0x10, ['pointer', ['String', dict(encoding = 'utf16', length = lambda x : x.TargetLength)]]],
}],
'_EXE_ALIAS_LIST' : [ None, {
'ListEntry': [ 0x00, ['_LIST_ENTRY']],
'ExeLength': [ 0x08, ['unsigned short']],
'ExeName': [ 0x0C, ['pointer', ['String', dict(encoding = 'utf16', length = lambda x : x.ExeLength * 2)]]],
'AliasList': [ 0x10, ['_LIST_ENTRY']],
}],
'_POPUP_LIST' : [ None, {
'ListEntry' : [ 0x00, ['_LIST_ENTRY']],
}],
'_CONSOLE_INFORMATION': [ None, {
'CurrentScreenBuffer': [ 0x98, ['pointer', ['_SCREEN_INFORMATION']]],
'ScreenBuffer': [ 0x9C, ['pointer', ['_SCREEN_INFORMATION']]],
'HistoryList': [ 0xD4, ['_LIST_ENTRY']],
'ProcessList': [ 0x18, ['_LIST_ENTRY']], # GetConsoleProcessList()
'ExeAliasList': [ 0xDC, ['_LIST_ENTRY']], # GetConsoleAliasExes()
'HistoryBufferCount': [ 0xE4, ['unsigned short']], # GetConsoleHistoryInfo()
'HistoryBufferMax': [ 0xE6, ['unsigned short']], # GetConsoleHistoryInfo()
'CommandHistorySize': [ 0xE8, ['unsigned short']],
'OriginalTitle': [ 0xEC, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]], # GetConsoleOriginalTitle()
'Title': [ 0xF0, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]], # GetConsoleTitle()
}],
'_CONSOLE_PROCESS': [ None, {
'ListEntry': [ 0x00, ['_LIST_ENTRY']],
'ProcessHandle': [ 0x8, ['unsigned int']],
}],
'_SCREEN_INFORMATION': [ None, {
'ScreenX': [ 0x08, ['short']],
'ScreenY': [ 0x0A, ['short']],
'Rows': [ 0x3C, ['pointer', ['array', lambda x : x.ScreenY, ['_ROW']]]],
'Next': [ 0xDC, ['pointer', ['_SCREEN_INFORMATION']]],
}],
'_ROW': [ 0x1C, {
'Chars': [ 0x08, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]],
}],
}
# Windows 7 Types from conhost.exe
conhost_types_x64 = {
'_COMMAND': [ None, {
'CmdLength': [ 0x00, ['unsigned short']],
'Cmd' : [ 0x02, ['String', dict(encoding = 'utf16', length = lambda x : x.CmdLength)]],
}],
'_COMMAND_HISTORY': [ None, {
'ListEntry': [ 0x00, ['_LIST_ENTRY']],
'Flags' : [ 0x10, ['Flags', {'bitmap': {'Allocated': 0, 'Reset': 1}}]], # AllocateCommandHistory()
'Application': [ 0x18, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]], # AllocateCommandHistory()
'CommandCount': [ 0x20, ['short']],
'LastAdded': [ 0x22, ['short']],
'LastDisplayed': [ 0x24, ['short']],
'FirstCommand': [ 0x26, ['short']],
'CommandCountMax': [ 0x28, ['short']], # AllocateCommandHistory()
'ProcessHandle': [ 0x30, ['address']], # AllocateCommandHistory()
'PopupList': [ 0x38, ['_LIST_ENTRY']], # AllocateCommandHistory()
'CommandBucket': [ 0x48, ['array', lambda x : x.CommandCount, ['pointer', ['_COMMAND']]]],
}],
'_ALIAS': [ None, {
'ListEntry': [ 0x00, ['_LIST_ENTRY']],
'SourceLength': [ 0x10, ['unsigned short']], # AddAlias()
'TargetLength': [ 0x12, ['unsigned short']], # AddAlias()
'Source': [ 0x18, ['pointer', ['String', dict(encoding = 'utf16', length = lambda x : x.SourceLength)]]], # AddAlias()
'Target': [ 0x20, ['pointer', ['String', dict(encoding = 'utf16', length = lambda x : x.TargetLength)]]], # AddAlias()
}],
'_EXE_ALIAS_LIST' : [ None, {
'ListEntry': [ 0x00, ['_LIST_ENTRY']],
'ExeLength': [ 0x10, ['unsigned short']], # AddExeAliasList()
'ExeName': [ 0x18, ['pointer', ['String', dict(encoding = 'utf16', length = lambda x : x.ExeLength * 2)]]], # AddExeAliasList()
'AliasList': [ 0x20, ['_LIST_ENTRY']], # AddExeAliasList()
}],
'_POPUP_LIST' : [ None, {
'ListEntry' : [ 0x00, ['_LIST_ENTRY']],
}],
'_CONSOLE_INFORMATION': [ None, {
'ProcessList': [ 0x28, ['_LIST_ENTRY']], # SrvGetConsoleProcessList()
'CurrentScreenBuffer': [ 0xE0, ['pointer', ['_SCREEN_INFORMATION']]], # AllocateConsole()
'ScreenBuffer': [ 0xE8, ['pointer', ['_SCREEN_INFORMATION']]], # AllocateConsole()
'HistoryList': [ 0x148, ['_LIST_ENTRY']], # AllocateCommandHistory()
'ExeAliasList': [ 0x158, ['_LIST_ENTRY']], # SrvGetConsoleAliasExes()
'HistoryBufferCount': [ 0x168, ['unsigned short']], # AllocateConsole()
'HistoryBufferMax': [ 0x16A, ['unsigned short']], # AllocateConsole()
'CommandHistorySize': [ 0x16C, ['unsigned short']], # AllocateConsole()
'OriginalTitle': [ 0x170, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]], # SrvGetConsoleTitle()
'Title': [ 0x178, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]], # SrvGetConsoleTitle()
}],
'_CONSOLE_PROCESS': [ None, {
'ListEntry': [ 0x00, ['_LIST_ENTRY']],
'ProcessHandle': [ 0x10, ['unsigned int']], # FindProcessInList()
}],
'_SCREEN_INFORMATION': [ None, {
'ScreenX': [ 8, ['short']],
'ScreenY': [ 10, ['short']],
'Rows': [ 0x48, ['pointer', ['array', lambda x : x.ScreenY, ['_ROW']]]],
'Next': [ 0x128, ['pointer', ['_SCREEN_INFORMATION']]],
}],
'_ROW': [ 0x28, {
'Chars': [ 0x08, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]],
}],
}
# Windows XP, 2003, 2008, Vista from winsrv.dll
winsrv_types_x86 = {
'_COMMAND': [ None, {
'CmdLength': [ 0x00, ['unsigned short']],
'Cmd' : [ 0x02, ['String', dict(encoding = 'utf16', length = lambda x : x.CmdLength)]],
}],
'_COMMAND_HISTORY': [ None, {
'Flags' : [ 0x00, ['Flags', {'bitmap': {'Allocated': 0, 'Reset': 1}}]],
'ListEntry': [ 0x04, ['_LIST_ENTRY']],
'Application': [ 0x0C, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]],
'CommandCount': [ 0x10, ['short']],
'LastAdded': [ 0x12, ['short']],
'LastDisplayed': [ 0x14, ['short']],
'FirstCommand': [ 0x16, ['short']],
'CommandCountMax': [ 0x18, ['short']],
'ProcessHandle': [ 0x1C, ['unsigned int']],
'PopupList': [ 0x20, ['_LIST_ENTRY']],
'CommandBucket': [ 0x28, ['array', lambda x : x.CommandCount, ['pointer', ['_COMMAND']]]],
}],
'_ALIAS': [ None, {
'ListEntry': [ 0x00, ['_LIST_ENTRY']],
'SourceLength': [ 0x08, ['unsigned short']],
'TargetLength': [ 0x0A, ['unsigned short']],
'Source': [ 0x0C, ['pointer', ['String', dict(encoding = 'utf16', length = lambda x : x.SourceLength)]]],
'Target': [ 0x10, ['pointer', ['String', dict(encoding = 'utf16', length = lambda x : x.TargetLength)]]],
}],
'_EXE_ALIAS_LIST' : [ None, {
'ListEntry': [ 0x00, ['_LIST_ENTRY']],
'ExeLength': [ 0x08, ['unsigned short']],
'ExeName': [ 0x0C, ['pointer', ['String', dict(encoding = 'utf16', length = lambda x : x.ExeLength * 2)]]],
'AliasList': [ 0x10, ['_LIST_ENTRY']],
}],
'_POPUP_LIST' : [ None, {
'ListEntry' : [ 0x00, ['_LIST_ENTRY']],
}],
'_CONSOLE_INFORMATION': [ None, {
'CurrentScreenBuffer': [ 0xB0, ['pointer', ['_SCREEN_INFORMATION']]],
'ScreenBuffer': [ 0xB4, ['pointer', ['_SCREEN_INFORMATION']]],
'HistoryList': [ 0x108, ['_LIST_ENTRY']],
'ProcessList': [ 0x100, ['_LIST_ENTRY']],
'ExeAliasList': [ 0x110, ['_LIST_ENTRY']],
'HistoryBufferCount': [ 0x118, ['unsigned short']],
'HistoryBufferMax': [ 0x11A, ['unsigned short']],
'CommandHistorySize': [ 0x11C, ['unsigned short']],
'OriginalTitle': [ 0x124, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]],
'Title': [ 0x128, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]],
}],
'_CONSOLE_PROCESS': [ None, {
'ListEntry': [ 0x00, ['_LIST_ENTRY']],
'ProcessHandle': [ 0x08, ['unsigned int']],
'Process': [ 0x0C, ['pointer', ['_CSR_PROCESS']]],
}],
'_SCREEN_INFORMATION': [ None, {
'Console': [ 0x00, ['pointer', ['_CONSOLE_INFORMATION']]],
'ScreenX': [ 0x24, ['short']],
'ScreenY': [ 0x26, ['short']],
'Rows': [ 0x58, ['pointer', ['array', lambda x : x.ScreenY, ['_ROW']]]],
'Next': [ 0xF8, ['pointer', ['_SCREEN_INFORMATION']]],
}],
'_ROW': [ 0x1C, {
'Chars': [ 0x08, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]],
}],
'_CSR_PROCESS' : [ 0x60, { # this is a public PDB
'ClientId' : [ 0x0, ['_CLIENT_ID']],
'ListLink' : [ 0x8, ['_LIST_ENTRY']],
'ThreadList' : [ 0x10, ['_LIST_ENTRY']],
'NtSession' : [ 0x18, ['pointer', ['_CSR_NT_SESSION']]],
'ClientPort' : [ 0x1c, ['pointer', ['void']]],
'ClientViewBase' : [ 0x20, ['pointer', ['unsigned char']]],
'ClientViewBounds' : [ 0x24, ['pointer', ['unsigned char']]],
'ProcessHandle' : [ 0x28, ['pointer', ['void']]],
'SequenceNumber' : [ 0x2c, ['unsigned long']],
'Flags' : [ 0x30, ['unsigned long']],
'DebugFlags' : [ 0x34, ['unsigned long']],
'ReferenceCount' : [ 0x38, ['unsigned long']],
'ProcessGroupId' : [ 0x3c, ['unsigned long']],
'ProcessGroupSequence' : [ 0x40, ['unsigned long']],
'LastMessageSequence' : [ 0x44, ['unsigned long']],
'NumOutstandingMessages' : [ 0x48, ['unsigned long']],
'ShutdownLevel' : [ 0x4c, ['unsigned long']],
'ShutdownFlags' : [ 0x50, ['unsigned long']],
'Luid' : [ 0x54, ['_LUID']],
'ServerDllPerProcessData' : [ 0x5c, ['array', 1, ['pointer', ['void']]]],
}],
}
# Hand-derived vtype layouts for the 64bit console server structures
# (pre-Windows 7, where the console lives in csrss.exe/winsrv - see
# cmdhistory_process_filter below). Each entry is
#   'FieldName': [byte_offset, type_description]
# and a structure size of None means the total size is unknown/unused.
winsrv_types_x64 = {
    '_COMMAND': [ None, {
        'CmdLength': [ 0x00, ['unsigned short']],
        # Length-prefixed UTF-16 command text, CmdLength bytes long.
        'Cmd' : [ 0x02, ['String', dict(encoding = 'utf16', length = lambda x : x.CmdLength)]],
    }],
    '_COMMAND_HISTORY': [ None, {
        'Flags' : [ 0x00, ['Flags', {'bitmap': {'Allocated': 0, 'Reset': 1}}]],
        'ListEntry': [ 0x08, ['_LIST_ENTRY']],
        'Application': [ 0x18, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]],
        'CommandCount': [ 0x20, ['short']],
        'LastAdded': [ 0x22, ['short']],
        'LastDisplayed': [ 0x24, ['short']],
        'FirstCommand': [ 0x26, ['short']],
        'CommandCountMax': [ 0x28, ['short']],
        'ProcessHandle': [ 0x30, ['unsigned int']],
        'PopupList': [ 0x38, ['_LIST_ENTRY']],
        # Array of pointers to _COMMAND slots; sized by CommandCount.
        'CommandBucket': [ 0x48, ['array', lambda x : x.CommandCount, ['pointer', ['_COMMAND']]]],
    }],
    '_ALIAS': [ None, {
        'ListEntry': [ 0x00, ['_LIST_ENTRY']],
        'SourceLength': [ 0x10, ['unsigned short']],
        'TargetLength': [ 0x12, ['unsigned short']],
        'Source': [ 0x14, ['pointer', ['String', dict(encoding = 'utf16', length = lambda x : x.SourceLength)]]],
        'Target': [ 0x1C, ['pointer', ['String', dict(encoding = 'utf16', length = lambda x : x.TargetLength)]]],
    }],
    '_EXE_ALIAS_LIST' : [ None, {
        'ListEntry': [ 0x00, ['_LIST_ENTRY']],
        'ExeLength': [ 0x10, ['unsigned short']],
        # ExeLength is in characters; * 2 converts to UTF-16 bytes.
        'ExeName': [ 0x12, ['pointer', ['String', dict(encoding = 'utf16', length = lambda x : x.ExeLength * 2)]]],
        # NOTE(review): 0x1A looks unaligned for a 64bit list head - confirm.
        'AliasList': [ 0x1A, ['_LIST_ENTRY']],
    }],
    '_POPUP_LIST' : [ None, {
        'ListEntry' : [ 0x00, ['_LIST_ENTRY']],
    }],
    '_CONSOLE_INFORMATION': [ None, {
        'CurrentScreenBuffer': [ 0xE8, ['pointer', ['_SCREEN_INFORMATION']]],
        'ScreenBuffer': [ 0xF0, ['pointer', ['_SCREEN_INFORMATION']]],
        'HistoryList': [ 0x188, ['_LIST_ENTRY']],
        'ProcessList': [ 0x178, ['_LIST_ENTRY']],
        'ExeAliasList': [ 0x198, ['_LIST_ENTRY']],
        'HistoryBufferCount': [ 0x1A8, ['unsigned short']],
        'HistoryBufferMax': [ 0x1AA, ['unsigned short']],
        'CommandHistorySize': [ 0x1AC, ['unsigned short']],
        'OriginalTitle': [ 0x1B0, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]],
        'Title': [ 0x1B8, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]],
    }],
    '_CONSOLE_PROCESS': [ None, {
        'ListEntry': [ 0x00, ['_LIST_ENTRY']],
        'ProcessHandle': [ 0x10, ['unsigned int']],
        'Process': [ 0x18, ['pointer', ['_CSR_PROCESS']]],
    }],
    '_SCREEN_INFORMATION': [ None, {
        'Console': [ 0x00, ['pointer', ['_CONSOLE_INFORMATION']]],
        'ScreenX': [ 0x28, ['short']],
        'ScreenY': [ 0x2A, ['short']],
        # ScreenY rows of ScreenX characters each.
        'Rows': [ 0x68, ['pointer', ['array', lambda x : x.ScreenY, ['_ROW']]]],
        # Singly-linked list of additional screen buffers.
        'Next': [ 0x128, ['pointer', ['_SCREEN_INFORMATION']]],
    }],
    '_ROW': [ 0x28, {
        'Chars': [ 0x08, ['pointer', ['String', dict(encoding = 'utf16', length = 256)]]],
    }],
    '_CSR_PROCESS' : [ 0x60, { # this is a public PDB
        'ClientId' : [ 0x0, ['_CLIENT_ID']],
        'ListLink' : [ 0x8, ['_LIST_ENTRY']],
        'ThreadList' : [ 0x10, ['_LIST_ENTRY']],
        'NtSession' : [ 0x18, ['pointer', ['_CSR_NT_SESSION']]],
        'ClientPort' : [ 0x1c, ['pointer', ['void']]],
        'ClientViewBase' : [ 0x20, ['pointer', ['unsigned char']]],
        'ClientViewBounds' : [ 0x24, ['pointer', ['unsigned char']]],
        'ProcessHandle' : [ 0x28, ['pointer', ['void']]],
        'SequenceNumber' : [ 0x2c, ['unsigned long']],
        'Flags' : [ 0x30, ['unsigned long']],
        'DebugFlags' : [ 0x34, ['unsigned long']],
        'ReferenceCount' : [ 0x38, ['unsigned long']],
        'ProcessGroupId' : [ 0x3c, ['unsigned long']],
        'ProcessGroupSequence' : [ 0x40, ['unsigned long']],
        'LastMessageSequence' : [ 0x44, ['unsigned long']],
        'NumOutstandingMessages' : [ 0x48, ['unsigned long']],
        'ShutdownLevel' : [ 0x4c, ['unsigned long']],
        'ShutdownFlags' : [ 0x50, ['unsigned long']],
        'Luid' : [ 0x54, ['_LUID']],
        'ServerDllPerProcessData' : [ 0x5c, ['array', 1, ['pointer', ['void']]]],
    }],
}
#--------------------------------------------------------------------------------
# Object Classes
#--------------------------------------------------------------------------------
class _CONSOLE_INFORMATION(obj.CType):
    """Object class wrapping a console server _CONSOLE_INFORMATION."""

    def get_histories(self):
        """Yield each _COMMAND_HISTORY linked into HistoryList."""
        for history in self.HistoryList.list_of_type("_COMMAND_HISTORY", "ListEntry"):
            yield history

    def get_exe_aliases(self):
        """Yield the console's _EXE_ALIAS_LIST entries.

        Each executable with registered aliases (for example
        C:\\windows\\system32\\cmd.exe) gets one _EXE_ALIAS_LIST,
        whose AliasList in turn holds one _ALIAS per mapping.
        See GetConsoleAliasExes, GetConsoleAliases and AddConsoleAlias.
        """
        for exe_aliases in self.ExeAliasList.list_of_type("_EXE_ALIAS_LIST", "ListEntry"):
            yield exe_aliases

    def get_processes(self):
        """Yield the _CONSOLE_PROCESS entries attached to this console.

        Several processes can share one console (typically via
        inheritance from a parent or by duplicating another process's
        console handle); each is tracked as a _CONSOLE_PROCESS node in
        this linked list. See GetConsoleProcessList and AttachConsole.
        """
        for proc in self.ProcessList.list_of_type("_CONSOLE_PROCESS", "ListEntry"):
            yield proc

    def get_screens(self):
        """Yield the console's screen buffers.

        A console may own several screen buffers, with only the
        current/active one displayed; extra buffers are chained via the
        singly-linked _SCREEN_INFORMATION.Next pointer.
        See CreateConsoleScreenBuffer.
        """
        heads = [self.CurrentScreenBuffer]
        if self.ScreenBuffer not in heads:
            heads.append(self.ScreenBuffer)
        for head in heads:
            node = head
            while node and node.v() != 0:
                yield node
                node = node.Next.dereference()
class _CONSOLE_PROCESS(obj.CType):
    """Object class for a _CONSOLE_PROCESS list node."""

    def reference_object_by_handle(self):
        """Resolve self.ProcessHandle to an _EPROCESS.

        Walks the handle table of the host process (the grandparent
        object: console -> host task), mirroring what the kernel API
        ObReferenceObjectByHandle would do.
        """
        parent_process = self.obj_parent.obj_parent
        for handle in parent_process.ObjectTable.handles():
            if handle.HandleValue == self.ProcessHandle:
                return handle.dereference_as("_EPROCESS")
        return obj.NoneObject("Could not find process in handle table")
class _SCREEN_INFORMATION(obj.CType):
    """ object class for screen information """

    def get_buffer(self, truncate = True):
        """Get the screen buffer.

        The screen buffer is comprised of the screen's Y
        coordinate which tells us the number of rows and
        the X coordinate which tells us the width of each
        row in characters. These together provide all of
        the input and output that users see when the
        console is displayed.

        @param truncate: True if the empty rows at the
        end (i.e. bottom) of the screen buffer should be
        suppressed.

        @returns a list of row strings (top to bottom).
        """
        rows = []
        for row in self.Rows.dereference():
            if row.Chars.is_valid():
                rows.append(str(row.Chars.dereference())[0:self.ScreenX])
        if truncate:
            # An "empty" row isn't "" as one might assume - it is
            # ScreenX space characters (capped at 128: buffers wider
            # than 128 characters appear truncated to 128 in memory).
            # Walk backwards to find the last non-empty row, then
            # splice off everything below it.
            blank_width = min(self.ScreenX, 128)
            for index, row in enumerate(reversed(rows)):
                if row.count(" ") != blank_width:
                    # BUG FIX: previously index == 0 (bottom row
                    # non-empty) was treated the same as "no non-empty
                    # row found" and the entire buffer was discarded;
                    # now a non-empty bottom row trims nothing.
                    if index:
                        rows = rows[0:len(rows) - index]
                    break
            else:
                # Every row was blank: nothing worth returning.
                rows = []
        return rows
class _EXE_ALIAS_LIST(obj.CType):
    """Object class for a per-executable console alias list."""

    def get_aliases(self):
        """Yield each _ALIAS mapping registered for this executable."""
        for mapping in self.AliasList.list_of_type("_ALIAS", "ListEntry"):
            yield mapping
class _COMMAND_HISTORY(obj.CType):
    """ object class for command histories """

    def is_valid(self, max_history = MAX_HISTORY_DEFAULT): #pylint: disable-msg=W0221
        """Override BaseObject.is_valid with some additional
        checks specific to _COMMAND_HISTORY objects.

        Candidates come from brute-force memory scans (see CmdScan),
        so every field is range-checked against the configured
        maximum before the structure is trusted.

        @param max_history: expected CommandCountMax (default 50).
        """
        if not obj.CType.is_valid(self):
            return False
        # The count must be between zero and max
        if self.CommandCount < 0 or self.CommandCount > max_history:
            return False
        # Last added must be between -1 and max
        if self.LastAdded < -1 or self.LastAdded > max_history:
            return False
        # Last displayed must be between -1 and max
        if self.LastDisplayed < -1 or self.LastDisplayed > max_history:
            return False
        # First command must be between zero and max
        if self.FirstCommand < 0 or self.FirstCommand > max_history:
            return False
        # Validate first command with last added
        if self.FirstCommand != 0 and self.FirstCommand != self.LastAdded + 1:
            return False
        # Process handle must be a valid pid
        if self.ProcessHandle <= 0 or self.ProcessHandle > 0xFFFF:
            return False
        # Follow the popup list forward link and require that its back
        # link points straight back at us.
        Popup = obj.Object("_POPUP_LIST", offset = self.PopupList.Flink,
                           vm = self.obj_vm)
        # Check that the popup list entry is in tact
        if Popup.ListEntry.Blink != self.PopupList.obj_offset:
            return False
        return True

    def get_commands(self):
        """Generator for commands in the history buffer.

        The CommandBucket is an array of pointers to _COMMAND
        structures. The array size is CommandCount. Once CommandCount
        is reached, the oldest commands are cycled out and the
        rest are coalesced.

        Yields (slot_index, _COMMAND) pairs for non-NULL slots.
        """
        for i, cmd in enumerate(self.CommandBucket):
            if cmd:
                yield i, cmd.dereference()
#--------------------------------------------------------------------------------
# Profile Modifications
#--------------------------------------------------------------------------------
class CmdHistoryVTypesx86(obj.ProfileModification):
    """Applies the 32bit winsrv vtypes for Windows releases
    before Windows 7 (i.e. major.minor < 6.1)."""
    before = ['WindowsObjectClasses']

    def check(self, profile):
        meta = profile.metadata
        if meta.get('os', None) != 'windows':
            return False
        if meta.get('memory_model', '32bit') != '32bit':
            return False
        major, minor = meta.get('major'), meta.get('minor')
        return major < 6 or (major == 6 and minor < 1)

    def modification(self, profile):
        profile.vtypes.update(winsrv_types_x86)
class CmdHistoryVTypesx64(obj.ProfileModification):
    """Applies the 64bit winsrv vtypes for Windows releases
    before Windows 7 (i.e. major.minor < 6.1)."""
    before = ['WindowsObjectClasses']

    def check(self, profile):
        meta = profile.metadata
        if meta.get('os', None) != 'windows':
            return False
        if meta.get('memory_model', '32bit') != '64bit':
            return False
        major, minor = meta.get('major'), meta.get('minor')
        return major < 6 or (major == 6 and minor < 1)

    def modification(self, profile):
        profile.vtypes.update(winsrv_types_x64)
class CmdHistoryVTypesWin7x86(obj.ProfileModification):
    """Applies the 32bit conhost vtypes for Windows 7 and later
    (major 6, minor >= 1)."""
    before = ['WindowsObjectClasses']
    conditions = {
        'os': lambda os_name: os_name == 'windows',
        'major': lambda major: major == 6,
        'minor': lambda minor: minor >= 1,
        'memory_model': lambda model: model == '32bit',
    }

    def modification(self, profile):
        profile.vtypes.update(conhost_types_x86)
class CmdHistoryVTypesWin7x64(obj.ProfileModification):
    """Applies the 64bit conhost vtypes for Windows 7 and later
    (major 6, minor >= 1)."""
    before = ['WindowsObjectClasses']
    conditions = {
        'os': lambda os_name: os_name == 'windows',
        'major': lambda major: major == 6,
        'minor': lambda minor: minor >= 1,
        'memory_model': lambda model: model == '64bit',
    }

    def modification(self, profile):
        profile.vtypes.update(conhost_types_x64)
class CmdHistoryObjectClasses(obj.ProfileModification):
    """Registers the console object classes defined above for every
    Windows profile (32bit and 64bit alike)."""
    before = ['WindowsObjectClasses']
    conditions = {'os': lambda os_name: os_name == 'windows'}

    def modification(self, profile):
        object_classes = {
            '_CONSOLE_INFORMATION': _CONSOLE_INFORMATION,
            '_SCREEN_INFORMATION': _SCREEN_INFORMATION,
            '_EXE_ALIAS_LIST': _EXE_ALIAS_LIST,
            '_COMMAND_HISTORY': _COMMAND_HISTORY,
            '_CONSOLE_PROCESS': _CONSOLE_PROCESS,
        }
        profile.object_classes.update(object_classes)
#--------------------------------------------------------------------------------
# CmdScan Plugin
#--------------------------------------------------------------------------------
class CmdScan(common.AbstractWindowsCommand):
    """Extract command history by scanning for _COMMAND_HISTORY"""

    def __init__(self, config, *args, **kwargs):
        common.AbstractWindowsCommand.__init__(self, config, *args, **kwargs)
        # The default comes from HKCU\Console\HistoryBufferSize
        config.add_option('MAX_HISTORY', short_option = 'M', default = MAX_HISTORY_DEFAULT,
                          action = 'store', type = 'int',
                          help = 'CommandCountMax (default = 50)')

    def cmdhistory_process_filter(self, addr_space):
        """Generator for processes that might contain command
        history information.

        On Windows 7 and later the console host is conhost.exe; on
        earlier versions the console server lives in csrss.exe.

        @param addr_space: a kernel address space.
        """
        # Detect if we're on windows seven (major.minor >= 6.1)
        use_conhost = (6, 1) <= (addr_space.profile.metadata.get('major', 0),
                                 addr_space.profile.metadata.get('minor', 0))
        for task in tasks.pslist(addr_space):
            process_name = str(task.ImageFileName).lower()
            # The process we select is conhost on Win7 or csrss for others
            if ((use_conhost and process_name == "conhost.exe") or
                    (not use_conhost and process_name == "csrss.exe")):
                yield task

    def calculate(self):
        """The default pattern we search for, as described by Stevens and Casey,
        is "\x32\x00". That's because CommandCountMax is a little-endian
        unsigned short whose default value is 50. However, that value can be
        changed by right clicking cmd.exe and going to Properties->Options->Cmd History
        or by calling the API function kernel32!SetConsoleHistoryInfo. Thus
        you can tweak the search criteria by using the --MAX_HISTORY.

        Yields (task, _COMMAND_HISTORY) pairs that pass validation.
        """
        import struct

        addr_space = utils.load_as(self._config)
        MAX_HISTORY = self._config.MAX_HISTORY
        # BUG FIX: the pattern was previously built with
        # chr(MAX_HISTORY) + "\x00", which raises ValueError for any
        # --MAX_HISTORY above 255. Packing a little-endian unsigned
        # short matches the field's real type for the whole 0-65535
        # range and produces identical bytes for the default of 50.
        srch_pattern = struct.pack("<H", MAX_HISTORY)
        for task in self.cmdhistory_process_filter(addr_space):
            process_space = task.get_process_address_space()
            for found in task.search_process_memory([srch_pattern]):
                # Rewind from the hit on CommandCountMax to the start
                # of the candidate structure, then sanity-check it.
                hist = obj.Object("_COMMAND_HISTORY",
                                  vm = process_space,
                                  offset = found - addr_space.profile.\
                                  get_obj_offset("_COMMAND_HISTORY", "CommandCountMax"))
                if hist.is_valid(max_history = MAX_HISTORY):
                    yield task, hist

    def render_text(self, outfd, data):
        """Write one header block plus the recovered command slots for
        every (task, history) pair produced by calculate()."""
        for task, hist in data:
            outfd.write("*" * 50 + "\n")
            outfd.write("CommandProcess: {0} Pid: {1}\n".format(
                task.ImageFileName, task.UniqueProcessId))
            outfd.write("CommandHistory: {0:#x} Application: {1} Flags: {2}\n".format(
                hist.obj_offset, hist.Application.dereference(),
                hist.Flags))
            outfd.write("CommandCount: {0} LastAdded: {1} LastDisplayed: {2}\n".format(
                hist.CommandCount, hist.LastAdded, hist.LastDisplayed))
            outfd.write("FirstCommand: {0} CommandCountMax: {1}\n".format(
                hist.FirstCommand, hist.CommandCountMax))
            outfd.write("ProcessHandle: {0:#x}\n".format(hist.ProcessHandle))
            # If the _COMMAND_HISTORY is in use, we would only take
            # hist.CommandCount but since we're brute forcing, try the
            # maximum and hope that some slots were not overwritten
            # or zero-ed out.
            pointers = obj.Object("Array", targetType = "address",
                                  count = hist.CommandCountMax,
                                  offset = hist.obj_offset +
                                  hist.obj_vm.profile.get_obj_offset("_COMMAND_HISTORY", "CommandBucket"),
                                  vm = hist.obj_vm)
            for i, p in enumerate(pointers):
                cmd = p.dereference_as("_COMMAND")
                if cmd and str(cmd.Cmd):
                    outfd.write("Cmd #{0} @ {1:#x}: {2}\n".format(
                        i, cmd.obj_offset, str(cmd.Cmd)))
#--------------------------------------------------------------------------------
# Consoles Plugin
#--------------------------------------------------------------------------------
class Consoles(CmdScan):
    """Extract command history by scanning for _CONSOLE_INFORMATION"""

    def __init__(self, config, *args, **kwargs):
        CmdScan.__init__(self, config, *args, **kwargs)
        # The default comes from HKCU\Console\NumberOfHistoryBuffers
        config.add_option('HISTORY_BUFFERS', short_option = 'B', default = 4,
                          action = 'store', type = 'int',
                          help = 'HistoryBufferMax (default = 4)')

    def calculate(self):
        """Scan console host processes for _CONSOLE_INFORMATION by
        searching for the CommandHistorySize value (--MAX_HISTORY),
        then validate candidates via the history-buffer counts and the
        first linked _COMMAND_HISTORY.

        Yields (task, _CONSOLE_INFORMATION) pairs.
        """
        import struct

        addr_space = utils.load_as(self._config)
        # BUG FIX: chr(MAX_HISTORY) + "\x00" raises ValueError for any
        # --MAX_HISTORY above 255; pack the value as the little-endian
        # unsigned short it really is (same bytes for the default 50).
        srch_pattern = struct.pack("<H", self._config.MAX_HISTORY)
        for task in self.cmdhistory_process_filter(addr_space):
            for found in task.search_process_memory([srch_pattern]):
                # Rewind from the hit on CommandHistorySize to the
                # start of the candidate console structure.
                console = obj.Object("_CONSOLE_INFORMATION",
                                     offset = found -
                                     addr_space.profile.get_obj_offset("_CONSOLE_INFORMATION", "CommandHistorySize"),
                                     vm = task.get_process_address_space(),
                                     parent = task)
                if (console.HistoryBufferMax != self._config.HISTORY_BUFFERS or
                        console.HistoryBufferCount > self._config.HISTORY_BUFFERS):
                    continue
                # Check the first command history as the final constraint
                history = obj.Object("_COMMAND_HISTORY",
                                     offset = console.HistoryList.Flink.dereference().obj_offset -
                                     addr_space.profile.get_obj_offset("_COMMAND_HISTORY", "ListEntry"),
                                     vm = task.get_process_address_space())
                if history.CommandCountMax != self._config.MAX_HISTORY:
                    continue
                yield task, console

    def render_text(self, outfd, data):
        """Print each console with its attached processes, command
        histories, exe aliases and screen buffer dumps."""
        for task, console in data:
            outfd.write("*" * 50 + "\n")
            outfd.write("ConsoleProcess: {0} Pid: {1}\n".format(
                task.ImageFileName, task.UniqueProcessId))
            outfd.write("Console: {0:#x} CommandHistorySize: {1}\n".format(
                console.obj_offset, console.CommandHistorySize))
            outfd.write("HistoryBufferCount: {0} HistoryBufferMax: {1}\n".format(
                console.HistoryBufferCount, console.HistoryBufferMax))
            outfd.write("OriginalTitle: {0}\n".format(console.OriginalTitle.dereference()))
            outfd.write("Title: {0}\n".format(console.Title.dereference()))
            for console_proc in console.get_processes():
                process = console_proc.reference_object_by_handle()
                if process:
                    outfd.write("AttachedProcess: {0} Pid: {1} Handle: {2:#x}\n".format(
                        process.ImageFileName, process.UniqueProcessId,
                        console_proc.ProcessHandle))
            for hist in console.get_histories():
                outfd.write("----\n")
                outfd.write("CommandHistory: {0:#x} Application: {1} Flags: {2}\n".format(
                    hist.obj_offset, hist.Application.dereference(),
                    hist.Flags))
                outfd.write("CommandCount: {0} LastAdded: {1} LastDisplayed: {2}\n".format(
                    hist.CommandCount, hist.LastAdded, hist.LastDisplayed))
                outfd.write("FirstCommand: {0} CommandCountMax: {1}\n".format(
                    hist.FirstCommand, hist.CommandCountMax))
                outfd.write("ProcessHandle: {0:#x}\n".format(hist.ProcessHandle))
                for i, cmd in hist.get_commands():
                    if cmd.Cmd:
                        outfd.write("Cmd #{0} at {1:#x}: {2}\n".format(
                            i, cmd.obj_offset, str(cmd.Cmd)))
            for exe_alias in console.get_exe_aliases():
                for alias in exe_alias.get_aliases():
                    outfd.write("----\n")
                    outfd.write("Alias: {0} Source: {1} Target: {2}\n".format(
                        exe_alias.ExeName.dereference(), alias.Source.dereference(),
                        alias.Target.dereference()))
            for screen in console.get_screens():
                outfd.write("----\n")
                outfd.write("Screen {0:#x} X:{1} Y:{2}\n".format(
                    screen.dereference(), screen.ScreenX, screen.ScreenY))
                outfd.write("Dump:\n{0}\n".format('\n'.join(screen.get_buffer())))
|
gpl-2.0
|
superdesk/superdesk-content-api
|
content_api/packages/service.py
|
1
|
2020
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import logging
from content_api.items.service import ItemsService
logger = logging.getLogger(__name__)
class PackagesService(ItemsService):
    """
    A service that knows how to perform CRUD operations on the `package`
    content types.

    Serves mainly as a proxy to the data layer.
    """

    def on_fetched_item(self, document):
        """Event handler when a single package is retrieved from database.

        Sets the `uri` field on all associated (referenced) objects.

        :param dict document: fetched MongoDB document representing the package
        """
        self._process_referenced_objects(document)
        super().on_fetched_item(document)

    def on_fetched(self, result):
        """Event handler when a collection of packages is retrieved from
        database.

        Sets the `uri` field on every object referenced by each fetched
        package.

        :param dict result: dictionary containing the list of MongoDB documents
            (the fetched packages) and some metadata, e.g. pagination info
        """
        for package in result['_items']:
            self._process_referenced_objects(package)
        super().on_fetched(result)

    def _process_referenced_objects(self, document):
        """Post-process the objects referenced by `document`.

        Each referenced object gets a generated `uri` field and loses
        its internal `_id` field.

        :param dict document: MongoDB document representing a package object
        """
        associations = document.get('associations', {})
        for referenced in associations.values():
            referenced['uri'] = self._get_uri(referenced)
            referenced.pop('_id', None)
|
agpl-3.0
|
PiRSquared17/damnvid
|
dModules.py
|
12
|
5307
|
# -*- coding: utf-8 -*-
from dCore import *
from dLog import *
from dTubes import *
import time
import tarfile
import shutil
class DamnVideoModule:
    """Base class for DamnVid video-site modules.

    A module wraps one URI and knows how to resolve the video's title,
    its download ticket and its conversion settings.
    """

    def __init__(self, uri):
        self.name = 'generic'
        self.uri = uri
        self.link = None
        self.id = None
        self.valid = None
        self.title = None
        self.ticket = None
        self.ticketdate = 0
        # Patterns used to scrape metadata from the page.
        self.regex = {
            'title': DV.generic_title_extract
        }

    def isUp(self):
        """Whether the remote service is reachable (generic: always)."""
        return True

    def validURI(self):
        return bool(self.valid)

    def getLink(self):
        return DamnUnicode(self.link)

    def getURI(self):
        return DamnUnicode(self.uri)

    def getID(self):
        return self.id

    def getStorage(self):
        return DV.modulesstorage[self.name]

    def getTitle(self):
        """Scrape and cache the title; fall back to a placeholder."""
        if self.title is None:
            page = DamnURLGetAll(self.link, onerror='')
            patterns = self.regex['title']
            if type(patterns) not in (tuple, list):
                patterns = (patterns,)
            for pattern in patterns:
                match = pattern.search(page)
                if match:
                    self.title = DamnHtmlEntities(match.group(1))
                    break
        if self.title is None:
            return DV.l('Unknown title')
        return DamnUnicode(self.title)

    def getIcon(self):
        return self.name

    def pref(self, pref, value=None):
        """Get (value is None) or set a per-module preference."""
        if value is None:
            return DV.prefs.getm(self.name, pref)
        return DV.prefs.setm(self.name, pref, value)

    def newTicket(self, ticket):
        self.ticket = ticket
        self.ticketdate = time.time()

    def getProfile(self):
        return self.pref('profile')

    def getOutdir(self):
        return self.pref('outdir')

    def renewTicket(self):
        if self.ticket is None:
            self.newTicket(self.uri)

    def getDownload(self):
        self.renewTicket()
        return self.ticket

    def getFFmpegArgs(self):
        return []

    def getDownloadGetter(self):
        return self.getDownload

    def addVid(self, parent):
        parent.addValid(self.getVidObject())

    def getVidObject(self):
        """Build the dict describing this video for the main list."""
        obj = {
            'name': DamnUnicode(self.getTitle()),
            'profile': self.getProfile(),
            'profilemodified': False,
            'fromfile': DamnUnicode(self.getTitle()),
            'dirname': DamnUnicode(self.getLink()),
            'uri': DamnUnicode(self.getID()),
            'status': DV.l('Pending.'),
            'icon': self.getIcon(),
            'module': self,
            'downloadgetter': self.getDownloadGetter(),
        }
        Damnlog('Module', self.name, 'returning video object:', obj)
        return obj
def DamnInstallModule(module):
    """Install a DamnVid module from a tarball.

    The archive must contain exactly one top-level directory; that
    directory name becomes the module name.

    @param module: path to the tarball on disk.
    @returns one of 'nofile', 'nomodule' or 'success'.
    """
    Damnlog('Attempting to install module', module)
    if not os.path.exists(module):
        return 'nofile'
    if not tarfile.is_tarfile(module):
        return 'nomodule'
    mod = tarfile.open(module, 'r')
    files = mod.getnames()
    if not len(files):
        return 'nomodule'
    # Every member must live under a single common "prefix/" directory.
    if files[0].find('/') in (-1, 0):
        return 'nomodule'
    prefix = files[0][0:files[0].find('/') + 1]
    for i in files:
        if i.find('/') in (-1, 0):
            return 'nomodule'
        if i[0:i.find('/') + 1] != prefix:
            return 'nomodule'
    # Replace any previous installation of the same module.
    if os.path.exists(DV.modules_path + prefix):
        if os.path.isdir(DV.modules_path + prefix):
            shutil.rmtree(DV.modules_path + prefix)
        else:
            os.remove(DV.modules_path + prefix)
    # NOTE(review): tarfile.extractall does not reject members with
    # ".." path segments, so a crafted archive whose names start with
    # "prefix/../" would pass the checks above yet write outside
    # modules_path - consider validating member paths here.
    mod.extractall(DV.modules_path)
    try:
        DV.prefs.rems('damnvid-module-' + prefix[0:-1]) # Reset module preferences when installing it.
        DV.prefs.save()
    except Exception:
        # BUG FIX: this was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit; the best-effort preference
        # reset only needs to ignore ordinary errors.
        Damnlog('Resetting module preferences for module', module, '(probably not installed or left default before)')
    DamnLoadModule(DV.modules_path + prefix[0:-1])
    Damnlog('Success installing module', module)
    return 'success'
def DamnIterModules(keys=True): # Lawl, this spells "DamnIt"
    """Iterate the registered modules in name order.

    @param keys: when True return the sorted module names; otherwise
        return the module descriptor dicts in the same order.
    """
    # sorted() instead of dict.keys() + list.sort(): identical result,
    # but it does not rely on Python 2's list-returning keys() (dict
    # views have no .sort()), keeping this portable.
    names = sorted(DV.modules)
    if keys:
        return names
    return [DV.modules[name] for name in names]
def DamnRegisterModule(module):
    # Register a module descriptor dict into the global DV registries.
    # Keys used from `module`: name, class, and optionally minversion,
    # register (with listicons), preferences and preferences_order.
    # NOTE(review): nesting below was reconstructed from a
    # whitespace-stripped source - confirm against the original file.
    Damnlog('Attempting to register module', module['name'])
    if module.has_key('minversion'):
        # Refuse modules that need a newer DamnVid than this one.
        if DamnVersionCompare(module['minversion'], DV.version)==1:
            return 'minversion'
    DV.modules[module['name']] = module
    DV.modulesstorage[module['name']] = {}
    if module.has_key('register'):
        module['class'].register = {}
        if module['register'].has_key('listicons'):
            # Load the module's custom list icons into the shared store.
            module['class'].register['listicons'] = {}
            for icon in module['register']['listicons'].iterkeys():
                DV.listicons.add(DV.modules_path + module['name'] + DV.sep + module['register']['listicons'][icon], icon)
    if module.has_key('preferences'):
        # Merge the module's preference declarations into the global
        # tables, namespaced as "damnvid-module-<name>:<pref>".
        for pref in module['preferences'].iterkeys():
            DV.preferences['damnvid-module-' + module['name'] + ':' + pref] = module['preferences'][pref]
            DV.defaultprefs['damnvid-module-' + module['name'] + ':' + pref] = module['preferences'][pref]['default']
            if module['preferences'][pref]['kind'] == 'dir':
                # Directory-valued preferences get path normalization.
                DV.path_prefs.append('damnvid-module-' + module['name'] + ':' + pref)
        if module.has_key('preferences_order'):
            DV.preference_order['damnvid-module-' + module['name']] = module['preferences_order']
        else:
            DV.preference_order['damnvid-module-' + module['name']] = module['preferences'].keys()
    Damnlog('Module registered:', module['name'])
def DamnGetAlternateModule(uri):
    """Ask the URL grabber which module can handle `uri`.

    Spawns a DamnVideoLoader, polls it until it finishes, and returns
    the matched module, or None when nothing claims the URI.
    """
    Damnlog('Got request to get new module for URI:', uri)
    loader = DamnVideoLoader(None, [uri], feedback=False, allownonmodules=False)
    loader.start()
    # Poll the worker until it reports completion.
    time.sleep(.1)
    while not loader.done:
        time.sleep(.05)
    result = loader.result
    loader.done = False
    if result is None:
        Damnlog('No module found for URI:',uri,'; DamnGetAlternateModule returning None.')
        return None
    Damnlog('Module found for URI:',uri,'; returning', result['module'])
    return result['module']
from dIsolatedModule import *
|
gpl-3.0
|
schleichdi2/OPENNFR-6.1-CORE
|
bitbake/lib/toaster/tests/browser/test_new_project_page.py
|
4
|
4101
|
#! /usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# BitBake Toaster Implementation
#
# Copyright (C) 2013-2016 Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from django.core.urlresolvers import reverse
from tests.browser.selenium_helpers import SeleniumTestCase
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import InvalidElementStateException
from orm.models import Project, Release, BitbakeVersion
class TestNewProjectPage(SeleniumTestCase):
    """ Test project data at /project/X/ is displayed correctly """

    def setUp(self):
        """Create one bitbake version and two releases; new projects in
        the tests below are created against self.release."""
        bitbake, _ = BitbakeVersion.objects.get_or_create(
            name="master",
            giturl="git://master",
            branch="master",
            dirpath="master")
        # A second, unused release so the version <select> has choices.
        Release.objects.get_or_create(name="msater",
                                      description="masterrelease",
                                      branch_name="master",
                                      helptext="latest",
                                      bitbake_version=bitbake)
        self.release, _ = Release.objects.get_or_create(
            name="msater2",
            description="master2release2",
            branch_name="master2",
            helptext="latest2",
            bitbake_version=bitbake)

    def test_create_new_project(self):
        """A project can be created from the new-project form."""
        project_name = "masterproject"
        self.get(reverse('newproject'))
        self.enter_text('#new-project-name', project_name)
        Select(self.find('#projectversion')).select_by_value(str(self.release.pk))
        self.click("#create-project-button")
        # We should get redirected to the new project's page with the
        # notification at the top
        element = self.wait_until_visible('#project-created-notification')
        self.assertTrue(project_name in element.text,
                        "New project name not in new project notification")
        self.assertTrue(Project.objects.filter(name=project_name).count(),
                        "New project not found in database")

    def test_new_duplicates_project_name(self):
        """
        Should not be able to create a new project whose name is the same
        as an existing project
        """
        project_name = "dupproject"
        Project.objects.create_project(name=project_name,
                                       release=self.release)
        self.get(reverse('newproject'))
        self.enter_text('#new-project-name', project_name)
        Select(self.find('#projectversion')).select_by_value(str(self.release.pk))
        element = self.wait_until_visible('#hint-error-project-name')
        self.assertTrue(("Project names must be unique" in element.text),
                        "Did not find unique project name error message")
        # Try and click it anyway; if it submits we'll have a second
        # project in the db and the count assertion below catches it.
        try:
            self.click("#create-project-button")
        except InvalidElementStateException:
            pass
        self.assertTrue(
            (Project.objects.filter(name=project_name).count() == 1),
            "New project not found in database")
|
gpl-2.0
|
opavader/ansible
|
lib/ansible/plugins/lookup/ini.py
|
82
|
3304
|
# (c) 2015, Yannig Perre <yannig.perre(at)gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import StringIO
import os
import codecs
import ConfigParser
import re
from ansible.errors import *
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
    """Look up values from INI-style files (or java .properties files
    wrapped in a synthetic section)."""

    def read_properties(self, filename, key, dflt, is_regexp):
        """Read a java .properties file by prefixing a fake
        [java_properties] section header so ConfigParser accepts it."""
        config = StringIO.StringIO()
        config.write('[java_properties]\n' + open(filename).read())
        config.seek(0, os.SEEK_SET)
        self.cp.readfp(config)
        return self.get_value(key, 'java_properties', dflt, is_regexp)

    def read_ini(self, filename, key, section, dflt, is_regexp):
        """Read a plain INI file and look up `key` in `section`."""
        self.cp.readfp(open(filename))
        return self.get_value(key, section, dflt, is_regexp)

    def get_value(self, key, section, dflt, is_regexp):
        """Return the value(s) for `key`: a list of all matches when
        `key` is treated as a regexp, else a single value or `dflt`."""
        # Retrieve all values from a section using a regexp
        if is_regexp:
            return [v for k, v in self.cp.items(section) if re.match(key, k)]
        # Retrieve a single value, falling back to the default
        try:
            return self.cp.get(section, key)
        except ConfigParser.NoOptionError:
            return dflt

    def run(self, terms, variables=None, **kwargs):
        """Entry point. Each term is '<key> [param=value ...]' where
        params are: file, re, default, section, type."""
        basedir = self.get_basedir(variables)
        self.basedir = basedir
        self.cp = ConfigParser.ConfigParser()
        ret = []
        for term in terms:
            params = term.split()
            key = params[0]
            paramvals = {
                'file' : 'ansible.ini',
                're' : False,
                'default' : None,
                'section' : "global",
                'type' : "ini",
            }
            # parameters specified?
            try:
                for param in params[1:]:
                    name, value = param.split('=')
                    assert(name in paramvals)
                    paramvals[name] = value
            except (ValueError, AssertionError) as e:
                # BUG FIX: this previously raised errors.AnsibleError,
                # but no `errors` name is in scope (this file only does
                # `from ansible.errors import *`), so any bad parameter
                # crashed with a NameError instead of a lookup error.
                raise AnsibleError(e)
            path = self._loader.path_dwim_relative(basedir, 'files', paramvals['file'])
            if paramvals['type'] == "properties":
                var = self.read_properties(path, key, paramvals['default'], paramvals['re'])
            else:
                var = self.read_ini(path, key, paramvals['section'], paramvals['default'], paramvals['re'])
            if var is not None:
                if type(var) is list:
                    ret.extend(var)
                else:
                    ret.append(var)
        return ret
|
gpl-3.0
|
musicpax/funcy
|
tests/test_debug.py
|
2
|
3333
|
import re
import time
from funcy.debug import *
from funcy.flow import silent
def test_tap():
    """tap prints its argument and a label, when given, prefixes the output."""
    plain = capture(tap, 42)
    assert plain == '42\n'
    labeled = capture(tap, 42, label='Life and ...')
    assert labeled == 'Life and ...: 42\n'
def test_log_calls():
    """log_calls records the call itself and the value it returned."""
    entries = []

    @log_calls(entries.append)
    def f(x, y):
        return x + y

    f(1, 2)
    f('a', 'b')

    expected = [
        "Call f(1, 2)",
        "-> 3 from f(1, 2)",
        "Call f('a', 'b')",
        "-> 'ab' from f('a', 'b')",
    ]
    assert entries == expected
def test_print_calls():
    """print_calls works both as a bare decorator and as a factory."""
    def f(x, y):
        return x + y

    # BUG FIX: these lines were bare expressions with trailing commas
    # (no-op tuples), so the test previously asserted nothing at all.
    assert capture(print_calls(f), 1, 2) == "Call f(1, 2)\n-> 3 from f(1, 2)\n"
    assert capture(print_calls()(f), 1, 2) == "Call f(1, 2)\n-> 3 from f(1, 2)\n"
def test_log_calls_raise():
    """With stack=False only a one-line exception summary is logged."""
    entries = []

    @log_calls(entries.append, stack=False)
    def f():
        raise Exception('something bad')

    silent(f)()

    expected = [
        "Call f()",
        "-> Exception: something bad raised in f()",
    ]
    assert entries == expected
def test_log_errors():
    """Only the failing call is logged, as a traceback ending in f(0)."""
    entries = []

    @log_errors(entries.append)
    def f(x):
        return 1 / x

    silent(f)(1)
    silent(f)(0)

    assert len(entries) == 1
    entry = entries[0]
    assert entry.startswith('Traceback')
    assert re.search(r'ZeroDivisionError: .*\n raised in f\(0\)$', entry)
def test_log_errors_manager():
    """log_errors as a context manager logs and re-raises; stack=False
    produces a one-line entry with the given label."""
    log = []

    try:
        with log_errors(log.append):
            1 / 0
    except ZeroDivisionError:
        pass

    try:
        with log_errors(log.append, 'name check', stack=False):
            hey
    except NameError:
        pass

    assert len(log) == 2
    # Removed a leftover debugging print(log) that spammed test output.
    assert log[0].startswith('Traceback')
    assert re.search(r'ZeroDivisionError: .* zero\s*$', log[0])
    assert not log[1].startswith('Traceback')
    assert re.search(r"NameError: (global )?name 'hey' is not defined raised in name check", log[1])
def test_print_errors():
    """print_errors preserves the wrapped name and prints the failure,
    with or without a traceback depending on stack=."""
    def error():
        1 / 0

    with_stack = print_errors(error)
    assert with_stack.__name__ == 'error'
    assert 'ZeroDivisionError' in capture(silent(with_stack))

    no_stack = print_errors(stack=False)(error)
    assert no_stack.__name__ == 'error'
    assert capture(silent(no_stack)).startswith('ZeroDivisionError')
def test_print_errors_manager():
    """print_errors can also be used directly as a context manager."""
    @silent
    def boom():
        with print_errors:
            1 / 0

    assert 'ZeroDivisionError' in capture(boom)
    assert capture(boom).startswith('Traceback')
def test_print_errors_recursion():
    """The outermost call is the one reported when recursion is involved."""
    # Name `f` is significant: it appears in the asserted output below.
    @silent
    @print_errors(stack=False)
    def f(n):
        if n:
            f(0)
        1 / 0

    output = capture(f, 1)
    assert 'f(1)' in output
def test_log_durations():
    """Durations are logged as '<ms> ms in <label>' and land near 10 ms."""
    entries = []

    @log_durations(entries.append)
    def f():
        time.sleep(0.010)

    f()
    with log_durations(entries.append, 'hello'):
        time.sleep(0.010)

    pattern = re.compile(r'^\s*(\d+\.\d+) ms in (f\(\)|hello)$')
    for entry in entries:
        match = pattern.search(entry)
        assert match
        assert 10 <= float(match.group(1)) < 20
def test_log_iter_dirations():  # sic: typo'd name kept so the test ID stays stable
    """log_iter_durations emits one log entry per yielded item."""
    entries = []
    for _ in log_iter_durations([1, 2], entries.append):
        pass
    assert len(entries) == 2
### An utility to capture stdout
import sys
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
def capture(command, *args, **kwargs):
    """Run `command`, swallowing its stdout, and return everything it printed."""
    real_stdout = sys.stdout
    sys.stdout = StringIO()
    try:
        command(*args, **kwargs)
        sys.stdout.seek(0)
        return sys.stdout.read()
    finally:
        # Always restore the real stream, even if `command` raised.
        sys.stdout = real_stdout
|
bsd-3-clause
|
hobbe/notifry-o
|
appengine/web/browser.py
|
103
|
7679
|
"""Browser to test web applications.
(from web.py)
"""
from utils import re_compile
from net import htmlunquote
import httplib, urllib, urllib2
import copy
from StringIO import StringIO
DEBUG = False
__all__ = [
"BrowserError",
"Browser", "AppBrowser",
"AppHandler"
]
class BrowserError(Exception):
    """Raised when a browser operation (link or form lookup) fails."""
class Browser:
def __init__(self):
import cookielib
self.cookiejar = cookielib.CookieJar()
self._cookie_processor = urllib2.HTTPCookieProcessor(self.cookiejar)
self.form = None
self.url = "http://0.0.0.0:8080/"
self.path = "/"
self.status = None
self.data = None
self._response = None
self._forms = None
def reset(self):
"""Clears all cookies and history."""
self.cookiejar.clear()
def build_opener(self):
"""Builds the opener using urllib2.build_opener.
Subclasses can override this function to prodive custom openers.
"""
return urllib2.build_opener()
def do_request(self, req):
if DEBUG:
print 'requesting', req.get_method(), req.get_full_url()
opener = self.build_opener()
opener.add_handler(self._cookie_processor)
try:
self._response = opener.open(req)
except urllib2.HTTPError, e:
self._response = e
self.url = self._response.geturl()
self.path = urllib2.Request(self.url).get_selector()
self.data = self._response.read()
self.status = self._response.code
self._forms = None
self.form = None
return self.get_response()
def open(self, url, data=None, headers={}):
"""Opens the specified url."""
url = urllib.basejoin(self.url, url)
req = urllib2.Request(url, data, headers)
return self.do_request(req)
def show(self):
"""Opens the current page in real web browser."""
f = open('page.html', 'w')
f.write(self.data)
f.close()
import webbrowser, os
url = 'file://' + os.path.abspath('page.html')
webbrowser.open(url)
def get_response(self):
"""Returns a copy of the current response."""
return urllib.addinfourl(StringIO(self.data), self._response.info(), self._response.geturl())
def get_soup(self):
"""Returns beautiful soup of the current document."""
import BeautifulSoup
return BeautifulSoup.BeautifulSoup(self.data)
def get_text(self, e=None):
"""Returns content of e or the current document as plain text."""
e = e or self.get_soup()
return ''.join([htmlunquote(c) for c in e.recursiveChildGenerator() if isinstance(c, unicode)])
def _get_links(self):
soup = self.get_soup()
return [a for a in soup.findAll(name='a')]
def get_links(self, text=None, text_regex=None, url=None, url_regex=None, predicate=None):
"""Returns all links in the document."""
return self._filter_links(self._get_links(),
text=text, text_regex=text_regex, url=url, url_regex=url_regex, predicate=predicate)
def follow_link(self, link=None, text=None, text_regex=None, url=None, url_regex=None, predicate=None):
if link is None:
links = self._filter_links(self.get_links(),
text=text, text_regex=text_regex, url=url, url_regex=url_regex, predicate=predicate)
link = links and links[0]
if link:
return self.open(link['href'])
else:
raise BrowserError("No link found")
def find_link(self, text=None, text_regex=None, url=None, url_regex=None, predicate=None):
links = self._filter_links(self.get_links(),
text=text, text_regex=text_regex, url=url, url_regex=url_regex, predicate=predicate)
return links and links[0] or None
def _filter_links(self, links,
text=None, text_regex=None,
url=None, url_regex=None,
predicate=None):
predicates = []
if text is not None:
predicates.append(lambda link: link.string == text)
if text_regex is not None:
predicates.append(lambda link: re_compile(text_regex).search(link.string or ''))
if url is not None:
predicates.append(lambda link: link.get('href') == url)
if url_regex is not None:
predicates.append(lambda link: re_compile(url_regex).search(link.get('href', '')))
if predicate:
predicate.append(predicate)
def f(link):
for p in predicates:
if not p(link):
return False
return True
return [link for link in links if f(link)]
def get_forms(self):
"""Returns all forms in the current document.
The returned form objects implement the ClientForm.HTMLForm interface.
"""
if self._forms is None:
import ClientForm
self._forms = ClientForm.ParseResponse(self.get_response(), backwards_compat=False)
return self._forms
def select_form(self, name=None, predicate=None, index=0):
"""Selects the specified form."""
forms = self.get_forms()
if name is not None:
forms = [f for f in forms if f.name == name]
if predicate:
forms = [f for f in forms if predicate(f)]
if forms:
self.form = forms[index]
return self.form
else:
raise BrowserError("No form selected.")
def submit(self, **kw):
"""submits the currently selected form."""
if self.form is None:
raise BrowserError("No form selected.")
req = self.form.click(**kw)
return self.do_request(req)
def __getitem__(self, key):
return self.form[key]
def __setitem__(self, key, value):
self.form[key] = value
class AppBrowser(Browser):
    """Browser interface to test web.py apps.
    b = AppBrowser(app)
    b.open('/')
    b.follow_link(text='Login')
    b.select_form(name='login')
    b['username'] = 'joe'
    b['password'] = 'secret'
    b.submit()
    assert b.path == '/'
    assert 'Welcome joe' in b.get_text()
    """
    def __init__(self, app):
        # Explicit base-class call: Browser is an old-style class here,
        # so super() is not an option.
        Browser.__init__(self)
        self.app = app

    def build_opener(self):
        # Route every request through the app under test instead of the
        # network (see AppHandler below).
        return urllib2.build_opener(AppHandler(self.app))
class AppHandler(urllib2.HTTPHandler):
    """urllib2 handler to handle requests using web.py application."""
    handler_order = 100  # handlers with lower order run first

    def __init__(self, app):
        self.app = app

    def http_open(self, req):
        # Dispatch the request into the in-process app instead of a socket.
        result = self.app.request(
            localpart=req.get_selector(),
            method=req.get_method(),
            host=req.get_host(),
            data=req.get_data(),
            headers=dict(req.header_items()),
            https=req.get_type() == "https"
        )
        return self._make_response(result, req.get_full_url())

    def https_open(self, req):
        return self.http_open(req)

    try:
        https_request = urllib2.HTTPHandler.do_request_
    except AttributeError:
        # for python 2.3
        pass

    def _make_response(self, result, url):
        # Rebuild a urllib2-style response object from the app's result.
        # NOTE(review): `result.header_items` is referenced without calling
        # it, unlike req.header_items() above -- presumably a list/property
        # on web.py's response object; confirm against web.py's test helpers.
        data = "\r\n".join(["%s: %s" % (k, v) for k, v in result.header_items])
        headers = httplib.HTTPMessage(StringIO(data))
        response = urllib.addinfourl(StringIO(result.data), headers, url)
        # result.status is e.g. "200 OK": split into numeric code and reason.
        code, msg = result.status.split(None, 1)
        response.code, response.msg = int(code), msg
        return response
|
apache-2.0
|
HuaweiSwitch/ansible
|
lib/ansible/modules/network/cloudengine/ce_vxlan_vap.py
|
47
|
34406
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.0'}
DOCUMENTATION = """
---
module: ce_vxlan_vap
version_added: "2.4"
short_description: Manages VXLAN virtual access point on HUAWEI CloudEngine Devices.
description:
- Manages VXLAN Virtual access point on HUAWEI CloudEngine Devices.
author: QijunPan (@CloudEngine-Ansible)
options:
bridge_domain_id:
description:
- Specifies a bridge domain ID.
The value is an integer ranging from 1 to 16777215.
required: false
default: null
bind_vlan_id:
description:
- Specifies the VLAN binding to a BD(Bridge Domain).
The value is an integer ranging ranging from 1 to 4094.
required: false
default: null
l2_sub_interface:
description:
- Specifies an Sub-Interface full name, i.e. "10GE1/0/41.1".
The value is a string of 1 to 63 case-insensitive characters, spaces supported.
required: false
default: null
encapsulation:
description:
- Specifies an encapsulation type of packets allowed to pass through a Layer 2 sub-interface.
choices: ['dot1q', 'default', 'untag', 'qinq', 'none']
required: false
default: null
ce_vid:
description:
- When I(encapsulation) is 'dot1q', specifies a VLAN ID in the outer VLAN tag.
When I(encapsulation) is 'qinq', specifies an outer VLAN ID for
double-tagged packets to be received by a Layer 2 sub-interface.
The value is an integer ranging from 1 to 4094.
required: false
default: null
pe_vid:
description:
- When I(encapsulation) is 'qinq', specifies an inner VLAN ID for
double-tagged packets to be received by a Layer 2 sub-interface.
The value is an integer ranging from 1 to 4094.
required: false
default: null
state:
description:
- Determines whether the config should be present or not
on the device.
required: false
default: present
choices: ['present', 'absent']
"""
EXAMPLES = '''
- name: vxlan vap module test
hosts: ce128
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
  - name: Create a mapping between a VLAN and a BD
ce_vxlan_vap:
bridge_domain_id: 100
bind_vlan_id: 99
provider: "{{ cli }}"
- name: Bind a Layer 2 sub-interface to a BD
ce_vxlan_vap:
bridge_domain_id: 100
l2_sub_interface: 10GE2/0/20.1
provider: "{{ cli }}"
- name: Configure an encapsulation type on a Layer 2 sub-interface
ce_vxlan_vap:
l2_sub_interface: 10GE2/0/20.1
encapsulation: dot1q
provider: "{{ cli }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
    sample: {"bridge_domain_id": "100", "bind_vlan_id": "99", "state": "present"}
existing:
description: k/v pairs of existing configuration
returned: verbose mode
type: dict
sample: {"bridge_domain_id": "100", "bind_intf_list": ["10GE2/0/20.1", "10GE2/0/20.2"],
"bind_vlan_list": []}
end_state:
description: k/v pairs of configuration after module execution
returned: verbose mode
type: dict
    sample: {"bridge_domain_id": "100", "bind_intf_list": ["10GE2/0/20.1", "10GE2/0/20.2"],
"bind_vlan_list": ["99"]}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["bridge-domain 100",
"l2 binding vlan 99"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
from xml.etree import ElementTree
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ce import get_nc_config, set_nc_config, ce_argument_spec
CE_NC_GET_BD_VAP = """
<filter type="subtree">
<evc xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bds>
<bd>
<bdId>%s</bdId>
<bdBindVlan>
<vlanList></vlanList>
</bdBindVlan>
<servicePoints>
<servicePoint>
<ifName></ifName>
</servicePoint>
</servicePoints>
</bd>
</bds>
</evc>
</filter>
"""
CE_NC_MERGE_BD_VLAN = """
<config>
<evc xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bds>
<bd>
<bdId>%s</bdId>
<bdBindVlan operation="merge">
<vlanList>%s:%s</vlanList>
</bdBindVlan>
</bd>
</bds>
</evc>
</config>
"""
CE_NC_MERGE_BD_INTF = """
<config>
<evc xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bds>
<bd>
<bdId>%s</bdId>
<servicePoints>
<servicePoint operation="merge">
<ifName>%s</ifName>
</servicePoint>
</servicePoints>
</bd>
</bds>
</evc>
</config>
"""
CE_NC_DELETE_BD_INTF = """
<config>
<evc xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bds>
<bd>
<bdId>%s</bdId>
<servicePoints>
<servicePoint operation="delete">
<ifName>%s</ifName>
</servicePoint>
</servicePoints>
</bd>
</bds>
</evc>
</config>
"""
CE_NC_GET_ENCAP = """
<filter type="subtree">
<ethernet xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<servicePoints>
<servicePoint>
<ifName>%s</ifName>
<flowType></flowType>
<flowDot1qs>
<dot1qVids></dot1qVids>
</flowDot1qs>
<flowQinqs>
<flowQinq>
<peVlanId></peVlanId>
<ceVids></ceVids>
</flowQinq>
</flowQinqs>
</servicePoint>
</servicePoints>
</ethernet>
</filter>
"""
CE_NC_SET_ENCAP = """
<config>
<ethernet xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<servicePoints>
<servicePoint operation="merge">
<ifName>%s</ifName>
<flowType>%s</flowType>
</servicePoint>
</servicePoints>
</ethernet>
</config>
"""
CE_NC_UNSET_ENCAP = """
<config>
<ethernet xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<servicePoints>
<servicePoint operation="merge">
<ifName>%s</ifName>
<flowType>none</flowType>
</servicePoint>
</servicePoints>
</ethernet>
</config>
"""
CE_NC_SET_ENCAP_DOT1Q = """
<config>
<ethernet xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<servicePoints>
<servicePoint operation="merge">
<ifName>%s</ifName>
<flowType>dot1q</flowType>
<flowDot1qs>
<dot1qVids>%s:%s</dot1qVids>
</flowDot1qs>
</servicePoint>
</servicePoints>
</ethernet>
</config>
"""
CE_NC_SET_ENCAP_QINQ = """
<config>
<ethernet xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<servicePoints>
<servicePoint>
<ifName>%s</ifName>
<flowType>qinq</flowType>
<flowQinqs>
<flowQinq operation="merge">
<peVlanId>%s</peVlanId>
<ceVids>%s:%s</ceVids>
</flowQinq>
</flowQinqs>
</servicePoint>
</servicePoints>
</ethernet>
</config>
"""
def vlan_vid_to_bitmap(vid):
    """Convert a single VLAN ID to a 1024-character hex VLAN bitmap.

    Each hex digit covers 4 VLAN IDs (4096 total); only the bit for `vid`
    is set, every other digit stays '0'.
    """
    vlan_bit = ['0'] * 1024
    int_vid = int(vid)
    # BUG FIX: floor division keeps the index an int on Python 3
    # (plain '/' yields a float there and breaks list indexing).
    j = int_vid // 4
    bit_int = 0x8 >> (int_vid % 4)
    vlan_bit[j] = str(hex(bit_int))[2]
    return ''.join(vlan_bit)
def bitmap_to_vlan_list(bitmap):
    """Convert a hex VLAN bitmap string to a list of VLAN ID strings.

    Each character is one hex digit covering 4 consecutive VLAN IDs.
    Returns [] for a missing/empty bitmap.
    """
    vlans = list()
    if not bitmap:
        return vlans

    for i, digit in enumerate(bitmap):
        if digit == "0":
            continue
        # BUG FIX: the bitmap is hexadecimal; int() without a base raised
        # ValueError on digits 'a'-'f' (nibbles with several VLANs set).
        bit = int(digit, 16)
        if bit & 0x8:
            vlans.append(str(i * 4))
        if bit & 0x4:
            vlans.append(str(i * 4 + 1))
        if bit & 0x2:
            vlans.append(str(i * 4 + 2))
        if bit & 0x1:
            vlans.append(str(i * 4 + 3))

    return vlans
def is_vlan_bitmap_empty(bitmap):
    """Return True when the VLAN bitmap is missing or has no bit set."""
    if not bitmap:
        return True
    return all(digit == '0' for digit in bitmap)
def is_vlan_in_bitmap(vid, bitmap):
    """Check whether VLAN `vid` has its bit set in the hex VLAN bitmap."""
    # An absent bitmap can't contain anything (an all-zero one falls out of
    # the nibble test below anyway).
    if not bitmap:
        return False
    # BUG FIXES: floor division keeps the index an int on Python 3; the
    # bound check must be >= (i == len(bitmap) used to raise IndexError);
    # and the nibble is a hex digit, so parse it with base 16.
    i = int(vid) // 4
    if i >= len(bitmap):
        return False
    if int(bitmap[i], 16) & (0x8 >> (int(vid) % 4)):
        return True

    return False
def get_interface_type(interface):
    """Gets the type of interface, such as 10GE, ETH-TRUNK, VLANIF...

    Returns the lowercase type string, or None for an unknown name.
    """
    if interface is None:
        return None

    # Ordered (prefix, type) table mirroring the original if/elif chain;
    # first match wins.  No prefix here is a prefix of a later one's names
    # (e.g. '10GE...' does not start with 'GE'), so the order is safe.
    prefix_map = (
        ('GE', 'ge'),
        ('10GE', '10ge'),
        ('25GE', '25ge'),
        ('4X10GE', '4x10ge'),
        ('40GE', '40ge'),
        ('100GE', '100ge'),
        ('VLANIF', 'vlanif'),
        ('LOOPBACK', 'loopback'),
        ('METH', 'meth'),
        ('ETH-TRUNK', 'eth-trunk'),
        ('VBDIF', 'vbdif'),
        ('NVE', 'nve'),
        ('TUNNEL', 'tunnel'),
        ('ETHERNET', 'ethernet'),
        ('FCOE-PORT', 'fcoe-port'),
        ('FABRIC-PORT', 'fabric-port'),
        ('STACK-PORT', 'stack-port'),
        ('NULL', 'null'),
    )

    name = interface.upper()
    for prefix, iftype in prefix_map:
        if name.startswith(prefix):
            return iftype
    return None
class VxlanVap(object):
"""
Manages VXLAN virtual access point.
"""
    def __init__(self, argument_spec):
        """Store module parameters and initialise result/state bookkeeping."""
        self.spec = argument_spec
        self.module = None
        self.__init_module__()

        # module input info
        self.bridge_domain_id = self.module.params['bridge_domain_id']
        self.bind_vlan_id = self.module.params['bind_vlan_id']
        self.l2_sub_interface = self.module.params['l2_sub_interface']
        self.ce_vid = self.module.params['ce_vid']
        self.pe_vid = self.module.params['pe_vid']
        self.encapsulation = self.module.params['encapsulation']
        self.state = self.module.params['state']

        # state
        self.vap_info = dict()      # BD access-point info fetched in data_init()
        self.l2sub_info = dict()    # sub-interface encapsulation info
        self.changed = False
        self.updates_cmd = list()   # CLI-equivalent commands reported as 'updates'
        self.commands = list()
        self.results = dict()
        self.proposed = dict()
        self.existing = dict()
        self.end_state = dict()
def __init_module__(self):
"""init module"""
required_together = [()]
self.module = AnsibleModule(
argument_spec=self.spec, supports_check_mode=True)
def check_response(self, xml_str, xml_name):
"""Check if response message is already succeed."""
if "<ok/>" not in xml_str:
self.module.fail_json(msg='Error: %s failed.' % xml_name)
def get_bd_vap_dict(self):
"""get virtual access point info"""
vap_info = dict()
conf_str = CE_NC_GET_BD_VAP % self.bridge_domain_id
xml_str = get_nc_config(self.module, conf_str)
if "<data/>" in xml_str:
return vap_info
xml_str = xml_str.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
# get vap: VLAN
vap_info["bdId"] = self.bridge_domain_id
root = ElementTree.fromstring(xml_str)
vap_info["vlanList"] = ""
vap_vlan = root.find("data/evc/bds/bd/bdBindVlan")
vap_info["vlanList"] = ""
if vap_vlan:
for ele in vap_vlan:
if ele.tag == "vlanList":
vap_info["vlanList"] = ele.text
# get vap: l2 su-interface
vap_ifs = root.findall(
"data/evc/bds/bd/servicePoints/servicePoint/ifName")
if_list = list()
if vap_ifs:
for vap_if in vap_ifs:
if vap_if.tag == "ifName":
if_list.append(vap_if.text)
vap_info["intfList"] = if_list
return vap_info
    def get_l2_sub_intf_dict(self, ifname):
        """get l2 sub-interface info"""
        # Queries the device for the sub-interface's encapsulation:
        # flowType plus either the dot1q VID bitmap or the qinq pe/ce VIDs.
        # Returns {} when the interface is not found on the device.
        intf_info = dict()
        if not ifname:
            return intf_info
        conf_str = CE_NC_GET_ENCAP % ifname
        xml_str = get_nc_config(self.module, conf_str)
        if "<data/>" in xml_str:
            return intf_info

        # Strip newlines and namespaces so plain ElementTree paths work.
        xml_str = xml_str.replace('\r', '').replace('\n', '').\
            replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
            replace('xmlns="http://www.huawei.com/netconf/vrp"', "")

        # get l2 sub interface encapsulation info
        root = ElementTree.fromstring(xml_str)
        bds = root.find("data/ethernet/servicePoints/servicePoint")
        # NOTE(review): truth-testing an Element means "has children", not
        # "was found" (deprecated in newer ElementTree) -- confirm intent.
        if not bds:
            return intf_info

        for ele in bds:
            if ele.tag in ["ifName", "flowType"]:
                intf_info[ele.tag] = ele.text.lower()

        if intf_info.get("flowType") == "dot1q":
            ce_vid = root.find(
                "data/ethernet/servicePoints/servicePoint/flowDot1qs")
            intf_info["dot1qVids"] = ""
            if ce_vid:
                for ele in ce_vid:
                    if ele.tag == "dot1qVids":
                        intf_info["dot1qVids"] = ele.text
        elif intf_info.get("flowType") == "qinq":
            vids = root.find(
                "data/ethernet/servicePoints/servicePoint/flowQinqs/flowQinq")
            if vids:
                for ele in vids:
                    if ele.tag in ["peVlanId", "ceVids"]:
                        intf_info[ele.tag] = ele.text

        return intf_info
    def config_traffic_encap_dot1q(self):
        """configure traffic encapsulation type dot1q"""
        # The "interface ..." command is pushed optimistically and popped
        # again at the end when no change turned out to be needed, so
        # updates_cmd only reports real changes.
        xml_str = ""
        self.updates_cmd.append("interface %s" % self.l2_sub_interface)
        if self.state == "present":
            if self.encapsulation != self.l2sub_info.get("flowType"):
                if self.ce_vid:
                    # switch flow type to dot1q and set the ce vid in one go
                    vlan_bitmap = vlan_vid_to_bitmap(self.ce_vid)
                    xml_str = CE_NC_SET_ENCAP_DOT1Q % (
                        self.l2_sub_interface, vlan_bitmap, vlan_bitmap)
                    self.updates_cmd.append("encapsulation %s vid %s" % (
                        self.encapsulation, self.ce_vid))
                else:
                    xml_str = CE_NC_SET_ENCAP % (
                        self.l2_sub_interface, self.encapsulation)
                    self.updates_cmd.append(
                        "encapsulation %s" % self.encapsulation)
            else:
                # flow type already dot1q: only add the vid when missing
                if self.ce_vid and not is_vlan_in_bitmap(
                        self.ce_vid, self.l2sub_info.get("dot1qVids")):
                    vlan_bitmap = vlan_vid_to_bitmap(self.ce_vid)
                    xml_str = CE_NC_SET_ENCAP_DOT1Q % (
                        self.l2_sub_interface, vlan_bitmap, vlan_bitmap)
                    self.updates_cmd.append("encapsulation %s vid %s" % (
                        self.encapsulation, self.ce_vid))
        else:
            # state=absent: only remove when the device matches the request
            if self.encapsulation == self.l2sub_info.get("flowType"):
                if self.ce_vid:
                    if is_vlan_in_bitmap(self.ce_vid, self.l2sub_info.get("dot1qVids")):
                        xml_str = CE_NC_UNSET_ENCAP % self.l2_sub_interface
                        self.updates_cmd.append("undo encapsulation %s vid %s" % (
                            self.encapsulation, self.ce_vid))
                else:
                    xml_str = CE_NC_UNSET_ENCAP % self.l2_sub_interface
                    self.updates_cmd.append(
                        "undo encapsulation %s" % self.encapsulation)

        if not xml_str:
            self.updates_cmd.pop()
            return

        recv_xml = set_nc_config(self.module, xml_str)
        self.check_response(recv_xml, "CONFIG_INTF_ENCAP_DOT1Q")
        self.changed = True
    def config_traffic_encap_qinq(self):
        """configure traffic encapsulation type qinq"""
        # Same optimistic-push/pop pattern as the dot1q variant above.
        xml_str = ""
        self.updates_cmd.append("interface %s" % self.l2_sub_interface)
        if self.state == "present":
            if self.encapsulation != self.l2sub_info.get("flowType"):
                if self.ce_vid:
                    vlan_bitmap = vlan_vid_to_bitmap(self.ce_vid)
                    xml_str = CE_NC_SET_ENCAP_QINQ % (self.l2_sub_interface,
                                                      self.pe_vid,
                                                      vlan_bitmap,
                                                      vlan_bitmap)
                    self.updates_cmd.append(
                        "encapsulation %s vid %s ce-vid %s" % (self.encapsulation,
                                                               self.pe_vid,
                                                               self.ce_vid))
                else:
                    xml_str = CE_NC_SET_ENCAP % (
                        self.l2_sub_interface, self.encapsulation)
                    self.updates_cmd.append(
                        "encapsulation %s" % self.encapsulation)
            else:
                # already qinq: re-push only when the ce/pe vids differ
                if self.ce_vid:
                    if not is_vlan_in_bitmap(self.ce_vid, self.l2sub_info.get("ceVids")) \
                            or self.pe_vid != self.l2sub_info.get("peVlanId"):
                        vlan_bitmap = vlan_vid_to_bitmap(self.ce_vid)
                        xml_str = CE_NC_SET_ENCAP_QINQ % (self.l2_sub_interface,
                                                          self.pe_vid,
                                                          vlan_bitmap,
                                                          vlan_bitmap)
                        self.updates_cmd.append(
                            "encapsulation %s vid %s ce-vid %s" % (self.encapsulation,
                                                                   self.pe_vid,
                                                                   self.ce_vid))
        else:
            # state=absent: only remove an exactly-matching configuration
            if self.encapsulation == self.l2sub_info.get("flowType"):
                if self.ce_vid:
                    if is_vlan_in_bitmap(self.ce_vid, self.l2sub_info.get("ceVids")) \
                            and self.pe_vid == self.l2sub_info.get("peVlanId"):
                        xml_str = CE_NC_UNSET_ENCAP % self.l2_sub_interface
                        self.updates_cmd.append(
                            "undo encapsulation %s vid %s ce-vid %s" % (self.encapsulation,
                                                                        self.pe_vid,
                                                                        self.ce_vid))
                else:
                    xml_str = CE_NC_UNSET_ENCAP % self.l2_sub_interface
                    self.updates_cmd.append(
                        "undo encapsulation %s" % self.encapsulation)

        if not xml_str:
            self.updates_cmd.pop()
            return

        recv_xml = set_nc_config(self.module, xml_str)
        self.check_response(recv_xml, "CONFIG_INTF_ENCAP_QINQ")
        self.changed = True
    def config_traffic_encap(self):
        """configure traffic encapsulation types"""
        # Dispatch on encapsulation kind; dot1q and qinq have dedicated
        # helpers, default/untag/none are handled inline.
        if not self.l2sub_info:
            self.module.fail_json(msg="Error: Interface %s does not exist." % self.l2_sub_interface)

        if not self.encapsulation:
            return

        xml_str = ""
        if self.encapsulation in ["default", "untag"]:
            if self.state == "present":
                if self.encapsulation != self.l2sub_info.get("flowType"):
                    xml_str = CE_NC_SET_ENCAP % (
                        self.l2_sub_interface, self.encapsulation)
                    self.updates_cmd.append(
                        "interface %s" % self.l2_sub_interface)
                    self.updates_cmd.append(
                        "encapsulation %s" % self.encapsulation)
            else:
                if self.encapsulation == self.l2sub_info.get("flowType"):
                    xml_str = CE_NC_UNSET_ENCAP % self.l2_sub_interface
                    self.updates_cmd.append(
                        "interface %s" % self.l2_sub_interface)
                    self.updates_cmd.append(
                        "undo encapsulation %s" % self.encapsulation)
        elif self.encapsulation == "none":
            # "none" with state=present clears whatever encapsulation is set
            if self.state == "present":
                if self.encapsulation != self.l2sub_info.get("flowType"):
                    xml_str = CE_NC_UNSET_ENCAP % self.l2_sub_interface
                    self.updates_cmd.append(
                        "interface %s" % self.l2_sub_interface)
                    self.updates_cmd.append(
                        "undo encapsulation %s" % self.l2sub_info.get("flowType"))
        elif self.encapsulation == "dot1q":
            self.config_traffic_encap_dot1q()
            return
        elif self.encapsulation == "qinq":
            self.config_traffic_encap_qinq()
            return
        else:
            pass

        if not xml_str:
            return

        recv_xml = set_nc_config(self.module, xml_str)
        self.check_response(recv_xml, "CONFIG_INTF_ENCAP")
        self.changed = True
    def config_vap_sub_intf(self):
        """configure a Layer 2 sub-interface as a service access point"""
        # Binds (present) or unbinds (absent) the sub-interface to/from the
        # bridge domain, only when the device state actually differs.
        if not self.vap_info:
            self.module.fail_json(msg="Error: Bridge domain %s does not exist." % self.bridge_domain_id)

        xml_str = ""
        if self.state == "present":
            if self.l2_sub_interface not in self.vap_info["intfList"]:
                self.updates_cmd.append("interface %s" % self.l2_sub_interface)
                self.updates_cmd.append("bridge-domain %s" %
                                        self.bridge_domain_id)
                xml_str = CE_NC_MERGE_BD_INTF % (
                    self.bridge_domain_id, self.l2_sub_interface)
        else:
            if self.l2_sub_interface in self.vap_info["intfList"]:
                self.updates_cmd.append("interface %s" % self.l2_sub_interface)
                self.updates_cmd.append(
                    "undo bridge-domain %s" % self.bridge_domain_id)
                xml_str = CE_NC_DELETE_BD_INTF % (
                    self.bridge_domain_id, self.l2_sub_interface)

        if not xml_str:
            return

        recv_xml = set_nc_config(self.module, xml_str)
        self.check_response(recv_xml, "CONFIG_VAP_SUB_INTERFACE")
        self.changed = True
    def config_vap_vlan(self):
        """configure a VLAN as a service access point"""
        # Adds or removes the VLAN bit in the BD's bound-VLAN bitmap, only
        # when the device state actually differs from the request.
        if not self.vap_info:
            self.module.fail_json(msg="Error: Bridge domain %s does not exist." % self.bridge_domain_id)

        xml_str = ""
        if self.state == "present":
            if not is_vlan_in_bitmap(self.bind_vlan_id, self.vap_info["vlanList"]):
                self.updates_cmd.append("bridge-domain %s" %
                                        self.bridge_domain_id)
                self.updates_cmd.append(
                    "l2 binding vlan %s" % self.bind_vlan_id)
                vlan_bitmap = vlan_vid_to_bitmap(self.bind_vlan_id)
                xml_str = CE_NC_MERGE_BD_VLAN % (
                    self.bridge_domain_id, vlan_bitmap, vlan_bitmap)
        else:
            if is_vlan_in_bitmap(self.bind_vlan_id, self.vap_info["vlanList"]):
                self.updates_cmd.append("bridge-domain %s" %
                                        self.bridge_domain_id)
                self.updates_cmd.append(
                    "undo l2 binding vlan %s" % self.bind_vlan_id)
                vlan_bitmap = vlan_vid_to_bitmap(self.bind_vlan_id)
                # NOTE(review): a zero "set" bitmap with the vlan in the
                # second slot appears to clear the binding -- presumably the
                # template takes an (add, remove) bitmap pair; confirm
                # against the device's EVC schema.
                xml_str = CE_NC_MERGE_BD_VLAN % (
                    self.bridge_domain_id, "0" * 1024, vlan_bitmap)

        if not xml_str:
            return

        recv_xml = set_nc_config(self.module, xml_str)
        self.check_response(recv_xml, "CONFIG_VAP_VLAN")
        self.changed = True
def is_vlan_valid(self, vid, name):
"""check VLAN id"""
if not vid:
return
if not vid.isdigit():
self.module.fail_json(msg="Error: %s is not digit." % name)
return
if int(vid) < 1 or int(vid) > 4094:
self.module.fail_json(
msg="Error: %s is not in the range from 1 to 4094." % name)
def is_l2_sub_intf_valid(self, ifname):
"""check l2 sub interface valid"""
if ifname.count('.') != 1:
return False
if_num = ifname.split('.')[1]
if not if_num.isdigit():
return False
if int(if_num) < 1 or int(if_num) > 4096:
self.module.fail_json(
msg="Error: Sub-interface number is not in the range from 1 to 4096.")
return False
if not get_interface_type(ifname):
return False
return True
    def check_params(self):
        """Check all input params"""
        # Fails the module (fail_json) on the first invalid combination.
        # bridge domain id check
        if self.bridge_domain_id:
            if not self.bridge_domain_id.isdigit():
                self.module.fail_json(
                    msg="Error: Bridge domain id is not digit.")
            if int(self.bridge_domain_id) < 1 or int(self.bridge_domain_id) > 16777215:
                self.module.fail_json(
                    msg="Error: Bridge domain id is not in the range from 1 to 16777215.")

        # check bind_vlan_id
        if self.bind_vlan_id:
            self.is_vlan_valid(self.bind_vlan_id, "bind_vlan_id")

        # check l2_sub_interface
        if self.l2_sub_interface and not self.is_l2_sub_intf_valid(self.l2_sub_interface):
            self.module.fail_json(msg="Error: l2_sub_interface is invalid.")

        # check ce_vid: only meaningful for dot1q/qinq; qinq also needs pe_vid
        if self.ce_vid:
            self.is_vlan_valid(self.ce_vid, "ce_vid")
            if not self.encapsulation or self.encapsulation not in ["dot1q", "qinq"]:
                self.module.fail_json(msg="Error: ce_vid can not be set "
                                          "when encapsulation is '%s'." % self.encapsulation)
            if self.encapsulation == "qinq" and not self.pe_vid:
                self.module.fail_json(msg="Error: ce_vid and pe_vid must be set at the same time "
                                          "when encapsulation is '%s'." % self.encapsulation)
        # check pe_vid: only valid for qinq and always together with ce_vid
        if self.pe_vid:
            self.is_vlan_valid(self.pe_vid, "pe_vid")
            if not self.encapsulation or self.encapsulation != "qinq":
                self.module.fail_json(msg="Error: pe_vid can not be set "
                                          "when encapsulation is '%s'." % self.encapsulation)
            if not self.ce_vid:
                self.module.fail_json(msg="Error: ce_vid and pe_vid must be set at the same time "
                                          "when encapsulation is '%s'." % self.encapsulation)
def get_proposed(self):
"""get proposed info"""
if self.bridge_domain_id:
self.proposed["bridge_domain_id"] = self.bridge_domain_id
if self.bind_vlan_id:
self.proposed["bind_vlan_id"] = self.bind_vlan_id
if self.l2_sub_interface:
self.proposed["l2_sub_interface"] = self.l2_sub_interface
if self.encapsulation:
self.proposed["encapsulation"] = self.encapsulation
if self.ce_vid:
self.proposed["ce_vid"] = self.ce_vid
if self.pe_vid:
self.proposed["pe_vid"] = self.pe_vid
self.proposed["state"] = self.state
def get_existing(self):
"""get existing info"""
if self.bridge_domain_id:
if self.bind_vlan_id or self.l2_sub_interface:
self.existing["bridge_domain_id"] = self.bridge_domain_id
self.existing["bind_vlan_list"] = bitmap_to_vlan_list(
self.vap_info.get("vlanList"))
self.existing["bind_intf_list"] = self.vap_info.get("intfList")
if self.encapsulation and self.l2_sub_interface:
self.existing["l2_sub_interface"] = self.l2_sub_interface
self.existing["encapsulation"] = self.l2sub_info.get("flowType")
if self.existing["encapsulation"] == "dot1q":
self.existing["ce_vid"] = bitmap_to_vlan_list(
self.l2sub_info.get("dot1qVids"))
if self.existing["encapsulation"] == "qinq":
self.existing["ce_vid"] = bitmap_to_vlan_list(
self.l2sub_info.get("ceVids"))
self.existing["pe_vid"] = self.l2sub_info.get("peVlanId")
def get_end_state(self):
    """Collect the device configuration after changes into self.end_state."""
    if self.bridge_domain_id and (self.bind_vlan_id or self.l2_sub_interface):
        # Re-read bridge-domain access-point info from the device.
        vap_info = self.get_bd_vap_dict()
        self.end_state["bridge_domain_id"] = self.bridge_domain_id
        self.end_state["bind_vlan_list"] = bitmap_to_vlan_list(
            vap_info.get("vlanList"))
        self.end_state["bind_intf_list"] = vap_info.get("intfList")
    if self.encapsulation and self.l2_sub_interface:
        # Re-read the sub-interface encapsulation info.
        l2sub_info = self.get_l2_sub_intf_dict(self.l2_sub_interface)
        flow_type = l2sub_info.get("flowType")
        self.end_state["l2_sub_interface"] = self.l2_sub_interface
        self.end_state["encapsulation"] = flow_type
        if flow_type == "dot1q":
            self.end_state["ce_vid"] = bitmap_to_vlan_list(
                l2sub_info.get("dot1qVids"))
        elif flow_type == "qinq":
            self.end_state["ce_vid"] = bitmap_to_vlan_list(
                l2sub_info.get("ceVids"))
            self.end_state["pe_vid"] = l2sub_info.get("peVlanId")
def data_init(self):
    """Normalize input strings and cache device info needed by later steps."""
    if self.l2_sub_interface:
        # Interface names are compared upper-case and without spaces.
        self.l2_sub_interface = self.l2_sub_interface.replace(" ", "").upper()
    if self.encapsulation and self.l2_sub_interface:
        self.l2sub_info = self.get_l2_sub_intf_dict(self.l2_sub_interface)
    if self.bridge_domain_id and (self.bind_vlan_id or self.l2_sub_interface):
        self.vap_info = self.get_bd_vap_dict()
def work(self):
    """Main worker: validate, read device state, apply config, report.

    Calls module.exit_json() at the end, so it never returns normally.
    The call order matters: params must be checked before data_init
    fetches device info, and existing/proposed are captured before any
    configuration is pushed.
    """
    self.check_params()
    self.data_init()
    self.get_existing()
    self.get_proposed()
    # Traffic encapsulation types
    if self.encapsulation and self.l2_sub_interface:
        self.config_traffic_encap()
    # A VXLAN service access point can be a Layer 2 sub-interface or VLAN
    if self.bridge_domain_id:
        if self.l2_sub_interface:
            # configure a Layer 2 sub-interface as a service access point
            self.config_vap_sub_intf()
        if self.bind_vlan_id:
            # configure a VLAN as a service access point
            self.config_vap_vlan()
    # Capture post-change state for the changed/end_state report.
    self.get_end_state()
    self.results['changed'] = self.changed
    self.results['proposed'] = self.proposed
    self.results['existing'] = self.existing
    self.results['end_state'] = self.end_state
    # Only report the generated CLI when something actually changed.
    if self.changed:
        self.results['updates'] = self.updates_cmd
    else:
        self.results['updates'] = list()
    self.module.exit_json(**self.results)
def main():
    """Module entry point: build the argument spec and run the worker."""
    spec = dict(
        bridge_domain_id=dict(required=False, type='str'),
        bind_vlan_id=dict(required=False, type='str'),
        l2_sub_interface=dict(required=False, type='str'),
        encapsulation=dict(required=False, type='str',
                           choices=['dot1q', 'default', 'untag', 'qinq', 'none']),
        ce_vid=dict(required=False, type='str'),
        pe_vid=dict(required=False, type='str'),
        state=dict(required=False, default='present',
                   choices=['present', 'absent']),
    )
    # Merge the common CloudEngine connection arguments.
    spec.update(ce_argument_spec)
    VxlanVap(spec).work()


if __name__ == '__main__':
    main()
|
gpl-3.0
|
endlessm/chromium-browser
|
third_party/catapult/tracing/tracing/value/diagnostics/diagnostic.py
|
4
|
3699
|
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import uuid
try:
from py_utils import slots_metaclass
SlotsMetaclass = slots_metaclass.SlotsMetaclass # pylint: disable=invalid-name
except ImportError:
# TODO(benjhayden): Figure out why py_utils doesn't work in dev_appserver.py
SlotsMetaclass = None # pylint: disable=invalid-name
from tracing.value.diagnostics import all_diagnostics
class Diagnostic(object):
    """Base class for histogram diagnostics.

    A diagnostic may be shared between histograms; shared diagnostics carry a
    guid and serialize as a bare reference (the guid string) instead of a
    full dict/proto.
    """

    __slots__ = '_guid',

    # Ensure that new subclasses remember to specify __slots__ in order to
    # prevent regressing memory consumption:
    if SlotsMetaclass:
        __metaclass__ = SlotsMetaclass

    def __init__(self):
        self._guid = None

    def __ne__(self, other):
        return not (self == other)

    @property
    def guid(self):
        # Mint a guid lazily so unshared diagnostics stay cheap.
        if self._guid is None:
            self._guid = str(uuid.uuid4())
        return self._guid

    @guid.setter
    def guid(self, value):
        # A guid may only be assigned once.
        assert self._guid is None
        self._guid = value

    @property
    def has_guid(self):
        return self._guid is not None

    def AsDictOrReference(self):
        # Shared diagnostics serialize as their guid reference.
        return self._guid if self._guid else self.AsDict()

    def AsProtoOrReference(self):
        return self._guid if self._guid else self.AsProto()

    def AsDict(self):
        result = {'type': self.__class__.__name__}
        if self._guid:
            result['guid'] = self._guid
        self._AsDictInto(result)
        return result

    def AsProto(self):
        return self._AsProto()

    def _AsDictInto(self, unused_dct):
        raise NotImplementedError()

    def _AsProto(self):
        raise NotImplementedError()

    @staticmethod
    def FromDict(dct):
        cls = all_diagnostics.GetDiagnosticClassForName(dct['type'])
        if not cls:
            raise ValueError('Unrecognized diagnostic type: ' + dct['type'])
        diagnostic = cls.FromDict(dct)
        if 'guid' in dct:
            diagnostic.guid = dct['guid']
        return diagnostic

    @staticmethod
    def FromProto(d):
        # Here we figure out which field is set and downcast to the right
        # diagnostic type. The diagnostic names in the proto must be the same
        # as the class names in the python code, for instance Breakdown.
        attr_name = d.WhichOneof('diagnostic_oneof')
        assert attr_name, 'The diagnostic oneof cannot be empty.'
        d = getattr(d, attr_name)
        assert type(d).__name__ in all_diagnostics.GetDiagnosticTypenames(), (
            'Unrecognized diagnostic type ' + type(d).__name__)
        diag_type = type(d).__name__
        cls = all_diagnostics.GetDiagnosticClassForName(diag_type)
        return cls.FromProto(d)

    def ResetGuid(self, guid=None):
        # Replace the guid, minting a fresh one when none is supplied.
        self._guid = guid if guid else str(uuid.uuid4())

    def Inline(self):
        """Drop this diagnostic's guid so it serializes by value.

        Any diagnostic that has a guid is assumed to be shared and serializes
        as a reference; clearing the guid forces full serialization. This is
        used for example in the dashboard, where certain per-upload shared
        diagnostics are inlined for efficiency reasons.
        """
        self._guid = None

    def CanAddDiagnostic(self, unused_other_diagnostic):
        return False

    def AddDiagnostic(self, unused_other_diagnostic):
        raise Exception('Abstract virtual method: subclasses must override '
                        'this method if they override canAddDiagnostic')
def Deserialize(type_name, data, deserializer):
    """Look up the diagnostic class named type_name and delegate to it."""
    diagnostic_cls = all_diagnostics.GetDiagnosticClassForName(type_name)
    if not diagnostic_cls:
        raise ValueError('Unrecognized diagnostic type: ' + type_name)
    return diagnostic_cls.Deserialize(data, deserializer)
|
bsd-3-clause
|
greggyNapalm/lunaport_client
|
setup.py
|
1
|
1258
|
import os
import re
from setuptools import setup, find_packages

base_path = os.path.dirname(__file__)


def _read(file_name):
    """Return the text of *file_name*, closing the handle deterministically.

    The original code left the README/CHANGES file objects open; a context
    manager guarantees they are closed even if reading fails.
    """
    with open(file_name) as f:
        return f.read()


# Get the version (borrowed from SQLAlchemy): parse __version__ out of the
# package's __init__.py without importing the package.
with open(os.path.join(base_path, 'lunaport_client', '__init__.py')) as fp:
    VERSION = re.compile(r".*__version__ = '(.*?)'",
                         re.S).match(fp.read()).group(1)

setup(
    name='lunaport_client',
    version=VERSION,
    author='Gregory Komissarov',
    author_email='gregory.komissarov@gmail.com',
    description='Lunaport REST APIs client',
    long_description='\n' + _read('README.rst') + '\n\n' + _read('CHANGES.rst'),
    license='BSD',
    url='https://github.domain/gkomissarov/lunaport_client',
    keywords=['load', 'lunapark', 'api', 'client'],
    packages=[
        'lunaport_client',
    ],
    zip_safe=False,
    install_requires=[
        'requests >= 1.2.3',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Utilities',
    ],
)
|
apache-2.0
|
cryptobanana/ansible
|
lib/ansible/modules/network/cnos/cnos_template.py
|
35
|
7351
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
#
# Copyright (C) 2017 Lenovo, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module to send CLI templates to Lenovo Switches
# Lenovo Networking
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cnos_template
author: "Dave Kasberg (@dkasberg)"
short_description: Manage switch configuration using templates on devices running Lenovo CNOS
description:
- This module allows you to work with the running configuration of a switch. It provides a way
to execute a set of CNOS commands on a switch by evaluating the current running configuration
and executing the commands only if the specific settings have not been already configured.
The configuration source can be a set of commands or a template written in the Jinja2 templating language.
This module uses SSH to manage network device configuration.
The results of the operation will be placed in a directory named 'results'
that must be created by the user in their local directory to where the playbook is run.
For more information about this module from Lenovo and customizing it usage for your
use cases, please visit U(http://systemx.lenovofiles.com/help/index.jsp?topic=%2Fcom.lenovo.switchmgt.ansible.doc%2Fcnos_template.html)
version_added: "2.3"
extends_documentation_fragment: cnos
options:
commandfile:
description:
- This specifies the path to the CNOS command file which needs to be applied. This usually
comes from the commands folder. Generally this file is the output of the variables applied
on a template file. So this command is preceded by a template module.
Note The command file must contain the Ansible keyword {{ inventory_hostname }} in its
filename to ensure that the command file is unique for each switch and condition.
If this is omitted, the command file will be overwritten during iteration. For example,
commandfile=./commands/clos_leaf_bgp_{{ inventory_hostname }}_commands.txt
required: true
default: Null
'''
EXAMPLES = '''
Tasks : The following are examples of using the module cnos_template. These are written in the main.yml file of the tasks directory.
---
- name: Replace Config CLI command template with values
template:
src: demo_template.j2
dest: "./commands/demo_template_{{ inventory_hostname }}_commands.txt"
vlanid1: 13
slot_chassis_number1: "1/2"
portchannel_interface_number1: 100
portchannel_mode1: "active"
- name: Applying CLI commands on Switches
cnos_template:
host: "{{ inventory_hostname }}"
username: "{{ hostvars[inventory_hostname]['username'] }}"
password: "{{ hostvars[inventory_hostname]['password'] }}"
deviceType: "{{ hostvars[inventory_hostname]['deviceType'] }}"
enablePassword: "{{ hostvars[inventory_hostname]['enablePassword'] }}"
commandfile: "./commands/demo_template_{{ inventory_hostname }}_commands.txt"
outputfile: "./results/demo_template_command_{{ inventory_hostname }}_output.txt"
'''
RETURN = '''
msg:
description: Success or failure message
returned: always
type: string
sample: "Template Applied."
'''
import sys
try:
import paramiko
HAS_PARAMIKO = True
except ImportError:
HAS_PARAMIKO = False
import time
import socket
import array
import json
import time
import re
try:
    from ansible.module_utils.network.cnos import cnos
    HAS_LIB = True
except ImportError:
    # Previously a bare "except:" which also swallowed SystemExit and
    # KeyboardInterrupt; only an ImportError means the helper is missing.
    HAS_LIB = False
from ansible.module_utils.basic import AnsibleModule
from collections import defaultdict
def main():
    """Apply a rendered CNOS command file to a switch over SSH.

    Sends each non-comment line of ``commandfile`` to the device, stops on
    the first CLI error, saves the running config, appends the full session
    transcript to ``outputfile`` and exits via the Ansible module API.

    Fixes over the original: the command file handle was leaked
    (``f = open(...)`` with no close) and the output handle shadowed the
    builtin ``file``; both now use ``with`` blocks.
    """
    module = AnsibleModule(
        argument_spec=dict(
            commandfile=dict(required=True),
            outputfile=dict(required=True),
            host=dict(required=True),
            deviceType=dict(required=True),
            username=dict(required=True),
            password=dict(required=True, no_log=True),
            enablePassword=dict(required=False, no_log=True),),
        supports_check_mode=False)
    username = module.params['username']
    password = module.params['password']
    enablePassword = module.params['enablePassword']
    commandfile = module.params['commandfile']
    outputfile = module.params['outputfile']
    deviceType = module.params['deviceType']
    hostIP = module.params['host']
    output = ""
    if not HAS_PARAMIKO:
        module.fail_json(msg='paramiko is required for this module')
    # Create instance of SSHClient object
    remote_conn_pre = paramiko.SSHClient()
    # Automatically add untrusted hosts (make sure okay for security policy
    # in your environment)
    remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    # initiate SSH connection with the switch
    remote_conn_pre.connect(hostIP, username=username, password=password)
    time.sleep(2)
    # Use invoke_shell to establish an 'interactive session'
    remote_conn = remote_conn_pre.invoke_shell()
    time.sleep(2)
    # Enable and enter configure terminal then send command
    output = output + cnos.waitForDeviceResponse("\n", ">", 2, remote_conn)
    output = output + cnos.enterEnableModeForDevice(enablePassword, 3, remote_conn)
    # Make terminal length = 0
    output = output + cnos.waitForDeviceResponse("terminal length 0\n", "#", 2, remote_conn)
    # Go to config mode
    output = output + cnos.waitForDeviceResponse("configure d\n", "(config)#", 2, remote_conn)
    # Send commands one by one to the device; close the file deterministically.
    with open(commandfile, "r") as f:
        for line in f:
            # Omit the comment lines in template file
            if not line.startswith("#"):
                command = line
                if not line.endswith("\n"):
                    command = command + "\n"
                response = cnos.waitForDeviceResponse(command, "#", 2, remote_conn)
                errorMsg = cnos.checkOutputForError(response)
                output = output + response
                if errorMsg is not None:
                    break  # To cater to Mufti case
    # Write to memory
    output = output + cnos.waitForDeviceResponse("save\n", "#", 3, remote_conn)
    # Append the session transcript to the results file (avoid shadowing the
    # builtin "file" and close the handle deterministically).
    with open(outputfile, "a") as out_file:
        out_file.write(output)
    # Logic to check when changes occur or not
    errorMsg = cnos.checkOutputForError(output)
    if errorMsg is None:
        module.exit_json(changed=True, msg="Template Applied")
    else:
        module.fail_json(msg=errorMsg)


if __name__ == '__main__':
    main()
|
gpl-3.0
|
louietsai/python-for-android
|
python3-alpha/extra_modules/atom/mock_http_core.py
|
46
|
11993
|
#!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = 'j.s@google.com (Jeff Scudder)'
import io
import pickle
import os.path
import tempfile
import atom.http_core
class Error(Exception):
    """Base exception for this module."""
    pass


class NoRecordingFound(Error):
    """Raised in replay mode when no stored recording matches a request."""
    pass
class MockHttpClient(object):
    """HTTP client double that records live responses and replays them.

    In replay mode (real_client is None) requests are served from
    self._recordings; otherwise requests go through real_client and each
    (request, response) pair is appended to the recordings. Recordings can
    be persisted to a pickle file in the temp directory between test runs.
    """

    debug = False
    # When set, requests are forwarded to this real client and recorded.
    real_client = None
    # True when the most recent request() hit the network instead of a recording.
    last_request_was_live = False

    # The following members are used to construct the session cache temp file
    # name.
    # These are combined to form the file name
    # /tmp/cache_prefix.cache_case_name.cache_test_name
    cache_name_prefix = 'gdata_live_test'
    cache_case_name = ''
    cache_test_name = ''

    def __init__(self, recordings=None, real_client=None):
        self._recordings = recordings or []
        if real_client is not None:
            self.real_client = real_client

    def add_response(self, http_request, status, reason, headers=None,
                     body=None):
        # Pair a canned response with a copy of the request so later requests
        # can be matched against it.
        response = MockHttpResponse(status, reason, headers, body)
        # TODO Scrub the request and the response.
        self._recordings.append((http_request._copy(), response))

    AddResponse = add_response

    def request(self, http_request):
        """Provide a recorded response, or record a response for replay.

        If the real_client is set, the request will be made using the
        real_client, and the response from the server will be recorded.
        If the real_client is None (the default), this method will examine
        the recordings and find the first which matches.
        """
        request = http_request._copy()
        _scrub_request(request)
        if self.real_client is None:
            self.last_request_was_live = False
            for recording in self._recordings:
                if _match_request(recording[0], request):
                    return recording[1]
        else:
            # Pass along the debug settings to the real client.
            self.real_client.debug = self.debug
            # Make an actual request since we can use the real HTTP client.
            self.last_request_was_live = True
            response = self.real_client.request(http_request)
            scrubbed_response = _scrub_response(response)
            self.add_response(request, scrubbed_response.status,
                              scrubbed_response.reason,
                              dict(atom.http_core.get_headers(scrubbed_response)),
                              scrubbed_response.read())
            # Return the recording which we just added.
            return self._recordings[-1][1]
        raise NoRecordingFound('No recoding was found for request: %s %s' % (
            request.method, str(request.uri)))

    Request = request

    def _save_recordings(self, filename):
        # NOTE(review): recordings are pickled in plaintext under the temp
        # dir; only what _scrub_request removes is protected.
        recording_file = open(os.path.join(tempfile.gettempdir(), filename),
                              'wb')
        pickle.dump(self._recordings, recording_file)
        recording_file.close()

    def _load_recordings(self, filename):
        # Security note: pickle.load executes arbitrary code if the cache
        # file is tampered with; only load caches this process created.
        recording_file = open(os.path.join(tempfile.gettempdir(), filename),
                              'rb')
        self._recordings = pickle.load(recording_file)
        recording_file.close()

    def _delete_recordings(self, filename):
        full_path = os.path.join(tempfile.gettempdir(), filename)
        if os.path.exists(full_path):
            os.remove(full_path)

    def _load_or_use_client(self, filename, http_client):
        # Prefer a cached session file when present; otherwise fall back to
        # the live client (responses will then be recorded).
        if os.path.exists(os.path.join(tempfile.gettempdir(), filename)):
            self._load_recordings(filename)
        else:
            self.real_client = http_client

    def use_cached_session(self, name=None, real_http_client=None):
        """Attempts to load recordings from a previous live request.

        If a temp file with the recordings exists, then it is used to fulfill
        requests. If the file does not exist, then a real client is used to
        actually make the desired HTTP requests. Requests and responses are
        recorded and will be written to the desired temporary cache file when
        close_session is called.

        Args:
          name: str (optional) The file name of session file to be used. The
              file is loaded from the temporary directory of this machine. If
              no name is passed in, a default name will be constructed using
              the cache_name_prefix, cache_case_name, and cache_test_name of
              this object.
          real_http_client: atom.http_core.HttpClient the real client to be
              used if the cached recordings are not found. If the default
              value is used, this will be an atom.http_core.HttpClient.
        """
        if real_http_client is None:
            real_http_client = atom.http_core.HttpClient()
        if name is None:
            self._recordings_cache_name = self.get_cache_file_name()
        else:
            self._recordings_cache_name = name
        self._load_or_use_client(self._recordings_cache_name, real_http_client)

    def close_session(self):
        """Saves recordings in the temporary file named in use_cached_session."""
        if self.real_client is not None:
            self._save_recordings(self._recordings_cache_name)

    def delete_session(self, name=None):
        """Removes recordings from a previous live request."""
        if name is None:
            self._delete_recordings(self._recordings_cache_name)
        else:
            self._delete_recordings(name)

    def get_cache_file_name(self):
        # Combines the three cache_* members; see the comment above them.
        return '%s.%s.%s' % (self.cache_name_prefix, self.cache_case_name,
                             self.cache_test_name)

    def _dump(self):
        """Provides debug information in a string."""
        output = 'MockHttpClient\n real_client: %s\n cache file name: %s\n' % (
            self.real_client, self.get_cache_file_name())
        output += ' recordings:\n'
        i = 0
        for recording in self._recordings:
            output += ' recording %i is for: %s %s\n' % (
                i, recording[0].method, str(recording[0].uri))
            i += 1
        return output
def _match_request(http_request, stored_request):
"""Determines whether a request is similar enough to a stored request
to cause the stored response to be returned."""
# Check to see if the host names match.
if (http_request.uri.host is not None
and http_request.uri.host != stored_request.uri.host):
return False
# Check the request path in the URL (/feeds/private/full/x)
elif http_request.uri.path != stored_request.uri.path:
return False
# Check the method used in the request (GET, POST, etc.)
elif http_request.method != stored_request.method:
return False
# If there is a gsession ID in either request, make sure that it is matched
# exactly.
elif ('gsessionid' in http_request.uri.query
or 'gsessionid' in stored_request.uri.query):
if 'gsessionid' not in stored_request.uri.query:
return False
elif 'gsessionid' not in http_request.uri.query:
return False
elif (http_request.uri.query['gsessionid']
!= stored_request.uri.query['gsessionid']):
return False
# Ignores differences in the query params (?start-index=5&max-results=20),
# the body of the request, the port number, HTTP headers, just to name a
# few.
return True
def _scrub_request(http_request):
""" Removes email address and password from a client login request.
Since the mock server saves the request and response in plantext, sensitive
information like the password should be removed before saving the
recordings. At the moment only requests sent to a ClientLogin url are
scrubbed.
"""
if (http_request and http_request.uri and http_request.uri.path and
http_request.uri.path.endswith('ClientLogin')):
# Remove the email and password from a ClientLogin request.
http_request._body_parts = []
http_request.add_form_inputs(
{'form_data': 'client login request has been scrubbed'})
else:
# We can remove the body of the post from the recorded request, since
# the request body is not used when finding a matching recording.
http_request._body_parts = []
return http_request
def _scrub_response(http_response):
    # Placeholder: responses are currently recorded as-is (no scrubbing).
    return http_response
class EchoHttpClient(object):
    """Sends the request data back in the response.

    Used to check the formatting of the request as it was sent. Always
    responds with a 200 OK, and some information from the HTTP request is
    returned in special Echo-X headers in the response:

    'Echo-Host': The host name and port number to which the HTTP connection
                 is made. If no port was passed in, the header will contain
                 host:None.
    'Echo-Uri': The path portion of the URL being requested. /example?x=1&y=2
    'Echo-Scheme': The beginning of the URL, usually 'http' or 'https'
    'Echo-Method': The HTTP method being used, 'GET', 'POST', 'PUT', etc.
    """

    def request(self, http_request):
        return self._http_request(http_request.uri, http_request.method,
                                  http_request.headers, http_request._body_parts)

    def _http_request(self, uri, method, headers=None, body_parts=None):
        body = io.StringIO()
        response = atom.http_core.HttpResponse(status=200, reason='OK', body=body)
        if headers is None:
            response._headers = {}
        else:
            # Echo the request headers back, but as strings: server response
            # headers always arrive as strings, so ints must be converted.
            response._headers = {name: str(value)
                                 for name, value in headers.items()}
        response._headers.update({
            'Echo-Host': '%s:%s' % (uri.host, str(uri.port)),
            'Echo-Uri': uri._get_relative_path(),
            'Echo-Scheme': uri.scheme,
            'Echo-Method': method,
        })
        # Replay the request body into the response body.
        for part in body_parts:
            if isinstance(part, str):
                body.write(part)
            elif hasattr(part, 'read'):
                body.write(part.read())
        body.seek(0)
        return response
class SettableHttpClient(object):
    """An HTTP Client which responds with the data given in set_response."""

    def __init__(self, status, reason, body, headers):
        """Configures the response for the server.

        See set_response for details on the arguments to the constructor.
        """
        self.set_response(status, reason, body, headers)
        self.last_request = None

    def set_response(self, status, reason, body, headers):
        """Determines the response which will be sent for each request.

        Args:
          status: An int for the HTTP status code, example: 200, 404, etc.
          reason: String for the HTTP reason, example: OK, NOT FOUND, etc.
          body: The body of the HTTP response as a string or a file-like
              object (something with a read method).
          headers: dict of strings containing the HTTP headers in the response.
        """
        canned = atom.http_core.HttpResponse(status=status, reason=reason,
                                             body=body)
        canned._headers = headers.copy()
        self.response = canned

    def request(self, http_request):
        # Remember the request for later inspection, then answer with the
        # canned response.
        self.last_request = http_request
        return self.response
class MockHttpResponse(atom.http_core.HttpResponse):
    """Canned HTTP response whose body can be read repeatedly."""

    def __init__(self, status=None, reason=None, headers=None, body=None):
        self._headers = headers or {}
        if status is not None:
            self.status = status
        if reason is not None:
            self.reason = reason
        if body is None:
            return
        # Store the body as a string rather than a file-like object so that
        # read() can be called any number of times.
        self._body = body.read() if hasattr(body, 'read') else body

    def read(self):
        return self._body
|
apache-2.0
|
rotofly/odoo
|
openerp/workflow/workitem.py
|
294
|
14389
|
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2014 OpenERP S.A. (<http://openerp.com).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#
# TODO:
# cr.execute('delete from wkf_triggers where model=%s and res_id=%s', (res_type,res_id))
#
import logging
import instance
from openerp.workflow.helpers import Session
from openerp.workflow.helpers import Record
from openerp.workflow.helpers import WorkflowActivity
logger = logging.getLogger(__name__)
import openerp
from openerp.tools.safe_eval import safe_eval as eval
class Environment(dict):
    """Evaluation environment for workflow code (e.g. transition conditions).

    Exposes cr, uid, id, ids, the model name, the model instance, and every
    attribute of the record obtained by browsing the provided ID.
    """

    def __init__(self, session, record):
        self.cr = session.cr
        self.uid = session.uid
        self.model = record.model
        self.id = record.id
        self.ids = [record.id]
        self.obj = openerp.registry(self.cr.dbname)[self.model]

    def __getitem__(self, key):
        # Attributes of the browsed record shadow plain dict entries.
        records = self.obj.browse(self.cr, self.uid, self.ids)
        if not hasattr(records, key):
            return super(Environment, self).__getitem__(key)
        return getattr(records, key)
class WorkflowItem(object):
def __init__(self, session, record, work_item_values):
    """Wrap one wkf_workitem row (work_item_values) for the given record."""
    assert isinstance(session, Session)
    assert isinstance(record, Record)
    self.session = session
    self.record = record
    # Normalize a missing/empty values mapping to an empty dict.
    values = work_item_values or {}
    assert isinstance(values, dict)
    self.workitem = values
@classmethod
def create(cls, session, record, activity, instance_id, stack):
    """Insert an 'active' wkf_workitem row for `activity` and process it.

    `stack` collects client actions returned while processing. Note: the
    `(long, int)` check is Python 2 only.
    """
    assert isinstance(session, Session)
    assert isinstance(record, Record)
    assert isinstance(activity, dict)
    assert isinstance(instance_id, (long, int))
    assert isinstance(stack, list)
    cr = session.cr
    # Allocate the new work item id from the dedicated sequence.
    cr.execute("select nextval('wkf_workitem_id_seq')")
    id_new = cr.fetchone()[0]
    cr.execute("insert into wkf_workitem (id,act_id,inst_id,state) values (%s,%s,%s,'active')", (id_new, activity['id'], instance_id))
    # Re-read the row so the in-memory work item matches the DB exactly.
    cr.execute('select * from wkf_workitem where id=%s', (id_new,))
    work_item_values = cr.dictfetchone()
    logger.info('Created workflow item in activity %s',
                activity['id'],
                extra={'ident': (session.uid, record.model, record.id)})
    # Immediately execute the freshly created work item.
    workflow_item = WorkflowItem(session, record, work_item_values)
    workflow_item.process(stack=stack)
@classmethod
def create_all(cls, session, record, activities, instance_id, stack):
    """Create and process one work item per activity (see create())."""
    assert isinstance(activities, list)
    for act in activities:
        cls.create(session, record, act, instance_id, stack)
def process(self, signal=None, force_running=False, stack=None):
    """Execute this work item's activity and try its outgoing transitions.

    Returns False when the activity's execution aborted, True otherwise.
    `stack` collects client actions returned by executed activities.
    When no transition fires, DB triggers are registered so writes on the
    trigger records re-evaluate this work item later.
    """
    assert isinstance(force_running, bool)
    assert stack is not None
    cr = self.session.cr
    cr.execute('select * from wkf_activity where id=%s', (self.workitem['act_id'],))
    activity = cr.dictfetchone()
    triggers = False
    if self.workitem['state'] == 'active':
        triggers = True
        # Run the activity body; abort processing if it failed.
        if not self._execute(activity, stack):
            return False
    if force_running or self.workitem['state'] == 'complete':
        ok = self._split_test(activity['split_mode'], signal, stack)
        # A successful split consumed the work item: no triggers needed.
        triggers = triggers and not ok
    if triggers:
        cr.execute('select * from wkf_transition where act_from=%s ORDER BY sequence,id', (self.workitem['act_id'],))
        for trans in cr.dictfetchall():
            if trans['trigger_model']:
                # Register a trigger row per record matched by the
                # transition's trigger expression.
                ids = self.wkf_expr_eval_expr(trans['trigger_expr_id'])
                for res_id in ids:
                    cr.execute('select nextval(\'wkf_triggers_id_seq\')')
                    id = cr.fetchone()[0]
                    cr.execute('insert into wkf_triggers (model,res_id,instance_id,workitem_id,id) values (%s,%s,%s,%s,%s)', (trans['trigger_model'], res_id, self.workitem['inst_id'], self.workitem['id'], id))
    return True
def _execute(self, activity, stack):
    """Execute the activity body according to its kind.

    Kinds: dummy (optional server action), function (evaluate the activity
    code), stopall (kill sibling work items), subflow (spawn or attach a
    sub-workflow). Also sends `subflow.<signal>` signals to parent
    workflows when the activity defines signal_send. Returns False when a
    subflow target record could not be resolved, otherwise the result of
    the last executed action (truthy).
    """
    result = True
    cr = self.session.cr
    signal_todo = []

    if (self.workitem['state'] == 'active') and activity['signal_send']:
        # Collect parent instances waiting on this one via subflow_id; the
        # signals are delivered at the end, after state changes are done.
        cr.execute("select i.id,w.osv,i.res_id from wkf_instance i left join wkf w on (i.wkf_id=w.id) where i.id IN (select inst_id from wkf_workitem where subflow_id=%s)", (self.workitem['inst_id'],))
        for instance_id, model_name, record_id in cr.fetchall():
            record = Record(model_name, record_id)
            signal_todo.append((instance_id, record, activity['signal_send']))

    if activity['kind'] == WorkflowActivity.KIND_DUMMY:
        # Dummy: complete immediately, optionally running a server action.
        if self.workitem['state'] == 'active':
            self._state_set(activity, 'complete')
            if activity['action_id']:
                res2 = self.wkf_expr_execute_action(activity)
                if res2:
                    stack.append(res2)
                    result = res2
    elif activity['kind'] == WorkflowActivity.KIND_FUNCTION:
        # Function: run the activity code, then the optional server action.
        if self.workitem['state'] == 'active':
            self._state_set(activity, 'running')
            returned_action = self.wkf_expr_execute(activity)
            if type(returned_action) in (dict,):
                stack.append(returned_action)
            if activity['action_id']:
                res2 = self.wkf_expr_execute_action(activity)
                # A client action has been returned
                if res2:
                    stack.append(res2)
                    result = res2
            self._state_set(activity, 'complete')
    elif activity['kind'] == WorkflowActivity.KIND_STOPALL:
        # Stopall: delete every other work item of this instance.
        if self.workitem['state'] == 'active':
            self._state_set(activity, 'running')
            cr.execute('delete from wkf_workitem where inst_id=%s and id<>%s', (self.workitem['inst_id'], self.workitem['id']))
            if activity['action']:
                self.wkf_expr_execute(activity)
            self._state_set(activity, 'complete')
    elif activity['kind'] == WorkflowActivity.KIND_SUBFLOW:
        if self.workitem['state'] == 'active':
            self._state_set(activity, 'running')
            if activity.get('action', False):
                # The action must return the target record id; map it to the
                # existing subflow instance on that record.
                id_new = self.wkf_expr_execute(activity)
                if not id_new:
                    cr.execute('delete from wkf_workitem where id=%s', (self.workitem['id'],))
                    return False
                # Python 2: ids may be int or long.
                assert type(id_new) == type(1) or type(id_new) == type(1L), 'Wrong return value: ' + str(id_new) + ' ' + str(type(id_new))
                cr.execute('select id from wkf_instance where res_id=%s and wkf_id=%s', (id_new, activity['subflow_id']))
                id_new = cr.fetchone()[0]
            else:
                # No action: spawn a fresh subflow instance on this record.
                inst = instance.WorkflowInstance(self.session, self.record)
                id_new = inst.create(activity['subflow_id'])
            cr.execute('update wkf_workitem set subflow_id=%s where id=%s', (id_new, self.workitem['id']))
            self.workitem['subflow_id'] = id_new
        if self.workitem['state'] == 'running':
            # Complete only once the subflow instance itself completed.
            cr.execute("select state from wkf_instance where id=%s", (self.workitem['subflow_id'],))
            state = cr.fetchone()[0]
            if state == 'complete':
                self._state_set(activity, 'complete')

    # Deliver the collected subflow signals to the parent workflows.
    for instance_id, record, signal_send in signal_todo:
        wi = instance.WorkflowInstance(self.session, record, {'id': instance_id})
        wi.validate(signal_send, force_running=True)

    return result
def _state_set(self, activity, state):
self.session.cr.execute('update wkf_workitem set state=%s where id=%s', (state, self.workitem['id']))
self.workitem['state'] = state
logger.info('Changed state of work item %s to "%s" in activity %s',
self.workitem['id'], state, activity['id'],
extra={'ident': (self.session.uid, self.record.model, self.record.id)})
    def _split_test(self, split_mode, signal, stack):
        """
        Try to leave the current activity through its outgoing transitions.

        :param split_mode: 'XOR' (fire the first matching transition),
            'OR' (fire every matching transition), or any other value
            (AND semantics: every outgoing transition must match).
        :param signal: name of the signal being processed (may be falsy).
        :param stack: list of pending results, forwarded to the join test.
        :return: True when the workitem was consumed (transitions fired),
            False otherwise.
        """
        cr = self.session.cr
        # Outgoing transitions of the current activity, in deterministic order.
        cr.execute('select * from wkf_transition where act_from=%s ORDER BY sequence,id', (self.workitem['act_id'],))
        test = False
        transitions = []
        alltrans = cr.dictfetchall()
        if split_mode in ('XOR', 'OR'):
            # Fire each transition whose guard passes; XOR stops at the first.
            for transition in alltrans:
                if self.wkf_expr_check(transition,signal):
                    test = True
                    transitions.append((transition['id'], self.workitem['inst_id']))
                    if split_mode=='XOR':
                        break
        else:
            # AND split: a single failing guard cancels the whole split.
            test = True
            for transition in alltrans:
                if not self.wkf_expr_check(transition, signal):
                    test = False
                    break
                # Only queue transitions that are not already recorded for
                # this instance, so the insert below stays duplicate-free.
                cr.execute('select count(*) from wkf_witm_trans where trans_id=%s and inst_id=%s', (transition['id'], self.workitem['inst_id']))
                if not cr.fetchone()[0]:
                    transitions.append((transition['id'], self.workitem['inst_id']))
        if test and transitions:
            # Record the fired transitions, delete the source workitem, then
            # let each target activity check whether its join condition holds.
            cr.executemany('insert into wkf_witm_trans (trans_id,inst_id) values (%s,%s)', transitions)
            cr.execute('delete from wkf_workitem where id=%s', (self.workitem['id'],))
            for t in transitions:
                self._join_test(t[0], t[1], stack)
            return True
        return False
    def _join_test(self, trans_id, inst_id, stack):
        """
        Evaluate the join condition of the activity targeted by ``trans_id``
        and create its workitem when the condition is satisfied.

        :param trans_id: id of the transition that was just fired.
        :param inst_id: id of the workflow instance.
        :param stack: list of pending results, forwarded to the new workitem.
        """
        cr = self.session.cr
        # Fetch the target activity of the fired transition.
        cr.execute('select * from wkf_activity where id=(select act_to from wkf_transition where id=%s)', (trans_id,))
        activity = cr.dictfetchone()
        if activity['join_mode']=='XOR':
            # XOR join: one incoming transition is enough to activate; the
            # consumed transition record is removed afterwards.
            WorkflowItem.create(self.session, self.record, activity, inst_id, stack=stack)
            cr.execute('delete from wkf_witm_trans where inst_id=%s and trans_id=%s', (inst_id,trans_id))
        else:
            # AND join: every transition entering this activity must have
            # been fired for this instance before it activates.
            cr.execute('select id from wkf_transition where act_to=%s ORDER BY sequence,id', (activity['id'],))
            trans_ids = cr.fetchall()
            ok = True
            for (id,) in trans_ids:
                cr.execute('select count(*) from wkf_witm_trans where trans_id=%s and inst_id=%s', (id,inst_id))
                res = cr.fetchone()[0]
                if not res:
                    ok = False
                    break
            if ok:
                # All incoming transitions fired: consume them and activate.
                for (id,) in trans_ids:
                    cr.execute('delete from wkf_witm_trans where trans_id=%s and inst_id=%s', (id,inst_id))
                WorkflowItem.create(self.session, self.record, activity, inst_id, stack=stack)
    def wkf_expr_eval_expr(self, lines):
        """
        Evaluate each non-empty line of ``lines`` against a fresh
        ``Environment``, returning the value of the last evaluated line.

        :param lines: newline-separated python expressions (workflow action).
        :return: value of the last line; False if every line was blank.
        :raise AssertionError: when ``lines`` is empty/None.
        """
        assert lines, 'You used a NULL action in a workflow, use dummy node instead.'
        result = False
        for line in lines.split('\n'):
            line = line.strip()
            if not line:
                continue
            # Fast paths: literal True/False need no evaluation environment.
            if line == 'True':
                result = True
            elif line == 'False':
                result = False
            else:
                # A fresh Environment is built for every evaluated line.
                # NOTE(review): ``eval`` here accepts a ``nocopy`` keyword, so
                # it is presumably openerp's safe_eval bound to this name --
                # confirm against the module-level imports.
                env = Environment(self.session, self.record)
                result = eval(line, env, nocopy=True)
        return result
def wkf_expr_execute_action(self, activity):
"""
Evaluate the ir.actions.server action specified in the activity.
"""
context = {
'active_model': self.record.model,
'active_id': self.record.id,
'active_ids': [self.record.id]
}
ir_actions_server = openerp.registry(self.session.cr.dbname)['ir.actions.server']
result = ir_actions_server.run(self.session.cr, self.session.uid, [activity['action_id']], context)
return result
def wkf_expr_execute(self, activity):
"""
Evaluate the action specified in the activity.
"""
return self.wkf_expr_eval_expr(activity['action'])
def wkf_expr_check(self, transition, signal):
"""
Test if a transition can be taken. The transition can be taken if:
- the signal name matches,
- the uid is SUPERUSER_ID or the user groups contains the transition's
group,
- the condition evaluates to a truish value.
"""
if transition['signal'] and signal != transition['signal']:
return False
if self.session.uid != openerp.SUPERUSER_ID and transition['group_id']:
registry = openerp.registry(self.session.cr.dbname)
user_groups = registry['res.users'].read(self.session.cr, self.session.uid, [self.session.uid], ['groups_id'])[0]['groups_id']
if transition['group_id'] not in user_groups:
return False
return self.wkf_expr_eval_expr(transition['condition'])
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
ademmers/ansible
|
test/lib/ansible_test/_internal/thread.py
|
68
|
1866
|
"""Python threading tools."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import threading
import sys
try:
# noinspection PyPep8Naming
import Queue as queue
except ImportError:
# noinspection PyUnresolvedReferences
import queue # pylint: disable=locally-disabled, import-error
class WrappedThread(threading.Thread):
    """Wrapper around Thread which captures results and exceptions."""
    def __init__(self, action):
        """
        :type action: () -> any
        """
        # noinspection PyOldStyleClasses
        super(WrappedThread, self).__init__()
        # Single-slot queue used to hand the (result, exc_info) pair back to
        # the thread calling wait_for_result().
        self._result = queue.Queue()
        self.action = action
        # Populated by wait_for_result() once the thread has finished.
        self.result = None
    def run(self):
        """
        Run action and capture results or exception.
        Do not override. Do not call directly. Executed by the start() method.
        """
        # We truly want to catch anything that the worker thread might do including call sys.exit.
        # Therefore we catch *everything* (including old-style class exceptions)
        # noinspection PyBroadException, PyPep8
        try:
            self._result.put((self.action(), None))
        # pylint: disable=locally-disabled, bare-except
        except:  # noqa
            self._result.put((None, sys.exc_info()))
    def wait_for_result(self):
        """
        Wait for thread to exit and return the result or raise an exception.

        Blocks until run() has published a (result, exc_info) pair.
        :rtype: any
        """
        result, exception = self._result.get()
        if exception:
            if sys.version_info[0] > 2:
                # Re-raise in the caller's thread, preserving the traceback.
                raise exception[1].with_traceback(exception[2])
            # Python 2 three-argument raise is a syntax error on Python 3,
            # so it is hidden inside exec().
            # noinspection PyRedundantParentheses
            exec('raise exception[0], exception[1], exception[2]')  # pylint: disable=locally-disabled, exec-used
        self.result = result
        return result
|
gpl-3.0
|
balp/googletest
|
scripts/gen_gtest_pred_impl.py
|
2538
|
21986
|
#!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""gen_gtest_pred_impl.py v0.1
Generates the implementation of Google Test predicate assertions and
accompanying tests.
Usage:
gen_gtest_pred_impl.py MAX_ARITY
where MAX_ARITY is a positive integer.
The command generates the implementation of up-to MAX_ARITY-ary
predicate assertions, and writes it to file gtest_pred_impl.h in the
directory where the script is. It also generates the accompanying
unit test in file gtest_pred_impl_unittest.cc.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import sys
import time
# Where this script is.
SCRIPT_DIR = os.path.dirname(sys.argv[0])
# Where to store the generated header.
HEADER = os.path.join(SCRIPT_DIR, '../include/gtest/gtest_pred_impl.h')
# Where to store the generated unit test.
UNIT_TEST = os.path.join(SCRIPT_DIR, '../test/gtest_pred_impl_unittest.cc')
def HeaderPreamble(n):
  """Returns the preamble for the header file.

  Args:
    n:  the maximum arity of the predicate macros to be generated.
  """
  # A map that defines the values used in the preamble template.
  DEFS = {
    'today' : time.strftime('%m/%d/%Y'),
    'year' : time.strftime('%Y'),
    'command' : '%s %s' % (os.path.basename(sys.argv[0]), n),
    'n' : n
    }
  # NOTE: the template below is emitted verbatim after %-substitution; do not
  # reformat it, as any change alters the generated header byte-for-byte.
  return (
"""// Copyright 2006, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// This file is AUTOMATICALLY GENERATED on %(today)s by command
// '%(command)s'.  DO NOT EDIT BY HAND!
//
// Implements a family of generic predicate assertion macros.
#ifndef GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
#define GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_
// Makes sure this header is not included before gtest.h.
#ifndef GTEST_INCLUDE_GTEST_GTEST_H_
# error Do not include gtest_pred_impl.h directly.  Include gtest.h instead.
#endif  // GTEST_INCLUDE_GTEST_GTEST_H_
// This header implements a family of generic predicate assertion
// macros:
//
//   ASSERT_PRED_FORMAT1(pred_format, v1)
//   ASSERT_PRED_FORMAT2(pred_format, v1, v2)
//   ...
//
// where pred_format is a function or functor that takes n (in the
// case of ASSERT_PRED_FORMATn) values and their source expression
// text, and returns a testing::AssertionResult.  See the definition
// of ASSERT_EQ in gtest.h for an example.
//
// If you don't care about formatting, you can use the more
// restrictive version:
//
//   ASSERT_PRED1(pred, v1)
//   ASSERT_PRED2(pred, v1, v2)
//   ...
//
// where pred is an n-ary function or functor that returns bool,
// and the values v1, v2, ..., must support the << operator for
// streaming to std::ostream.
//
// We also define the EXPECT_* variations.
//
// For now we only support predicates whose arity is at most %(n)s.
// Please email googletestframework@googlegroups.com if you need
// support for higher arities.
// GTEST_ASSERT_ is the basic statement to which all of the assertions
// in this file reduce.  Don't use this in your code.
#define GTEST_ASSERT_(expression, on_failure) \\
  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\
  if (const ::testing::AssertionResult gtest_ar = (expression)) \\
    ; \\
  else \\
    on_failure(gtest_ar.failure_message())
""" % DEFS)
def Arity(n):
  """Returns the English name of the given arity, or None for negative n."""
  if n < 0:
    return None
  names = ('nullary', 'unary', 'binary', 'ternary')
  if n < len(names):
    return names[n]
  return '%s-ary' % n
def Title(word):
  """Returns *word* with only its first character upper-cased.

  Unlike str.title(), Title('4-ary') stays '4-ary' ('4-ary'.title()
  would be '4-Ary').
  """
  head, tail = word[0], word[1:]
  return head.upper() + tail
def OneTo(n):
  """Returns the list [1, 2, 3, ..., n].

  NOTE: under Python 2 (which this script targets) range() returns a
  real list; callers only ever iterate the result.
  """
  return range(1, n + 1)
def Iter(n, format, sep=''):
  """Joins n instantiations of *format* over the integers 1..n.

  Each piece is *format* with every one of its '%s' specs replaced by
  the same iteration index; the n pieces are joined with *sep*.

  Example:
    Iter(3, 'v%s', sep=', ') returns 'v1, v2, v3'.
  """
  # Each piece must be fed one copy of the index per '%s' spec.
  spec_count = format.count('%s')
  pieces = [format % ((i,) * spec_count) for i in OneTo(n)]
  return sep.join(pieces)
def ImplementationForArity(n):
  """Returns the implementation of n-ary predicate assertions.

  The result is a C++ source fragment defining AssertPred<n>Helper and
  the GTEST_PRED*/EXPECT_PRED*/ASSERT_PRED* macro family for arity n.
  """
  # A map that defines the values used in the implementation template.
  DEFS = {
    'n' : str(n),
    'vs' : Iter(n, 'v%s', sep=', '),
    'vts' : Iter(n, '#v%s', sep=', '),
    'arity' : Arity(n),
    'Arity' : Title(Arity(n))
    }
  # The template chunks below are emitted verbatim after %-substitution;
  # the odd-looking indentation aligns the *generated* C++ code.
  impl = """
// Helper function for implementing {EXPECT|ASSERT}_PRED%(n)s.  Don't use
// this in your code.
template <typename Pred""" % DEFS
  impl += Iter(n, """,
          typename T%s""")
  impl += """>
AssertionResult AssertPred%(n)sHelper(const char* pred_text""" % DEFS
  impl += Iter(n, """,
                                      const char* e%s""")
  impl += """,
                                      Pred pred"""
  impl += Iter(n, """,
                                      const T%s& v%s""")
  impl += """) {
  if (pred(%(vs)s)) return AssertionSuccess();
""" % DEFS
  impl += '  return AssertionFailure() << pred_text << "("'
  impl += Iter(n, """
                            << e%s""", sep=' << ", "')
  impl += ' << ") evaluates to false, where"'
  impl += Iter(n, """
                  << "\\n" << e%s << " evaluates to " << v%s""")
  impl += """;
}
// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
// Don't use this in your code.
#define GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, on_failure)\\
  GTEST_ASSERT_(pred_format(%(vts)s, %(vs)s), \\
                on_failure)
// Internal macro for implementing {EXPECT|ASSERT}_PRED%(n)s.  Don't use
// this in your code.
#define GTEST_PRED%(n)s_(pred, %(vs)s, on_failure)\\
  GTEST_ASSERT_(::testing::AssertPred%(n)sHelper(#pred""" % DEFS
  impl += Iter(n, """, \\
                                             #v%s""")
  impl += """, \\
                                             pred"""
  impl += Iter(n, """, \\
                                             v%s""")
  impl += """), on_failure)
// %(Arity)s predicate assertion macros.
#define EXPECT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
  GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_NONFATAL_FAILURE_)
#define EXPECT_PRED%(n)s(pred, %(vs)s) \\
  GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_NONFATAL_FAILURE_)
#define ASSERT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\
  GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_FATAL_FAILURE_)
#define ASSERT_PRED%(n)s(pred, %(vs)s) \\
  GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_FATAL_FAILURE_)
""" % DEFS
  return impl
def HeaderPostamble():
  """Returns the postamble (closing include guard) for the header file."""
  return '\n#endif  // GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_\n'
def GenerateFile(path, content):
  """Overwrites the file at *path* with *content*.

  Args:
    path:    path of the file to (over)write.
    content: string written verbatim (no trailing newline is added).
  """
  print('Updating file %s . . .' % path)
  # open() + a with-block replaces the deprecated file() builtin (removed in
  # Python 3) and guarantees the handle is closed even if the write fails.
  with open(path, 'w') as f:
    f.write(content)
  print('File %s has been updated.' % path)
def GenerateHeader(n):
  """Regenerates the header implementing predicate assertions of arity <= n."""
  body = ''.join([ImplementationForArity(i) for i in OneTo(n)])
  content = HeaderPreamble(n) + body + HeaderPostamble()
  GenerateFile(HEADER, content)
def UnitTestPreamble():
  """Returns the preamble for the unit test file."""
  # A map that defines the values used in the preamble template.
  DEFS = {
    'today' : time.strftime('%m/%d/%Y'),
    'year' : time.strftime('%Y'),
    'command' : '%s %s' % (os.path.basename(sys.argv[0]), sys.argv[1]),
    }
  # NOTE: emitted verbatim after %-substitution; do not reformat the template.
  return (
"""// Copyright 2006, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// This file is AUTOMATICALLY GENERATED on %(today)s by command
// '%(command)s'.  DO NOT EDIT BY HAND!
// Regression test for gtest_pred_impl.h
//
// This file is generated by a script and quite long.  If you intend to
// learn how Google Test works by reading its unit tests, read
// gtest_unittest.cc instead.
//
// This is intended as a regression test for the Google Test predicate
// assertions.  We compile it as part of the gtest_unittest target
// only to keep the implementation tidy and compact, as it is quite
// involved to set up the stage for testing Google Test using Google
// Test itself.
//
// Currently, gtest_unittest takes ~11 seconds to run in the testing
// daemon.  In the future, if it grows too large and needs much more
// time to finish, we should consider separating this file into a
// stand-alone regression test.
#include <iostream>
#include "gtest/gtest.h"
#include "gtest/gtest-spi.h"
// A user-defined data type.
struct Bool {
  explicit Bool(int val) : value(val != 0) {}
  bool operator>(int n) const { return value > Bool(n).value; }
  Bool operator+(const Bool& rhs) const { return Bool(value + rhs.value); }
  bool operator==(const Bool& rhs) const { return value == rhs.value; }
  bool value;
};
// Enables Bool to be used in assertions.
std::ostream& operator<<(std::ostream& os, const Bool& x) {
  return os << (x.value ? "true" : "false");
}
""" % DEFS)
def TestsForArity(n):
  """Returns the tests for n-ary predicate assertions.

  The result is a C++ source fragment containing sample predicates,
  predicate-formatters, a shared TEST_F fixture, and one generated test
  per combination of assertion flavour (see GenTest below).
  """
  # A map that defines the values used in the template for the tests.
  DEFS = {
    'n' : n,
    'es' : Iter(n, 'e%s', sep=', '),
    'vs' : Iter(n, 'v%s', sep=', '),
    'vts' : Iter(n, '#v%s', sep=', '),
    'tvs' : Iter(n, 'T%s v%s', sep=', '),
    'int_vs' : Iter(n, 'int v%s', sep=', '),
    'Bool_vs' : Iter(n, 'Bool v%s', sep=', '),
    'types' : Iter(n, 'typename T%s', sep=', '),
    'v_sum' : Iter(n, 'v%s', sep=' + '),
    'arity' : Arity(n),
    'Arity' : Title(Arity(n)),
    }
  # The template chunks below are emitted verbatim after %-substitution;
  # the odd-looking indentation aligns the *generated* C++ code.
  tests = (
"""// Sample functions/functors for testing %(arity)s predicate assertions.
// A %(arity)s predicate function.
template <%(types)s>
bool PredFunction%(n)s(%(tvs)s) {
  return %(v_sum)s > 0;
}
// The following two functions are needed to circumvent a bug in
// gcc 2.95.3, which sometimes has problem with the above template
// function.
bool PredFunction%(n)sInt(%(int_vs)s) {
  return %(v_sum)s > 0;
}
bool PredFunction%(n)sBool(%(Bool_vs)s) {
  return %(v_sum)s > 0;
}
""" % DEFS)
  tests += """
// A %(arity)s predicate functor.
struct PredFunctor%(n)s {
  template <%(types)s>
  bool operator()(""" % DEFS
  tests += Iter(n, 'const T%s& v%s', sep=""",
                  """)
  tests += """) {
    return %(v_sum)s > 0;
  }
};
""" % DEFS
  tests += """
// A %(arity)s predicate-formatter function.
template <%(types)s>
testing::AssertionResult PredFormatFunction%(n)s(""" % DEFS
  tests += Iter(n, 'const char* e%s', sep=""",
                                             """)
  tests += Iter(n, """,
                                             const T%s& v%s""")
  tests += """) {
  if (PredFunction%(n)s(%(vs)s))
    return testing::AssertionSuccess();
  return testing::AssertionFailure()
      << """ % DEFS
  tests += Iter(n, 'e%s', sep=' << " + " << ')
  tests += """
      << " is expected to be positive, but evaluates to "
      << %(v_sum)s << ".";
}
""" % DEFS
  tests += """
// A %(arity)s predicate-formatter functor.
struct PredFormatFunctor%(n)s {
  template <%(types)s>
  testing::AssertionResult operator()(""" % DEFS
  tests += Iter(n, 'const char* e%s', sep=""",
                                      """)
  tests += Iter(n, """,
                                      const T%s& v%s""")
  tests += """) const {
    return PredFormatFunction%(n)s(%(es)s, %(vs)s);
  }
};
""" % DEFS
  tests += """
// Tests for {EXPECT|ASSERT}_PRED_FORMAT%(n)s.
class Predicate%(n)sTest : public testing::Test {
 protected:
  virtual void SetUp() {
    expected_to_finish_ = true;
    finished_ = false;""" % DEFS
  tests += """
    """ + Iter(n, 'n%s_ = ') + """0;
  }
"""
  tests += """
  virtual void TearDown() {
    // Verifies that each of the predicate's arguments was evaluated
    // exactly once."""
  tests += ''.join(["""
    EXPECT_EQ(1, n%s_) <<
        "The predicate assertion didn't evaluate argument %s "
        "exactly once.";""" % (i, i + 1) for i in OneTo(n)])
  tests += """
    // Verifies that the control flow in the test function is expected.
    if (expected_to_finish_ && !finished_) {
      FAIL() << "The predicate assertion unexpactedly aborted the test.";
    } else if (!expected_to_finish_ && finished_) {
      FAIL() << "The failed predicate assertion didn't abort the test "
                "as expected.";
    }
  }
  // true iff the test function is expected to run to finish.
  static bool expected_to_finish_;
  // true iff the test function did run to finish.
  static bool finished_;
""" % DEFS
  tests += Iter(n, """
  static int n%s_;""")
  tests += """
};
bool Predicate%(n)sTest::expected_to_finish_;
bool Predicate%(n)sTest::finished_;
""" % DEFS
  tests += Iter(n, """int Predicate%%(n)sTest::n%s_;
""") % DEFS
  tests += """
typedef Predicate%(n)sTest EXPECT_PRED_FORMAT%(n)sTest;
typedef Predicate%(n)sTest ASSERT_PRED_FORMAT%(n)sTest;
typedef Predicate%(n)sTest EXPECT_PRED%(n)sTest;
typedef Predicate%(n)sTest ASSERT_PRED%(n)sTest;
""" % DEFS
  def GenTest(use_format, use_assert, expect_failure,
              use_functor, use_user_type):
    """Returns the test for a predicate assertion macro.

    Args:
      use_format:     true iff the assertion is a *_PRED_FORMAT*.
      use_assert:     true iff the assertion is a ASSERT_*.
      expect_failure: true iff the assertion is expected to fail.
      use_functor:    true iff the first argument of the assertion is
                      a functor (as opposed to a function).
      use_user_type:  true iff the predicate functor/function takes
                      argument(s) of a user-defined type.

    Example:
      GenTest(1, 0, 0, 1, 0) returns a test that tests the behavior
      of a successful EXPECT_PRED_FORMATn() that takes a functor
      whose arguments have built-in types."""
    if use_assert:
      assrt = 'ASSERT'  # 'assert' is reserved, so we cannot use
                        # that identifier here.
    else:
      assrt = 'EXPECT'
    assertion = assrt + '_PRED'
    if use_format:
      pred_format = 'PredFormat'
      assertion += '_FORMAT'
    else:
      pred_format = 'Pred'
    assertion += '%(n)s' % DEFS
    if use_functor:
      pred_format_type = 'functor'
      pred_format += 'Functor%(n)s()'
    else:
      pred_format_type = 'function'
      pred_format += 'Function%(n)s'
      if not use_format:
        if use_user_type:
          pred_format += 'Bool'
        else:
          pred_format += 'Int'
    test_name = pred_format_type.title()
    if use_user_type:
      arg_type = 'user-defined type (Bool)'
      test_name += 'OnUserType'
      if expect_failure:
        arg = 'Bool(n%s_++)'
      else:
        arg = 'Bool(++n%s_)'
    else:
      arg_type = 'built-in type (int)'
      test_name += 'OnBuiltInType'
      if expect_failure:
        arg = 'n%s_++'
      else:
        arg = '++n%s_'
    if expect_failure:
      successful_or_failed = 'failed'
      expected_or_not = 'expected.'
      test_name += 'Failure'
    else:
      successful_or_failed = 'successful'
      expected_or_not = 'UNEXPECTED!'
      test_name += 'Success'
    # A map that defines the values used in the test template.
    defs = DEFS.copy()
    defs.update({
      'assert' : assrt,
      'assertion' : assertion,
      'test_name' : test_name,
      'pf_type' : pred_format_type,
      'pf' : pred_format,
      'arg_type' : arg_type,
      'arg' : arg,
      'successful' : successful_or_failed,
      'expected' : expected_or_not,
      })
    test = """
// Tests a %(successful)s %(assertion)s where the
// predicate-formatter is a %(pf_type)s on a %(arg_type)s.
TEST_F(%(assertion)sTest, %(test_name)s) {""" % defs
    indent = (len(assertion) + 3)*' '
    extra_indent = ''
    if expect_failure:
      extra_indent = '  '
      if use_assert:
        test += """
  expected_to_finish_ = false;
  EXPECT_FATAL_FAILURE({  // NOLINT"""
      else:
        test += """
  EXPECT_NONFATAL_FAILURE({  // NOLINT"""
    test += '\n' + extra_indent + """  %(assertion)s(%(pf)s""" % defs
    test = test % defs
    test += Iter(n, ',\n' + indent + extra_indent + '%(arg)s' % defs)
    test += ');\n' + extra_indent + '  finished_ = true;\n'
    if expect_failure:
      test += '  }, "");\n'
    test += '}\n'
    return test
  # Generates tests for all 2**5 = 32 combinations.
  tests += ''.join([GenTest(use_format, use_assert, expect_failure,
                            use_functor, use_user_type)
                    for use_format in [0, 1]
                    for use_assert in [0, 1]
                    for expect_failure in [0, 1]
                    for use_functor in [0, 1]
                    for use_user_type in [0, 1]
                    ])
  return tests
def UnitTestPostamble():
  """Returns the postamble for the unit test file (currently empty)."""
  postamble = ''
  return postamble
def GenerateUnitTest(n):
  """Regenerates the unit test covering predicate assertions of arity <= n."""
  tests = ''.join([TestsForArity(i) for i in OneTo(n)])
  content = UnitTestPreamble() + tests + UnitTestPostamble()
  GenerateFile(UNIT_TEST, content)
def _Main():
  """The entry point of the script.

  Parses MAX_ARITY from the command line, then regenerates both the
  header file and its accompanying unit test; prints usage and exits
  with status 1 when the argument is missing.
  """
  if len(sys.argv) != 2:
    print __doc__
    print 'Author: ' + __author__
    sys.exit(1)
  n = int(sys.argv[1])
  GenerateHeader(n)
  GenerateUnitTest(n)
# Only regenerate the files when run as a script, not on import.
if __name__ == '__main__':
  _Main()
|
bsd-3-clause
|
geraldinepascal/FROGS
|
tools/demultiplex/demultiplex.py
|
1
|
19651
|
#!/usr/bin/env python3
#
# Copyright (C) 2018 INRA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
__author__ = 'Plateforme bioinformatique Toulouse and SIGENAE'
__copyright__ = 'Copyright (C) 2015 INRA'
__license__ = 'GNU General Public License'
__version__ = '3.2.3'
__email__ = 'frogs-support@inrae.fr'
__status__ = 'prod'
import os
import sys
import gzip
import time
import tarfile
import argparse
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
# PATH
BIN_DIR = os.path.abspath(os.path.join(os.path.dirname(CURRENT_DIR), "libexec"))
os.environ['PATH'] = BIN_DIR + os.pathsep + os.environ['PATH']
# PYTHONPATH
LIB_DIR = os.path.abspath(os.path.join(os.path.dirname(CURRENT_DIR), "lib"))
sys.path.append(LIB_DIR)
if os.getenv('PYTHONPATH') is None: os.environ['PYTHONPATH'] = LIB_DIR
else: os.environ['PYTHONPATH'] = LIB_DIR + os.pathsep + os.environ['PYTHONPATH']
from frogsUtils import *
##################################################################################################################################################
#
# COMMAND LINES
#
##################################################################################################################################################
class Demultiplex(Cmd):
    """
    @summary : Demultiplex samples with splitbc.pl, producing one fastq
               (or R1/R2 pair) per sample plus ambiguous/unmatched files.
    """
    def __init__(self, R1_input_file, R2_input_file, barcode_file, mismatches, end, global_tmp_files, R1_output_files, R2_output_files, demultiplex_err_files1, demultiplex_err_files2, demultiplex_log):
        """
        @param R1_input_file : [str] Path to the R1 fastq file.
        @param R2_input_file : [str] Path to the R2 fastq file (None for single-end data).
        @param barcode_file : [str] Path to barcodes and samples (one line by sample) description file. Line format : SAMPLE_NAME<TAB>BARCODE.
        @param mismatches : [int] Number of mismatches allowed in the barcode.
        @param end : [str] Barcode end: forward 'bol' or reverse 'eol' (def bol).
        @param global_tmp_files : [TmpFiles] Manager that tracks every temporary file/dir created here.
        @param R1_output_files : [list] Paths to the R1 fastq files (one by sample). User provides an empty list; filled in place.
        @param R2_output_files : [list] Paths to the R2 fastq files (one by sample). User provides an empty list; filled in place.
        @param demultiplex_err_files1 : [list] R1 files with ambiguous and unmatched reads. User provides an empty list; filled in place.
        @param demultiplex_err_files2 : [list] R2 files with ambiguous and unmatched reads. User provides an empty list; filled in place.
        @param demultiplex_log : [str] Path of the file receiving splitbc.pl's stdout (appended).
        """
        # Private working folder under the global temporary directory.
        tmp_files = TmpFiles( global_tmp_files.tmp_dir )
        tmp_folder = os.path.join( global_tmp_files.tmp_dir, global_tmp_files.prefix + "_tmp", tmp_files.prefix )
        global_tmp_files.dirs.append(tmp_folder)
        if not os.path.exists(tmp_folder):
            os.mkdir(tmp_folder)
        self.samples_names = list()
        # Update output data
        # One output fastq (per read direction) is declared for each sample
        # listed in the barcode file; the sample name is the line minus its
        # last whitespace-separated field (the barcode).
        FH_barcode = open( barcode_file )
        for line in FH_barcode:
            sample_name, barcode = line.strip().rsplit(None, 1)
            R1_output_files.append( os.path.join(tmp_folder, sample_name + '_R1.fastq') )
            global_tmp_files.files.append(os.path.join(tmp_folder, sample_name + '_R1.fastq') )
            if R2_input_file != None:
                R2_output_files.append( os.path.join(tmp_folder, sample_name + '_R2.fastq') )
                global_tmp_files.files.append(os.path.join(tmp_folder, sample_name + '_R2.fastq'))
            self.samples_names.append( sample_name.replace(' ', '_') )
        FH_barcode.close()
        self.R1_input_file = R1_input_file
        # Reads whose barcode matched several samples / no sample at all.
        self.ambiguous = os.path.join(tmp_folder, 'ambiguous_R1.fastq')
        self.unmatched = os.path.join(tmp_folder, 'unmatched_R1.fastq')
        demultiplex_err_files1.extend( [self.ambiguous,self.unmatched] )
        global_tmp_files.files.extend( [self.ambiguous,self.unmatched] )
        if R2_input_file != None:
            demultiplex_err_files2.extend( [os.path.join(tmp_folder, 'ambiguous_R2.fastq'),os.path.join(tmp_folder, 'unmatched_R2.fastq') ])
            global_tmp_files.files.extend( [os.path.join(tmp_folder, 'ambiguous_R2.fastq'),os.path.join(tmp_folder, 'unmatched_R2.fastq') ])
        # Set class
        # '%' in the prefix patterns is replaced by splitbc.pl with the
        # sample name; stdout is appended to the demultiplexing log.
        if R2_input_file != None:
            Cmd.__init__( self,
                          'splitbc.pl',
                          'Demultiplex reads.',
                          R1_input_file + ' ' + R2_input_file + ' --' + end + ' --bcfile ' + barcode_file + ' --mismatches ' + repr(mismatches) + ' --trim --no_adapt --prefix-r1 ' + os.path.join(tmp_folder, '%_R1.fastq') +\
                          ' --prefix-r2 ' + os.path.join(tmp_folder, '%_R2.fastq') + ' >> ' + demultiplex_log,
                          None )
        else:
            Cmd.__init__( self,
                          'splitbc.pl',
                          'Demultiplex reads.',
                          R1_input_file + ' --' + end + ' --bcfile ' + barcode_file + ' --mismatches ' + repr(mismatches) + ' --trim --no_adapt --prefix-r1 ' + os.path.join(tmp_folder, '%_R1.fastq') +\
                          ' >> ' + demultiplex_log,
                          None )
    def parser(self, log_file):
        """
        @summary : Parse the command results to add information in log_file.
        @log_file : [str] Path to the sample process log file.
        """
        # Parse output
        # Matched/non-ambiguous counts are derived by subtracting the error
        # files' read counts from the input's read count.
        nb_seq_before = get_fastq_nb_seq(self.R1_input_file)
        nb_seq_unmatched = get_fastq_nb_seq(self.unmatched)
        nb_seq_ambiguous = get_fastq_nb_seq(self.ambiguous)
        # Write result
        FH_log = Logger( log_file )
        FH_log.write( 'Results :\n' )
        FH_log.write( '\tnb seq before demultiplexing : ' + str(nb_seq_before) + '\n' )
        FH_log.write( '\tnb seq after process matched : ' + str(nb_seq_before - nb_seq_unmatched) + '\n' )
        FH_log.write( '\tnb seq after process non-ambiguous : ' + str(nb_seq_before - nb_seq_unmatched - nb_seq_ambiguous) + '\n' )
        FH_log.close()
    def get_version(self):
        """
        @summary : Returns the program version number.
        @return : version number if this is possible, otherwise this method return 'unknown'.
        """
        return Cmd.get_version(self, 'stdout')
class Archive(Cmd):
    """
    @summary : Creates a gzipped tar archive with files.

    NOTE: the listed files are MOVED (os.rename) into a temporary folder
    before archiving and are deleted with it once the archive is written.
    """
    def __init__(self, archived_files, archive_path):
        """
        @param archived_files: [list] Files added in final archive.
        @param archive_path: [str] Path to the new archive.
        @raise Exception : when ``archived_files`` is empty.
        """
        # Stage the files in a dedicated temporary folder so tar -C can
        # archive them by basename, without their directory structure.
        tmp_files=TmpFiles( os.path.dirname(archive_path) )
        tmp_folder = os.path.join( tmp_files.tmp_dir, tmp_files.prefix)
        tmp_files.dirs.append(tmp_folder)
        if not os.path.exists(tmp_folder):
            os.makedirs(tmp_folder)
        if len(archived_files) == 0:
            raise_exception( Exception( "\n\n#ERROR : At least one file must be add to the archive '" + archive_path + "'.\n\n" ))
        archived_basenames = list()
        for current in archived_files:
            # Files already in the staging folder are kept in place.
            if not os.path.dirname(current) == tmp_folder:
                os.rename(current, os.path.join(tmp_folder,os.path.basename(current)))
                tmp_files.files.append(os.path.join(tmp_folder,os.path.basename(current)))
            archived_basenames.append(os.path.basename(current))
        Cmd.__init__( self,
                      'tar',
                      'Archives files.',
                      '-zcf ' + archive_path + ' -C ' + tmp_folder + " " + " ".join(archived_basenames),
                      None )
        # Kept so parser() can delete the staged copies after archiving.
        self.Files=tmp_files
    def parser(self,log_file):
        # Remove the staged files/folder once the archive has been created.
        self.Files.deleteAll()
##################################################################################################################################################
#
# FUNCTIONS
#
##################################################################################################################################################
def is_gzip( file ):
    """
    @summary: Tests whether a file is gzipped by trying to read one line
              through the gzip codec.
    @param file : [str] Path to processed file.
    @return: [bool] True if the file is gziped.
    """
    try:
        # Context manager guarantees the handle is closed in every case
        # (the previous version leaked it when readline() raised).
        with gzip.open(file) as FH_input:
            FH_input.readline()
        return True
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
        # are no longer swallowed; any read/decompress error means "not gzip".
        return False
def split_barcode_file( barcode_file, barcodes_file_list, global_tmp_files ):
    """
    @summary: In case of double multiplexe, split barcode file in one forward
              and multiple reverse barcode files.
    @param barcode_file: [str] Path to the input barcode file. Line format :
                         SAMPLE_NAME<TAB>FORWARD_BC<TAB>REVERSE_BC.
    @param barcodes_file_list: [list] Output parameter, filled with the paths
                               of the written barcode files (forward file first).
    @param global_tmp_files: [TmpFiles] Temporary files manager: files are
                             written in its tmp_dir and registered in it.
    """
    out_dir = global_tmp_files.tmp_dir
    barcode_dict = {}
    # Context manager: the input handle was previously never closed.
    with open(barcode_file, "rt") as barcode_input:
        for l in barcode_input:
            [s, f, r] = l.strip().split()
            # "forward_bc" lists each distinct forward barcode once (name == sequence)
            if not "forward_bc" in barcode_dict:
                barcode_dict["forward_bc"] = [f + "\t" + f]
            elif not f + "\t" + f in barcode_dict["forward_bc"]:
                barcode_dict["forward_bc"].append(f + "\t" + f)
            # one reverse barcode file per forward barcode: SAMPLE<TAB>REVERSE_BC
            if not f + "_reverse_bc" in barcode_dict:
                barcode_dict[f + "_reverse_bc"] = [s + "\t" + r]
            else:
                barcode_dict[f + "_reverse_bc"].append(s + "\t" + r)
    # Write the forward barcode file first (callers rely on it being at index 0)
    forward = barcode_dict.pop("forward_bc")
    barcodes_file_list.append(os.path.join(out_dir, "forward_bc"))
    global_tmp_files.files.append(os.path.join(out_dir, "forward_bc"))
    with open(os.path.join(out_dir, "forward_bc"), "wt") as FH_out:
        FH_out.write("\n".join(forward) + "\n")
    # Then one file per forward barcode, holding the reverse barcodes
    for bc_file in barcode_dict:
        barcodes_file_list.append(os.path.join(out_dir, bc_file))
        global_tmp_files.files.append(os.path.join(out_dir, bc_file))
        with open(os.path.join(out_dir, bc_file), "wt") as FH_out:
            FH_out.write("\n".join(barcode_dict[bc_file]) + "\n")
def get_fastq_nb_seq( fastq_file ):
    """
    @summary: Returns the number of sequences in fastq_file.
    @param fastq_file: [str] Path to the fastq file processed (plain or gzipped).
    @return: [int] The number of sequences.
    """
    if is_gzip(fastq_file):
        FH_input = gzip.open( fastq_file )
    else:
        FH_input = open( fastq_file )
    nb_line = 0
    try:
        for line in FH_input:
            nb_line += 1
    finally:
        # previously leaked when iteration raised
        FH_input.close()
    # Floor division: "nb_line/4" returned a float on Python 3, breaking the
    # documented int contract.
    return nb_line // 4
def concat_files(list_input, output_file):
    """
    @summary: Concatenates the content of several text files into one file.
    @param list_input: [list] Paths to the files to concatenate, in order.
    @param output_file: [str] Path to the output file.
    """
    # Context managers: the previous version leaked the output handle on error.
    with open(output_file, "wt") as FH_out:
        for current_file in list_input:
            with open(current_file) as FH_in:
                # Buffer lines and flush every 2000 to avoid one write per line;
                # "".join replaces the previous quadratic "+=" concatenation.
                buffer_lines = []
                for line in FH_in:
                    buffer_lines.append(line)
                    if len(buffer_lines) == 2000:
                        FH_out.write("".join(buffer_lines))
                        buffer_lines = []
                if buffer_lines:
                    FH_out.write("".join(buffer_lines))
def summarise_results( summary_file, barcode_file, log_file ):
    """
    @summary: Writes one summary (count per sample) from the demultiplexing log.
    @param summary_file: [str] Path to the output summary file (TSV).
    @param barcode_file: [str] Path to the barcode file; its first column gives
                         the sample names.
    @param log_file: [str] Path to the demultiplexing log (splitbc output).
    """
    # Context managers: the barcode and log handles were never closed before.
    sample_dict = dict()
    with open(barcode_file) as FH_barcode:
        for line in FH_barcode:
            sample_dict[line.split()[0]] = 0
    sample_dict["unmatched"] = 0
    sample_dict["ambiguous"] = 0
    with open(log_file, "rt") as FH_log:
        for line in FH_log:
            # splitbc header/footer lines carry no per-sample count
            if line.startswith("Barcode") or line.startswith("total"):
                continue
            # log line format: NAME<TAB>COUNT (PERCENT%) -> split on '(' too
            l = line.replace('(', '\t').split()
            # empty-line guard: the previous version raised IndexError on blank lines
            if l and l[0] in sample_dict:
                sample_dict[l[0]] += int(l[1])
    with open(summary_file, "wt") as FH_summary:
        FH_summary.write("#sample\tcount\n")
        for s in sample_dict:
            FH_summary.write(s + '\t' + str(sample_dict[s]) + '\n')
##################################################################################################################################################
#
# MAIN
#
##################################################################################################################################################
if __name__ == "__main__":
    # Manage parameters
    parser = argparse.ArgumentParser(
        description='Split by samples the reads in function of inner barcode.'
    )
    parser.add_argument('-m', '--mismatches', type=int, default=0, help="Number of mismatches allowed in barcode. [Default: %(default)s]")
    parser.add_argument('-e', '--end', type=str, default="bol", help="barcode is at the begining of the forward end (bol) or of the reverse (eol) or both (both). [Default: %(default)s]")
    parser.add_argument( '--debug', default=False, action='store_true', help="Keep temporary files to debug program." )
    parser.add_argument( '-v', '--version', action='version', version=__version__ )
    # Inputs
    group_input = parser.add_argument_group( 'Inputs' )
    group_input.add_argument( '--input-R1', required=True, help='The R1 sequence file with all samples (format: fastq).' )
    group_input.add_argument( '--input-R2', default=None, help='The R2 sequence file with all samples (format: fastq).' )
    group_input.add_argument( '--input-barcode', help='This file describes barcodes and samples (one line by sample). Line format : SAMPLE_NAME<TAB>BARCODE or SAMPLE_NAME<TAB>BARCODE_FW<TAB>BARCODE_RV.' )
    group_output = parser.add_argument_group( 'Outputs' )
    # Outputs
    group_output.add_argument( '--output-demultiplexed', default="demultiplexed_read.tar.gz", help='The tar file containing R1 files and R2 files for each sample (format: tar). [Default: %(default)s]' )
    group_output.add_argument( '--output-excluded', default="undemultiplexed_read.tar.gz", help='The tar file containing R1 files and R2 files not demultiplexed (format: tar). [Default: %(default)s]' )
    group_output.add_argument( '-s', '--summary', default='demultiplex_summary.tsv', help='TSV file with summary of filters results (format: TSV). [Default: %(default)s]')
    group_output.add_argument( '-l', '--log-file', default=sys.stdout, help='This output file will contain several informations on executed commands.')
    args = parser.parse_args()
    prevent_shell_injections(args)
    Logger.static_write(args.log_file, "## Application\nSoftware :" + sys.argv[0] + " (version : " + str(__version__) + ")\nCommand : " + " ".join(sys.argv) + "\n\n")
    # Process
    # Accumulators filled by the Demultiplex runs below: per-sample fastq paths
    # and the ambiguous/unmatched error files to concatenate afterwards.
    R1_files = list()
    R2_files = list()
    tmp_barcode_files = list()
    tmp_R1_files = list()
    tmp_R2_files = list()
    demultiplex_err_files1 = list()
    demultiplex_err_files2 = list()
    excluded_R1_file = os.path.join(os.path.split(args.output_demultiplexed)[0],os.path.basename(args.input_R1)+"_excluded_demult")
    if args.input_R2 != None :
        excluded_R2_file = os.path.join(os.path.split(args.output_demultiplexed)[0],os.path.basename(args.input_R2)+"_excluded_demult")
    uniq_id = str(time.time()) + "_" + str(os.getpid())
    tmp_files = TmpFiles( os.path.split(args.output_demultiplexed)[0] )
    demultiplex_log = tmp_files.add("Demult.log")
    tmp_folder=tmp_files.add_dir("tmp")
    os.mkdir(tmp_folder)
    sample_list=[]
    try:
        # Process
        if args.end == "bol" or args.end == "eol" :
            # Single-end barcode: one demultiplexing pass is enough.
            info="\n#Demultiplexing " + os.path.basename(args.input_R1)
            if args.input_R2 != None:
                info+= " and " + os.path.basename(args.input_R2)
            info += " with " + os.path.basename(args.input_barcode) + " in " + args.end + " strand\n"
            Logger.static_write(args.log_file,info)
            Demultiplex(args.input_R1, args.input_R2, args.input_barcode, args.mismatches, args.end, tmp_files, R1_files, R2_files, demultiplex_err_files1,demultiplex_err_files2, demultiplex_log).submit( args.log_file )
        else:
            # Double barcode ("both"): first pass on the forward barcodes, then
            # one pass per forward barcode on the matching reverse barcodes.
            split_barcode_file(args.input_barcode, tmp_barcode_files, tmp_files)
            info="\n#Demultiplexing " + os.path.basename(args.input_R1)
            if args.input_R2 != None:
                info+= " and " + os.path.basename(args.input_R2)
            info += " with " + os.path.basename(tmp_barcode_files[0]) + " in bol strand\n"
            Logger.static_write(args.log_file,info)
            Demultiplex(args.input_R1, args.input_R2, tmp_barcode_files[0], args.mismatches, "bol", tmp_files, tmp_R1_files, tmp_R2_files, demultiplex_err_files1,demultiplex_err_files2, demultiplex_log).submit( args.log_file )
            for idx,read1_file in enumerate(tmp_R1_files):
                bc = os.path.basename(read1_file).replace("_R1.fastq","")
                if os.path.join(tmp_files.tmp_dir,bc+"_reverse_bc") in tmp_barcode_files:
                    # skip forward barcodes that matched no read at all
                    if os.stat(tmp_R1_files[idx]).st_size != 0 :
                        info="\n#Demultiplexing " + os.path.basename(tmp_R1_files[idx])
                        if args.input_R2 != None:
                            info+= " and " + os.path.basename(tmp_R2_files[idx])
                        info += " with " + bc+"_reverse_bc" + " in eol strand\n"
                        Logger.static_write(args.log_file,info)
                        if args.input_R2 != None:
                            Demultiplex(tmp_R1_files[idx], tmp_R2_files[idx], os.path.join(tmp_files.tmp_dir,bc+"_reverse_bc"), args.mismatches, "eol", tmp_files, R1_files, R2_files, demultiplex_err_files1, demultiplex_err_files2, demultiplex_log).submit( args.log_file )
                        else:
                            Demultiplex(tmp_R1_files[idx], None, os.path.join(tmp_files.tmp_dir,bc+"_reverse_bc"), args.mismatches, "eol", tmp_files, R1_files, R2_files, demultiplex_err_files1, demultiplex_err_files2, demultiplex_log).submit( args.log_file )
        Logger.static_write(args.log_file,"\n#Summarising result\n")
        summarise_results( args.summary, args.input_barcode, demultiplex_log )
        # Gather ambiguous/unmatched reads, then archive both read sets.
        Logger.static_write(args.log_file,"\n#Concatenation of undemultiplexed files 1\n")
        concat_files(demultiplex_err_files1, excluded_R1_file )
        if len(R2_files) > 0:
            Logger.static_write(args.log_file,"\n#Concatenation of undemultiplexed files 2\n")
            concat_files(demultiplex_err_files2, excluded_R2_file )
            Logger.static_write(args.log_file,"\n#Archive demultiplexed R1 and R2 files\n")
            Archive(R1_files + R2_files, args.output_demultiplexed).submit( args.log_file )
            Logger.static_write(args.log_file,"\n#Archive undemultiplexed R1 and R2 files\n")
            Archive([excluded_R1_file,excluded_R2_file], args.output_excluded).submit( args.log_file )
        else:
            Logger.static_write(args.log_file,"\n#Archive demultiplexed files\n")
            Archive(R1_files, args.output_demultiplexed).submit( args.log_file )
            Logger.static_write(args.log_file,"\n#Archive undemultiplexed files\n")
            Archive([excluded_R1_file], args.output_excluded).submit( args.log_file )
    # Remove temporary files
    finally:
        if not args.debug:
            Logger.static_write(args.log_file,"\n#Removing temporary files\n")
            tmp_files.deleteAll()
|
gpl-3.0
|
lolhi/at1-S0834211
|
tools/perf/scripts/python/netdev-times.py
|
11271
|
15048
|
# Display a process of packets and processed time.
# It helps us to investigate networking or network device.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
# Module-level state: events are buffered here by the perf callbacks below and
# ordered/consumed by trace_end().
all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
              # which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry
                 # and a list which stacks receive
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
                  # skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
                       # tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which is freed
# options
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;
# indices of event_info tuple
EINFO_IDX_NAME= 0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU= 2
EINFO_IDX_TIME= 3
EINFO_IDX_PID= 4
EINFO_IDX_COMM= 5
# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
	"""Return the interval from src to dst, converted from nsec to msec."""
	delta_nsec = dst - src
	return delta_nsec / 1000000.0
# Display a process of transmitting a packet
def print_transmit(hunk):
	# 'dev' is the optional device-name filter set by trace_begin()
	if dev != 0 and hunk['dev'].find(dev) < 0:
		return
	# columns: device, length, queue timestamp, Qdisc latency, driver latency
	print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
		(hunk['dev'], hunk['len'],
		nsecs_secs(hunk['queue_t']),
		nsecs_nsecs(hunk['queue_t'])/1000,
		diff_msec(hunk['queue_t'], hunk['xmit_t']),
		diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
# joints draw the tree structure connecting the events of one hunk
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display a process of received packets and interrputs associated with
# a NET_RX softirq
def print_receive(hunk):
	# A "hunk" groups one NET_RX softirq with the hard irqs that raised it and
	# the packets processed during it (assembled by handle_irq_softirq_exit()).
	show_hunk = 0
	irq_list = hunk['irq_list']
	cpu = irq_list[0]['cpu']
	base_t = irq_list[0]['irq_ent_t']
	# check if this hunk should be showed
	if dev != 0:
		for i in range(len(irq_list)):
			if irq_list[i]['name'].find(dev) >= 0:
				show_hunk = 1
				break
	else:
		show_hunk = 1
	if show_hunk == 0:
		return
	# all timestamps below are printed relative to the first irq entry
	print "%d.%06dsec cpu=%d" % \
		(nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
	for i in range(len(irq_list)):
		print PF_IRQ_ENTRY % \
			(diff_msec(base_t, irq_list[i]['irq_ent_t']),
			irq_list[i]['irq'], irq_list[i]['name'])
		print PF_JOINT
		irq_event_list = irq_list[i]['event_list']
		for j in range(len(irq_event_list)):
			irq_event = irq_event_list[j]
			if irq_event['event'] == 'netif_rx':
				print PF_NET_RX % \
					(diff_msec(base_t, irq_event['time']),
					irq_event['skbaddr'])
				print PF_JOINT
	print PF_SOFT_ENTRY % \
		diff_msec(base_t, hunk['sirq_ent_t'])
	print PF_JOINT
	event_list = hunk['event_list']
	for i in range(len(event_list)):
		event = event_list[i]
		if event['event_name'] == 'napi_poll':
			print PF_NAPI_POLL % \
				(diff_msec(base_t, event['event_t']), event['dev'])
			if i == len(event_list) - 1:
				print ""
			else:
				print PF_JOINT
		else:
			print PF_NET_RECV % \
				(diff_msec(base_t, event['event_t']), event['skbaddr'],
				event['len'])
			# 'comm' is set when the skb was copied to user space,
			# 'handle' when it was freed (kfree_skb/consume_skb)
			if 'comm' in event.keys():
				print PF_WJOINT
				print PF_CPY_DGRAM % \
					(diff_msec(base_t, event['comm_t']),
					event['pid'], event['comm'])
			elif 'handle' in event.keys():
				print PF_WJOINT
				if event['handle'] == "kfree_skb":
					print PF_KFREE_SKB % \
						(diff_msec(base_t,
							event['comm_t']),
						event['location'])
				elif event['handle'] == "consume_skb":
					print PF_CONS_SKB % \
						diff_msec(base_t,
							event['comm_t'])
			print PF_JOINT
def trace_begin():
	# Parse the script options passed on the perf command line:
	# tx / rx / dev=NAME / debug (see the header comment of this file).
	global show_tx
	global show_rx
	global dev
	global debug
	for i in range(len(sys.argv)):
		if i == 0:
			continue
		arg = sys.argv[i]
		if arg == 'tx':
			show_tx = 1
		elif arg =='rx':
			show_rx = 1
		elif arg.find('dev=',0, 4) >= 0:
			dev = arg[4:]
		elif arg == 'debug':
			debug = 1
	# with no explicit selection, show both charts
	if show_tx == 0 and show_rx == 0:
		show_tx = 1
		show_rx = 1
def trace_end():
	# order all events in time
	all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
			    b[EINFO_IDX_TIME]))
	# process all events
	# Dispatch each buffered event to its handler; the handlers correlate
	# irqs, softirqs and skb events into receive hunks and tx records.
	for i in range(len(all_event_list)):
		event_info = all_event_list[i]
		name = event_info[EINFO_IDX_NAME]
		if name == 'irq__softirq_exit':
			handle_irq_softirq_exit(event_info)
		elif name == 'irq__softirq_entry':
			handle_irq_softirq_entry(event_info)
		elif name == 'irq__softirq_raise':
			handle_irq_softirq_raise(event_info)
		elif name == 'irq__irq_handler_entry':
			handle_irq_handler_entry(event_info)
		elif name == 'irq__irq_handler_exit':
			handle_irq_handler_exit(event_info)
		elif name == 'napi__napi_poll':
			handle_napi_poll(event_info)
		elif name == 'net__netif_receive_skb':
			handle_netif_receive_skb(event_info)
		elif name == 'net__netif_rx':
			handle_netif_rx(event_info)
		elif name == 'skb__skb_copy_datagram_iovec':
			handle_skb_copy_datagram_iovec(event_info)
		elif name == 'net__net_dev_queue':
			handle_net_dev_queue(event_info)
		elif name == 'net__net_dev_xmit':
			handle_net_dev_xmit(event_info)
		elif name == 'skb__kfree_skb':
			handle_kfree_skb(event_info)
		elif name == 'skb__consume_skb':
			handle_consume_skb(event_info)
	# display receive hunks
	if show_rx:
		for i in range(len(receive_hunk_list)):
			print_receive(receive_hunk_list[i])
	# display transmit hunks
	if show_tx:
		print "   dev    len      Qdisc        " \
			"       netdevice             free"
		for i in range(len(tx_free_list)):
			print_transmit(tx_free_list[i])
	if debug:
		print "debug buffer status"
		print "----------------------------"
		print "xmit Qdisc:remain:%d overflow:%d" % \
			(len(tx_queue_list), of_count_tx_queue_list)
		print "xmit netdevice:remain:%d overflow:%d" % \
			(len(tx_xmit_list), of_count_tx_xmit_list)
		print "receive:remain:%d overflow:%d" % \
			(len(rx_skb_list), of_count_rx_skb_list)
# called from perf, when it finds a correspoinding event
# One callback per tracepoint: each just timestamps the raw event and queues it
# in all_event_list for trace_end() to sort and process.
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
	if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
		return
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
	all_event_list.append(event_info)
def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
	# NOTE: deliberately reuses the "irq__softirq_entry" symbol table to
	# decode 'vec' — the vec encoding is shared between the softirq events.
	if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
		return
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
	all_event_list.append(event_info)
def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
	if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
		return
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
	all_event_list.append(event_info)
def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
			irq, irq_name):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			irq, irq_name)
	all_event_list.append(event_info)
def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
	all_event_list.append(event_info)
def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			napi, dev_name)
	all_event_list.append(event_info)
def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
			skblen, dev_name):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			skbaddr, skblen, dev_name)
	all_event_list.append(event_info)
def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
			skblen, dev_name):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			skbaddr, skblen, dev_name)
	all_event_list.append(event_info)
def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
			skbaddr, skblen, dev_name):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			skbaddr, skblen, dev_name)
	all_event_list.append(event_info)
def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
			skbaddr, skblen, rc, dev_name):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			skbaddr, skblen, rc ,dev_name)
	all_event_list.append(event_info)
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
			skbaddr, protocol, location):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			skbaddr, protocol, location)
	all_event_list.append(event_info)
def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			skbaddr)
	all_event_list.append(event_info)
def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
	skbaddr, skblen):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
			skbaddr, skblen)
	all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
	# Push a new irq record on this cpu's stack; events seen before the
	# matching irq_handler_exit are attributed to it.
	(name, context, cpu, time, pid, comm, irq, irq_name) = event_info
	if cpu not in irq_dic.keys():
		irq_dic[cpu] = []
	irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
	irq_dic[cpu].append(irq_record)
def handle_irq_handler_exit(event_info):
	(name, context, cpu, time, pid, comm, irq, ret) = event_info
	if cpu not in irq_dic.keys():
		return
	irq_record = irq_dic[cpu].pop()
	if irq != irq_record['irq']:
		return
	irq_record.update({'irq_ext_t':time})
	# if an irq doesn't include NET_RX softirq, drop.
	if 'event_list' in irq_record.keys():
		irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
	# Record that the current irq raised NET_RX, so handle_irq_handler_exit
	# keeps this irq record (it has an 'event_list').
	(name, context, cpu, time, pid, comm, vec) = event_info
	if cpu not in irq_dic.keys() \
	or len(irq_dic[cpu]) == 0:
		return
	irq_record = irq_dic[cpu].pop()
	if 'event_list' in irq_record.keys():
		irq_event_list = irq_record['event_list']
	else:
		irq_event_list = []
	irq_event_list.append({'time':time, 'event':'sirq_raise'})
	irq_record.update({'event_list':irq_event_list})
	irq_dic[cpu].append(irq_record)
def handle_irq_softirq_entry(event_info):
	(name, context, cpu, time, pid, comm, vec) = event_info
	# start collecting the events processed during this NET_RX softirq
	net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
	(name, context, cpu, time, pid, comm, vec) = event_info
	irq_list = []
	event_list = 0
	if cpu in irq_dic.keys():
		irq_list = irq_dic[cpu]
		del irq_dic[cpu]
	if cpu in net_rx_dic.keys():
		sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
		event_list = net_rx_dic[cpu]['event_list']
		del net_rx_dic[cpu]
	if irq_list == [] or event_list == 0:
		return
	rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
		    'irq_list':irq_list, 'event_list':event_list}
	# merge information realted to a NET_RX softirq
	receive_hunk_list.append(rec_data)
def handle_napi_poll(event_info):
	# Attach a napi_poll event to the NET_RX softirq currently running on cpu.
	(name, context, cpu, time, pid, comm, napi, dev_name) = event_info
	if cpu in net_rx_dic.keys():
		event_list = net_rx_dic[cpu]['event_list']
		rec_data = {'event_name':'napi_poll',
				'dev':dev_name, 'event_t':time}
		event_list.append(rec_data)
def handle_netif_rx(event_info):
	# netif_rx happens in hard-irq context: attach it to the irq record.
	(name, context, cpu, time, pid, comm,
		skbaddr, skblen, dev_name) = event_info
	if cpu not in irq_dic.keys() \
	or len(irq_dic[cpu]) == 0:
		return
	irq_record = irq_dic[cpu].pop()
	if 'event_list' in irq_record.keys():
		irq_event_list = irq_record['event_list']
	else:
		irq_event_list = []
	irq_event_list.append({'time':time, 'event':'netif_rx',
		'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
	irq_record.update({'event_list':irq_event_list})
	irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
	global of_count_rx_skb_list
	(name, context, cpu, time, pid, comm,
		skbaddr, skblen, dev_name) = event_info
	if cpu in net_rx_dic.keys():
		rec_data = {'event_name':'netif_receive_skb',
			    'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
		event_list = net_rx_dic[cpu]['event_list']
		event_list.append(rec_data)
		# also keep the skb for later matching with copy/free events;
		# the list is bounded by buffer_budget (oldest entry dropped)
		rx_skb_list.insert(0, rec_data)
		if len(rx_skb_list) > buffer_budget:
			rx_skb_list.pop()
			of_count_rx_skb_list += 1
def handle_net_dev_queue(event_info):
	# Packet enters the Qdisc: start a tx record keyed by skbaddr.
	global of_count_tx_queue_list
	(name, context, cpu, time, pid, comm,
		skbaddr, skblen, dev_name) = event_info
	skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
	tx_queue_list.insert(0, skb)
	if len(tx_queue_list) > buffer_budget:
		tx_queue_list.pop()
		of_count_tx_queue_list += 1
def handle_net_dev_xmit(event_info):
	# Packet handed to the driver: move the record from queue to xmit list.
	global of_count_tx_xmit_list
	(name, context, cpu, time, pid, comm,
		skbaddr, skblen, rc, dev_name) = event_info
	if rc == 0: # NETDEV_TX_OK
		for i in range(len(tx_queue_list)):
			skb = tx_queue_list[i]
			if skb['skbaddr'] == skbaddr:
				skb['xmit_t'] = time
				tx_xmit_list.insert(0, skb)
				del tx_queue_list[i]
				if len(tx_xmit_list) > buffer_budget:
					tx_xmit_list.pop()
					of_count_tx_xmit_list += 1
				return
def handle_kfree_skb(event_info):
	# A freed skb can be: a queued tx packet (dropped before xmit), an
	# xmitted tx packet (completed), or a received packet (dropped).
	(name, context, cpu, time, pid, comm,
		skbaddr, protocol, location) = event_info
	for i in range(len(tx_queue_list)):
		skb = tx_queue_list[i]
		if skb['skbaddr'] == skbaddr:
			del tx_queue_list[i]
			return
	for i in range(len(tx_xmit_list)):
		skb = tx_xmit_list[i]
		if skb['skbaddr'] == skbaddr:
			skb['free_t'] = time
			tx_free_list.append(skb)
			del tx_xmit_list[i]
			return
	for i in range(len(rx_skb_list)):
		rec_data = rx_skb_list[i]
		if rec_data['skbaddr'] == skbaddr:
			rec_data.update({'handle':"kfree_skb",
					'comm':comm, 'pid':pid, 'comm_t':time})
			del rx_skb_list[i]
			return
def handle_consume_skb(event_info):
	# consume_skb marks normal completion of a transmitted packet.
	(name, context, cpu, time, pid, comm, skbaddr) = event_info
	for i in range(len(tx_xmit_list)):
		skb = tx_xmit_list[i]
		if skb['skbaddr'] == skbaddr:
			skb['free_t'] = time
			tx_free_list.append(skb)
			del tx_xmit_list[i]
			return
def handle_skb_copy_datagram_iovec(event_info):
	# The received skb was copied to user space: record who consumed it.
	(name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
	for i in range(len(rx_skb_list)):
		rec_data = rx_skb_list[i]
		if skbaddr == rec_data['skbaddr']:
			rec_data.update({'handle':"skb_copy_datagram_iovec",
					'comm':comm, 'pid':pid, 'comm_t':time})
			del rx_skb_list[i]
			return
|
gpl-2.0
|
cosmoharrigan/pylearn2
|
pylearn2/models/dbm/ising.py
|
32
|
54938
|
"""
Implementation of a densely connected Ising model in the
pylearn2.models.dbm framework
Notes
-----
If :math:`h` can be -1 or 1, and
.. math::
p(h) = \exp(T\dot z \dot h),
then the expected value of :math:`h` is given by
.. math::
\\tanh(T \dot z),
and the probability that :math:`h` is 1 is given by
.. math::
\sigma(2T \dot z)
"""
__authors__ = ["Ian Goodfellow", "Vincent Dumoulin"]
__copyright__ = "Copyright 2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
import operator
import numpy as np
from theano import function
from theano.gof.op import get_debug_values
from theano.compat.six.moves import reduce
from theano.compile.sharedvalue import SharedVariable
import theano.tensor as T
import warnings
from pylearn2.compat import OrderedDict
from pylearn2.expr.nnet import sigmoid_numpy
from pylearn2.linear.matrixmul import MatrixMul
from pylearn2.model_extensions.norm_constraint import MaxL2FilterNorm
from pylearn2.models.dbm import init_sigmoid_bias_from_array
from pylearn2.models.dbm.layer import HiddenLayer, VisibleLayer
from pylearn2.space import Conv2DSpace
from pylearn2.space import VectorSpace
from pylearn2.utils import sharedX
from pylearn2.utils.rng import make_theano_rng
def init_tanh_bias_from_marginals(dataset, use_y=False):
    """
    Initialize tanh-unit biases from dataset marginals.

    Parameters
    ----------
    dataset : Dataset
        Dataset whose design matrix (or targets, if `use_y` is True) holds
        values in {-1, 1}.
    use_y : bool, optional
        If True, use `dataset.y` instead of the design matrix.

    Returns
    -------
    init_bias : ndarray
        Biases `b` such that tanh(b) matches the clipped data mean.

    Raises
    ------
    ValueError
        If the maximum value of the data is not 1.
    """
    if use_y:
        X = dataset.y
    else:
        X = dataset.get_design_matrix()
    if not (X.max() == 1):
        # Fixed error message: this initializer expects {-1, 1} data, as the
        # min-check below shows — the old message claimed "0s and 1s".
        raise ValueError("Expected design matrix to consist entirely "
                         "of -1s and 1s, but maximum value is "+str(X.max()))
    assert X.min() == -1.
    mean = X.mean(axis=0)
    # NOTE(review): clipping to [1e-7, 1 - 1e-7] maps any negative mean to a
    # tiny positive value before arctanh, which looks odd for {-1,1} data, but
    # it is kept unchanged to preserve existing behavior — confirm upstream.
    mean = np.clip(mean, 1e-7, 1-1e-7)
    init_bias = np.arctanh(mean)
    return init_bias
class IsingVisible(VisibleLayer):
    """
    A DBM visible layer consisting of random variables living
    in a `VectorSpace`, with values in {-1, 1}.

    Implements the energy function term :math:`-\mathbf{b}^T \mathbf{h}`.

    Parameters
    ----------
    nvis : int
        The dimension of the space
    beta : theano shared variable
        Shared variable representing a multiplicative factor of the
        energy function (the inverse temperature)
    learn_beta : boolean, optional
        Whether or not the inverse temperature should be considered as a
        learned parameter
    bias_from_marginals : `pylearn2.datasets.dataset.Dataset`, optional
        A dataset whose marginals are used to initialize the visible
        biases
    """

    def __init__(self, nvis, beta, learn_beta=False, bias_from_marginals=None):
        if not isinstance(beta, SharedVariable):
            raise ValueError("beta needs to be a theano shared variable.")
        self.__dict__.update(locals())
        del self.self
        # Don't serialize the dataset
        del self.bias_from_marginals

        self.space = VectorSpace(nvis)
        self.input_space = self.space

        origin = self.space.get_origin()

        if bias_from_marginals is None:
            init_bias = np.zeros((nvis,))
        else:
            init_bias = init_tanh_bias_from_marginals(bias_from_marginals)

        self.bias = sharedX(init_bias, 'visible_bias')

    def get_biases(self):
        """
        Returns the current bias values as a numpy array.
        """
        return self.bias.get_value()

    def set_biases(self, biases, recenter=False):
        """
        Sets the bias values.

        Parameters
        ----------
        biases : ndarray
            New bias values.
        recenter : bool, optional
            If True, also update the offsets of a centered model.
            NOTE(review): this branch reads `self.center` / `self.offset`,
            which this class never defines — it will raise AttributeError
            if used; confirm against the centered-DBM code path.
        """
        self.bias.set_value(biases)
        if recenter:
            assert self.center
            self.offset.set_value(sigmoid_numpy(self.bias.get_value()))

    def upward_state(self, total_state):
        """
        The visible state is passed upward unchanged.
        """
        return total_state

    def get_params(self):
        """
        Returns the learned parameters: the bias, plus beta when it is learned.
        """
        rval = [self.bias]
        if self.learn_beta:
            rval.append(self.beta)
        return rval

    def mf_update(self, state_above, layer_above):
        """
        Mean-field update: E[h] = tanh(beta * (b + downward message)).
        """
        msg = layer_above.downward_message(state_above)
        bias = self.bias
        z = msg + bias
        rval = T.tanh(self.beta * z)
        return rval

    def sample(self, state_below=None, state_above=None, layer_above=None,
               theano_rng=None):
        """
        Samples {-1, 1} values; P(h = 1) = sigmoid(2 * beta * z) (see the
        module docstring).
        """
        assert state_below is None
        msg = layer_above.downward_message(state_above)
        bias = self.bias
        z = msg + bias
        phi = T.nnet.sigmoid(2. * self.beta * z)
        rval = theano_rng.binomial(size=phi.shape, p=phi, dtype=phi.dtype, n=1)
        # map the {0, 1} binomial samples to {-1, 1}
        return rval * 2. - 1.

    def make_state(self, num_examples, numpy_rng):
        """
        Returns a shared variable holding {-1, 1} samples drawn from the
        bias-only marginal distribution.
        """
        driver = numpy_rng.uniform(0., 1., (num_examples, self.nvis))
        on_prob = sigmoid_numpy(2. * self.beta.get_value() *
                                self.bias.get_value())
        sample = 2. * (driver < on_prob) - 1.
        rval = sharedX(sample, name='v_sample_shared')
        return rval

    def make_symbolic_state(self, num_examples, theano_rng):
        """
        Symbolic counterpart of `make_state`.
        """
        # BUG FIX: this previously read `self.b`, which IsingVisible never
        # defines (the bias is stored as `self.bias` in __init__), so calling
        # this method raised AttributeError.
        mean = T.nnet.sigmoid(2. * self.beta * self.bias)
        rval = theano_rng.binomial(size=(num_examples, self.nvis), p=mean)
        rval = 2. * (rval) - 1.
        return rval

    def expected_energy_term(self, state, average, state_below=None,
                             average_below=None):
        """
        Returns the bias term -beta * b^T h of the expected energy, one value
        per example.
        """
        assert state_below is None
        assert average_below is None
        assert average in [True, False]
        self.space.validate(state)
        # Energy function is linear so it doesn't matter if we're averaging
        # or not
        rval = -(self.beta * T.dot(state, self.bias))
        assert rval.ndim == 1
        return rval
class IsingHidden(HiddenLayer):
"""
A hidden layer with :math:`\mathbf{h}` being a vector in {-1, 1},
implementing the energy function term
.. math::
-\mathbf{v}^T \mathbf{W}\mathbf{h} -\mathbf{b}^T \mathbf{h}
where :math:`\mathbf{W}` and :math:`\mathbf{b}` are parameters of this
layer, and :math:`\mathbf{v}` is the upward state of the layer below.
Parameters
----------
dim : WRITEME
layer_name : WRITEME
beta : theano shared variable
Shared variable representing a multiplicative factor of the energy
function (the inverse temperature)
learn_beta : boolean, optional
Whether or not the inverse temperature should be considered as a
learned parameter
irange : WRITEME
sparse_init : WRITEME
sparse_stdev : WRITEME
include_prob : float, optional
Probability of including a weight element in the set of weights
initialized to U(-irange, irange). If not included it is
initialized to 0.
init_bias : WRITEME
W_lr_scale : WRITEME
b_lr_scale : WRITEME
max_col_norm : WRITEME
"""
    def __init__(self,
                 dim,
                 layer_name,
                 beta,
                 learn_beta=False,
                 irange=None,
                 sparse_init=None,
                 sparse_stdev=1.,
                 include_prob=1.0,
                 init_bias=0.,
                 W_lr_scale=None,
                 b_lr_scale=None,
                 max_col_norm=None):
        if not isinstance(beta, SharedVariable):
            raise ValueError("beta needs to be a theano shared variable.")
        # store all constructor arguments as attributes
        self.__dict__.update(locals())
        del self.self

        self.b = sharedX(np.zeros((self.dim,)) + init_bias,
                         name=layer_name + '_b')

        # NOTE(review): self.extensions is assumed to be initialized by the
        # HiddenLayer/Model base class — confirm, it is not set here.
        if max_col_norm is not None:
            self.extensions.append(MaxL2FilterNorm(max_col_norm, axis=0))
def get_lr_scalers(self):
"""
.. todo::
WRITEME
"""
if not hasattr(self, 'W_lr_scale'):
self.W_lr_scale = None
if not hasattr(self, 'b_lr_scale'):
self.b_lr_scale = None
rval = OrderedDict()
if self.W_lr_scale is not None:
W, = self.transformer.get_params()
rval[W] = self.W_lr_scale
if self.b_lr_scale is not None:
rval[self.b] = self.b_lr_scale
return rval
    def set_input_space(self, space):
        """
        Tell the layer what Space its input lives in and allocate the
        weight matrix accordingly.

        Notes
        -----
        Note: this resets parameters! A fresh W is drawn and wrapped in
        a new MatrixMul transformer.
        """
        self.input_space = space
        if isinstance(space, VectorSpace):
            # Input already arrives as a flat design matrix.
            self.requires_reformat = False
            self.input_dim = space.dim
        else:
            # Non-vector input (e.g. a Conv2DSpace) must be flattened
            # into desired_space before the matrix multiply.
            self.requires_reformat = True
            self.input_dim = space.get_total_dimension()
            self.desired_space = VectorSpace(self.input_dim)
        self.output_space = VectorSpace(self.dim)
        rng = self.dbm.rng
        if self.irange is not None:
            assert self.sparse_init is None
            # Uniform init in [-irange, irange]; each weight is kept with
            # probability include_prob, otherwise zeroed.
            W = rng.uniform(-self.irange, self.irange,
                            (self.input_dim, self.dim)) * \
                (rng.uniform(0., 1., (self.input_dim, self.dim))
                 < self.include_prob)
        else:
            assert self.sparse_init is not None
            # NOTE(review): this branch leaves W identically zero before
            # the sparse_stdev scaling -- the step that should set
            # sparse_init nonzero entries per hidden unit appears to be
            # missing. Confirm against the reference implementation.
            W = np.zeros((self.input_dim, self.dim))
            W *= self.sparse_stdev
        W = sharedX(W)
        W.name = self.layer_name + '_W'
        self.transformer = MatrixMul(W)
        W, = self.transformer.get_params()
        assert W.name is not None
def get_total_state_space(self):
"""
.. todo::
WRITEME
"""
return VectorSpace(self.dim)
def get_params(self):
"""
.. todo::
WRITEME
"""
assert self.b.name is not None
W, = self.transformer.get_params()
assert W.name is not None
rval = self.transformer.get_params()
assert not isinstance(rval, set)
rval = list(rval)
assert self.b not in rval
rval.append(self.b)
if self.learn_beta:
rval.append(self.beta)
return rval
def get_weight_decay(self, coeff):
"""
.. todo::
WRITEME
"""
if isinstance(coeff, str):
coeff = float(coeff)
assert isinstance(coeff, float) or hasattr(coeff, 'dtype')
W, = self.transformer.get_params()
return coeff * T.sqr(W).sum()
    def get_weights(self):
        """
        Return the weight matrix as a numpy array of shape
        (input_dim, dim).
        """
        if self.requires_reformat:
            # This is not really an unimplemented case.
            # We actually don't know how to format the weights
            # in design space. We got the data in topo space
            # and we don't have access to the dataset
            raise NotImplementedError()
        W, = self.transformer.get_params()
        return W.get_value()
    def set_weights(self, weights):
        """
        Overwrite the weight matrix with the given numpy array
        (must match the existing (input_dim, dim) shape).
        """
        W, = self.transformer.get_params()
        W.set_value(weights)
    def set_biases(self, biases, recenter=False):
        """
        Overwrite the hidden biases with the given numpy array.
        """
        self.b.set_value(biases)
        if recenter:
            # NOTE(review): self.center, self.pool_size and self.offset
            # are never defined on IsingHidden (this looks copied from a
            # centered/pooled layer class), so recenter=True would raise
            # AttributeError -- confirm intended behavior.
            assert self.center
            if self.pool_size != 1:
                raise NotImplementedError()
            self.offset.set_value(sigmoid_numpy(self.b.get_value()))
    def get_biases(self):
        """
        Return the hidden biases as a numpy array of shape (dim,).
        """
        return self.b.get_value()
    def get_weights_format(self):
        """
        Axis semantics of get_weights(): rows index visible units,
        columns index hidden units.
        """
        return ('v', 'h')
def get_weights_topo(self):
"""
.. todo::
WRITEME
"""
if not isinstance(self.input_space, Conv2DSpace):
raise NotImplementedError()
W, = self.transformer.get_params()
W = W.T
W = W.reshape(
(self.detector_layer_dim, self.input_space.shape[0],
self.input_space.shape[1], self.input_space.nchannels)
)
W = Conv2DSpace.convert(W, self.input_space.axes, ('b', 0, 1, 'c'))
return function([], W)()
def upward_state(self, total_state):
"""
.. todo::
WRITEME
"""
return total_state
def downward_state(self, total_state):
"""
.. todo::
WRITEME
"""
return total_state
def get_monitoring_channels(self):
"""
.. todo::
WRITEME
"""
W, = self.transformer.get_params()
assert W.ndim == 2
sq_W = T.sqr(W)
row_norms = T.sqrt(sq_W.sum(axis=1))
col_norms = T.sqrt(sq_W.sum(axis=0))
return OrderedDict([
('row_norms_min', row_norms.min()),
('row_norms_mean', row_norms.mean()),
('row_norms_max', row_norms.max()),
('col_norms_min', col_norms.min()),
('col_norms_mean', col_norms.mean()),
('col_norms_max', col_norms.max()),
])
def get_monitoring_channels_from_state(self, state):
"""
.. todo::
WRITEME
"""
P = state
rval = OrderedDict()
vars_and_prefixes = [(P, '')]
for var, prefix in vars_and_prefixes:
v_max = var.max(axis=0)
v_min = var.min(axis=0)
v_mean = var.mean(axis=0)
v_range = v_max - v_min
# max_x.mean_u is "the mean over *u*nits of the max over
# e*x*amples". The x and u are included in the name because
# otherwise its hard to remember which axis is which when reading
# the monitor I use inner.outer rather than outer_of_inner or
# something like that because I want mean_x.* to appear next to
# each other in the alphabetical list, as these are commonly
# plotted together
for key, val in [
('max_x.max_u', v_max.max()),
('max_x.mean_u', v_max.mean()),
('max_x.min_u', v_max.min()),
('min_x.max_u', v_min.max()),
('min_x.mean_u', v_min.mean()),
('min_x.min_u', v_min.min()),
('range_x.max_u', v_range.max()),
('range_x.mean_u', v_range.mean()),
('range_x.min_u', v_range.min()),
('mean_x.max_u', v_mean.max()),
('mean_x.mean_u', v_mean.mean()),
('mean_x.min_u', v_mean.min()),
]:
rval[prefix+key] = val
return rval
    def sample(self, state_below=None, state_above=None, layer_above=None,
               theano_rng=None):
        """
        Draw a Gibbs sample of this layer in {-1, 1}, conditioned on the
        layer below (and on the layer above, when given).
        """
        if theano_rng is None:
            raise ValueError("theano_rng is required; it just defaults to " +
                             "None so that it may appear after layer_above " +
                             "/ state_above in the list.")
        if state_above is not None:
            # Top-down contribution from the layer above.
            msg = layer_above.downward_message(state_above)
        else:
            msg = None
        if self.requires_reformat:
            state_below = self.input_space.format_as(state_below,
                                                     self.desired_space)
        # Total input: bottom-up term v W plus bias (plus top-down msg).
        z = self.transformer.lmul(state_below) + self.b
        if msg is not None:
            z = z + msg
        # P(h_i = 1 | ...); the factor 2*beta arises from the {-1, 1}
        # (Ising) parameterization of the energy.
        on_prob = T.nnet.sigmoid(2. * self.beta * z)
        # Sample in {0, 1}, then map to the Ising domain {-1, 1}.
        samples = theano_rng.binomial(p=on_prob, n=1, size=on_prob.shape,
                                      dtype=on_prob.dtype) * 2. - 1.
        return samples
    def downward_message(self, downward_state):
        """
        Message this layer sends to the layer below: downward_state
        times W^T, reformatted to the input space if necessary.
        """
        rval = self.transformer.lmul_T(downward_state)
        if self.requires_reformat:
            rval = self.desired_space.format_as(rval, self.input_space)
        return rval
def init_mf_state(self):
"""
.. todo::
WRITEME
"""
# work around theano bug with broadcasted vectors
z = T.alloc(0., self.dbm.batch_size,
self.dim).astype(self.b.dtype) + \
self.b.dimshuffle('x', 0)
rval = T.tanh(self.beta * z)
return rval
def make_state(self, num_examples, numpy_rng):
"""
.. todo::
WRITEME properly
Returns a shared variable containing an actual state
(not a mean field state) for this variable.
"""
driver = numpy_rng.uniform(0., 1., (num_examples, self.dim))
on_prob = sigmoid_numpy(2. * self.beta.get_value() *
self.b.get_value())
sample = 2. * (driver < on_prob) - 1.
rval = sharedX(sample, name='v_sample_shared')
return rval
def make_symbolic_state(self, num_examples, theano_rng):
"""
.. todo::
WRITEME
"""
mean = T.nnet.sigmoid(2. * self.beta * self.b)
rval = theano_rng.binomial(size=(num_examples, self.nvis), p=mean)
rval = 2. * (rval) - 1.
return rval
    def expected_energy_term(self, state, average, state_below, average_below):
        """
        This layer's contribution to the expected energy, one scalar per
        example: beta * (-(v W) . h - b . h).
        """
        # state = Print('h_state', attrs=['min', 'max'])(state)
        self.input_space.validate(state_below)
        if self.requires_reformat:
            if not isinstance(state_below, tuple):
                for sb in get_debug_values(state_below):
                    if sb.shape[0] != self.dbm.batch_size:
                        # NOTE(review): due to operator precedence, only
                        # the second string is %-formatted here (with two
                        # args for one placeholder), so this error path
                        # would itself raise -- confirm and fix message.
                        raise ValueError("self.dbm.batch_size is %d but got " +
                                         "shape of %d" % (self.dbm.batch_size,
                                                          sb.shape[0]))
                    assert reduce(operator.mul, sb.shape[1:]) == self.input_dim
            state_below = self.input_space.format_as(state_below,
                                                     self.desired_space)
        # Energy function is linear so it doesn't matter if we're averaging or
        # not. Specifically, our terms are -u^T W d - b^T d where u is the
        # upward state of layer below and d is the downward state of this layer
        bias_term = T.dot(state, self.b)
        weights_term = (self.transformer.lmul(state_below) * state).sum(axis=1)
        rval = -bias_term - weights_term
        rval *= self.beta
        assert rval.ndim == 1
        return rval
    def linear_feed_forward_approximation(self, state_below):
        """
        Linearized feed-forward pass, used to implement TorontoSparsity.

        Returns the same structure as mf_update would (here a pair,
        mirroring a (pooled, detector) layout) but without the tanh
        non-linearity, so gradients are not shrunk and top-down feedback
        is disregarded.
        """
        z = self.beta * (self.transformer.lmul(state_below) + self.b)
        # NOTE(review): self.pool_size is never defined on IsingHidden
        # (it belongs to pooled layer classes), so this check would raise
        # AttributeError -- confirm whether pooling was ever intended.
        if self.pool_size != 1:
            # Should probably implement sum pooling for the non-pooled version,
            # but in reality it's not totally clear what the right answer is
            raise NotImplementedError()
        return z, z
def mf_update(self, state_below, state_above, layer_above=None,
double_weights=False, iter_name=None):
"""
.. todo::
WRITEME
"""
self.input_space.validate(state_below)
if self.requires_reformat:
if not isinstance(state_below, tuple):
for sb in get_debug_values(state_below):
if sb.shape[0] != self.dbm.batch_size:
raise ValueError("self.dbm.batch_size is %d but got " +
"shape of %d" % (self.dbm.batch_size,
sb.shape[0]))
assert reduce(operator.mul, sb.shape[1:]) == self.input_dim
state_below = self.input_space.format_as(state_below,
self.desired_space)
if iter_name is None:
iter_name = 'anon'
if state_above is not None:
assert layer_above is not None
msg = layer_above.downward_message(state_above)
msg.name = 'msg_from_' + layer_above.layer_name + '_to_' + \
self.layer_name + '[' + iter_name + ']'
else:
msg = None
if double_weights:
state_below = 2. * state_below
state_below.name = self.layer_name + '_'+iter_name + '_2state'
z = self.transformer.lmul(state_below) + self.b
if self.layer_name is not None and iter_name is not None:
z.name = self.layer_name + '_' + iter_name + '_z'
if msg is not None:
z = z + msg
h = T.tanh(self.beta * z)
return h
class BoltzmannIsingVisible(VisibleLayer):
"""
An IsingVisible whose parameters are defined in Boltzmann machine space.
Notes
-----
All parameter noise/clipping is handled by BoltzmannIsingHidden.
.. todo::
WRITEME properly
Parameters
----------
nvis : int
Number of visible units
beta : theano shared variable
Shared variable representing a multiplicative factor of the energy
function (the inverse temperature)
learn_beta : boolean, optional
Whether or not the inverse temperature should be considered
as a learned parameter
bias_from_marginals : `pylearn2.datasets.dataset.Dataset`, optional
A dataset whose marginals are used to initialize the visible
biases
sampling_b_stdev : WRITEME
min_ising_b : WRITEME
max_ising_b : WRITEME
"""
    def __init__(self, nvis, beta, learn_beta=False, bias_from_marginals=None,
                 sampling_b_stdev=None, min_ising_b=None, max_ising_b=None):
        # beta must be shared so it can be updated / optionally learned.
        if not isinstance(beta, SharedVariable):
            raise ValueError("beta needs to be a theano shared " +
                             "variable.")
        # Stash all constructor arguments as attributes.
        self.__dict__.update(locals())
        del self.self
        # Don't serialize the dataset
        del self.bias_from_marginals
        self.space = VectorSpace(nvis)
        self.input_space = self.space
        if bias_from_marginals is None:
            init_bias = np.zeros((nvis,))
        else:
            # data is in [-1, 1], but want biases for a sigmoid
            init_bias = \
                init_sigmoid_bias_from_array(bias_from_marginals.X / 2. + 0.5)
        # Bias in Boltzmann (i.e. {0, 1}) parameterization; the Ising
        # bias is derived from it via ising_bias().
        self.boltzmann_bias = sharedX(init_bias, 'visible_bias')
        # Compiled noise-resampling function, built lazily in
        # resample_bias_noise.
        self.resample_fn = None
    def finalize_initialization(self):
        """
        Complete setup that requires the layer above / the DBM to exist:
        allocate the noisy-bias buffer and apply parameter constraints.
        """
        if self.sampling_b_stdev is not None:
            # Per-example noisy copy of the Ising bias used for sampling.
            self.noisy_sampling_b = \
                sharedX(np.zeros((self.layer_above.dbm.batch_size, self.nvis)))
        # NOTE(review): this updates dict is built but never passed to
        # anything visible here; enforce_constraints() presumably routes
        # through _modify_updates on its own -- confirm whether this
        # local dict is vestigial.
        updates = OrderedDict()
        updates[self.boltzmann_bias] = self.boltzmann_bias
        updates[self.layer_above.W] = self.layer_above.W
        self.enforce_constraints()
    def _modify_updates(self, updates):
        """
        Rewrite a proposed parameter-update dictionary in place to
        enforce this layer's constraints: clamp beta, clip the bias in
        Ising space, and refresh the noisy sampling bias.
        """
        beta = self.beta
        if beta in updates:
            # Keep the inverse temperature in a sane positive range.
            updated_beta = updates[beta]
            updates[beta] = T.clip(updated_beta, 1., 1000.)
        if any(constraint is not None for constraint in [self.min_ising_b,
                                                         self.max_ising_b]):
            # Missing bounds default to +/- 1e6, i.e. effectively no clip.
            bmn = self.min_ising_b
            if bmn is None:
                bmn = - 1e6
            bmx = self.max_ising_b
            if bmx is None:
                bmx = 1e6
            wmn_above = self.layer_above.min_ising_W
            if wmn_above is None:
                wmn_above = - 1e6
            wmx_above = self.layer_above.max_ising_W
            if wmx_above is None:
                wmx_above = 1e6
            # Convert Boltzmann bias to Ising space, clip there, then map
            # back: b_ising = b/2 + (1/4) * sum of weights to layer above.
            b = updates[self.boltzmann_bias]
            W_above = updates[self.layer_above.W]
            ising_b = 0.5 * b + 0.25 * W_above.sum(axis=1)
            ising_b = T.clip(ising_b, bmn, bmx)
            ising_W_above = 0.25 * W_above
            ising_W_above = T.clip(ising_W_above, wmn_above, wmx_above)
            # Invert the Boltzmann -> Ising bias map with clipped values.
            bhn = 2. * (ising_b - ising_W_above.sum(axis=1))
            updates[self.boltzmann_bias] = bhn
        # NOTE(review): self.noisy_sampling_b only exists when
        # finalize_initialization ran with sampling_b_stdev set; otherwise
        # this attribute access raises AttributeError -- confirm callers.
        if self.noisy_sampling_b is not None:
            theano_rng = make_theano_rng(None, self.dbm.rng.randint(2**16),
                                         which_method="normal")
            b = updates[self.boltzmann_bias]
            W_above = updates[self.layer_above.W]
            ising_b = 0.5 * b + 0.25 * W_above.sum(axis=1)
            # Gaussian noise around the Ising bias, one row per example.
            noisy_sampling_b = \
                theano_rng.normal(avg=ising_b.dimshuffle('x', 0),
                                  std=self.sampling_b_stdev,
                                  size=self.noisy_sampling_b.shape,
                                  dtype=ising_b.dtype)
            updates[self.noisy_sampling_b] = noisy_sampling_b
def resample_bias_noise(self, batch_size_changed=False):
"""
.. todo::
WRITEME
"""
if batch_size_changed:
self.resample_fn = None
if self.resample_fn is None:
updates = OrderedDict()
if self.sampling_b_stdev is not None:
self.noisy_sampling_b = \
sharedX(np.zeros((self.dbm.batch_size, self.nvis)))
if self.noisy_sampling_b is not None:
theano_rng = make_theano_rng(
None,
self.dbm.rng.randint(2**16),
which_method="normal"
)
b = self.boltzmann_bias
W_above = self.layer_above.W
ising_b = 0.5 * b + 0.25 * W_above.sum(axis=1)
noisy_sampling_b = \
theano_rng.normal(avg=ising_b.dimshuffle('x', 0),
std=self.sampling_b_stdev,
size=self.noisy_sampling_b.shape,
dtype=ising_b.dtype)
updates[self.noisy_sampling_b] = noisy_sampling_b
self.resample_fn = function([], updates=updates)
self.resample_fn()
def get_biases(self):
"""
.. todo::
WRITEME
"""
warnings.warn("BoltzmannIsingVisible.get_biases returns the " +
"BOLTZMANN biases, is that what we want?")
return self.boltzmann_bias.get_value()
def set_biases(self, biases, recenter=False):
"""
.. todo::
WRITEME
"""
assert False # not really sure what this should do for this layer
def ising_bias(self, for_sampling=False):
"""
.. todo::
WRITEME
"""
if for_sampling and self.layer_above.sampling_b_stdev is not None:
return self.noisy_sampling_b
return \
0.5 * self.boltzmann_bias + 0.25 * self.layer_above.W.sum(axis=1)
def ising_bias_numpy(self):
"""
.. todo::
WRITEME
"""
return 0.5 * self.boltzmann_bias.get_value() + \
0.25 * self.layer_above.W.get_value().sum(axis=1)
def upward_state(self, total_state):
"""
.. todo::
WRITEME
"""
return total_state
def get_params(self):
"""
.. todo::
WRITEME
"""
rval = [self.boltzmann_bias]
if self.learn_beta:
rval.append(self.beta)
return rval
def sample(self, state_below=None, state_above=None, layer_above=None,
theano_rng=None):
"""
.. todo::
WRITEME
"""
assert state_below is None
msg = layer_above.downward_message(state_above, for_sampling=True)
bias = self.ising_bias(for_sampling=True)
z = msg + bias
phi = T.nnet.sigmoid(2. * self.beta * z)
rval = theano_rng.binomial(size=phi.shape, p=phi, dtype=phi.dtype, n=1)
return rval * 2. - 1.
def make_state(self, num_examples, numpy_rng):
"""
.. todo::
WRITEME
"""
driver = numpy_rng.uniform(0., 1., (num_examples, self.nvis))
on_prob = sigmoid_numpy(2. * self.beta.get_value() *
self.ising_bias_numpy())
sample = 2. * (driver < on_prob) - 1.
rval = sharedX(sample, name='v_sample_shared')
return rval
def make_symbolic_state(self, num_examples, theano_rng):
"""
.. todo::
WRITEME
"""
mean = T.nnet.sigmoid(2. * self.beta * self.ising_bias())
rval = theano_rng.binomial(size=(num_examples, self.nvis), p=mean)
rval = 2. * (rval) - 1.
return rval
def mf_update(self, state_above, layer_above):
"""
.. todo::
WRITEME
"""
msg = layer_above.downward_message(state_above, for_sampling=True)
bias = self.ising_bias(for_sampling=True)
z = msg + bias
rval = T.tanh(self.beta * z)
return rval
def expected_energy_term(self, state, average, state_below=None,
average_below=None):
"""
.. todo::
WRITEME
"""
# state = Print('v_state', attrs=['min', 'max'])(state)
assert state_below is None
assert average_below is None
assert average in [True, False]
self.space.validate(state)
# Energy function is linear so it doesn't matter if we're averaging
# or not
rval = -(self.beta * T.dot(state, self.ising_bias()))
assert rval.ndim == 1
return rval
def get_monitoring_channels(self):
"""
.. todo::
WRITEME
"""
rval = OrderedDict()
ising_b = self.ising_bias()
rval['ising_b_min'] = ising_b.min()
rval['ising_b_max'] = ising_b.max()
rval['beta'] = self.beta
if hasattr(self, 'noisy_sampling_b'):
rval['noisy_sampling_b_min'] = self.noisy_sampling_b.min()
rval['noisy_sampling_b_max'] = self.noisy_sampling_b.max()
return rval
class BoltzmannIsingHidden(HiddenLayer):
"""
An IsingHidden whose parameters are defined in Boltzmann machine space.
.. todo::
WRITEME properly
Parameters
----------
dim : WRITEME
layer_name : WRITEME
layer_below : WRITEME
beta : theano shared variable
Shared variable representing a multiplicative factor of the energy
function (the inverse temperature)
learn_beta : boolean, optional
Whether or not the inverse temperature should be considered as a
learned parameter
irange : WRITEME
sparse_init : WRITEME
sparse_stdev : WRITEME
include_prob : WRITEME
init_bias : WRITEME
W_lr_scale : WRITEME
b_lr_scale : WRITEME
beta_lr_scale : WRITEME
max_col_norm : WRITEME
min_ising_b : WRITEME
max_ising_b : WRITEME
min_ising_W : WRITEME
max_ising_W : WRITEME
sampling_W_stdev : WRITEME
sampling_b_stdev : WRITEME
"""
    def __init__(self,
                 dim,
                 layer_name,
                 layer_below,
                 beta,
                 learn_beta=False,
                 irange=None,
                 sparse_init=None,
                 sparse_stdev=1.,
                 include_prob=1.0,
                 init_bias=0.,
                 W_lr_scale=None,
                 b_lr_scale=None,
                 beta_lr_scale=None,
                 max_col_norm=None,
                 min_ising_b=None,
                 max_ising_b=None,
                 min_ising_W=None,
                 max_ising_W=None,
                 sampling_W_stdev=None,
                 sampling_b_stdev=None):
        # beta must be shared so it can be updated / optionally learned.
        if not isinstance(beta, SharedVariable):
            raise ValueError("beta needs to be a theano shared variable.")
        # Stash every constructor argument as an attribute in one shot.
        self.__dict__.update(locals())
        del self.self
        # Wire this layer into the stack: the layer below gets a back
        # reference; layer_above is filled in by the layer above itself
        # (stays None for the topmost layer).
        layer_below.layer_above = self
        self.layer_above = None
        # Compiled noise-resampling function, built lazily.
        self.resample_fn = None
        if max_col_norm is not None:
            # Constrain the L2 norm of each weight column (per unit).
            self.extensions.append(MaxL2FilterNorm(max_col_norm, axis=0))
def get_lr_scalers(self):
"""
.. todo::
WRITEME
"""
if not hasattr(self, 'W_lr_scale'):
self.W_lr_scale = None
if not hasattr(self, 'b_lr_scale'):
self.b_lr_scale = None
if not hasattr(self, 'beta_lr_scale'):
self.beta_lr_scale = None
rval = OrderedDict()
if self.W_lr_scale is not None:
W = self.W
rval[W] = self.W_lr_scale
if self.b_lr_scale is not None:
rval[self.boltzmann_b] = self.b_lr_scale
if self.beta_lr_scale is not None:
rval[self.beta] = self.beta_lr_scale
return rval
def set_input_space(self, space):
"""
.. todo::
WRITEME properly
Note: this resets parameters!
"""
self.input_space = space
if isinstance(space, VectorSpace):
self.requires_reformat = False
self.input_dim = space.dim
else:
self.requires_reformat = True
self.input_dim = space.get_total_dimension()
self.desired_space = VectorSpace(self.input_dim)
self.output_space = VectorSpace(self.dim)
rng = self.dbm.rng
if self.irange is not None:
assert self.sparse_init is None
W = rng.uniform(-self.irange, self.irange,
(self.input_dim, self.dim)) * \
(rng.uniform(0., 1., (self.input_dim, self.dim))
< self.include_prob)
else:
assert self.sparse_init is not None
W = np.zeros((self.input_dim, self.dim))
W *= self.sparse_stdev
W = sharedX(W)
W.name = self.layer_name + '_W'
self.W = W
self.boltzmann_b = sharedX(np.zeros((self.dim,)) + self.init_bias,
name=self.layer_name + '_b')
def finalize_initialization(self):
"""
.. todo::
WRITEME
"""
if self.sampling_b_stdev is not None:
self.noisy_sampling_b = \
sharedX(np.zeros((self.dbm.batch_size, self.dim)))
if self.sampling_W_stdev is not None:
self.noisy_sampling_W = \
sharedX(np.zeros((self.input_dim, self.dim)),
'noisy_sampling_W')
updates = OrderedDict()
updates[self.boltzmann_b] = self.boltzmann_b
updates[self.W] = self.W
if self.layer_above is not None:
updates[self.layer_above.W] = self.layer_above.W
self.enforce_constraints()
def _modify_updates(self, updates):
"""
.. todo::
WRITEME
"""
beta = self.beta
if beta in updates:
updated_beta = updates[beta]
updates[beta] = T.clip(updated_beta, 1., 1000.)
if any(constraint is not None for constraint in [self.min_ising_b,
self.max_ising_b,
self.min_ising_W,
self.max_ising_W]):
bmn = self.min_ising_b
if bmn is None:
bmn = - 1e6
bmx = self.max_ising_b
if bmx is None:
bmx = 1e6
wmn = self.min_ising_W
if wmn is None:
wmn = - 1e6
wmx = self.max_ising_W
if wmx is None:
wmx = 1e6
if self.layer_above is not None:
wmn_above = self.layer_above.min_ising_W
if wmn_above is None:
wmn_above = - 1e6
wmx_above = self.layer_above.max_ising_W
if wmx_above is None:
wmx_above = 1e6
W = updates[self.W]
ising_W = 0.25 * W
ising_W = T.clip(ising_W, wmn, wmx)
b = updates[self.boltzmann_b]
if self.layer_above is not None:
W_above = updates[self.layer_above.W]
ising_b = 0.5 * b + 0.25 * W.sum(axis=0) \
+ 0.25 * W_above.sum(axis=1)
else:
ising_b = 0.5 * b + 0.25 * W.sum(axis=0)
ising_b = T.clip(ising_b, bmn, bmx)
if self.layer_above is not None:
ising_W_above = 0.25 * W_above
ising_W_above = T.clip(ising_W_above, wmn_above, wmx_above)
bhn = 2. * (ising_b - ising_W.sum(axis=0)
- ising_W_above.sum(axis=1))
else:
bhn = 2. * (ising_b - ising_W.sum(axis=0))
Wn = 4. * ising_W
updates[self.W] = Wn
updates[self.boltzmann_b] = bhn
if self.noisy_sampling_W is not None:
theano_rng = make_theano_rng(None, self.dbm.rng.randint(2**16),
which_method="normal")
W = updates[self.W]
ising_W = 0.25 * W
noisy_sampling_W = \
theano_rng.normal(avg=ising_W, std=self.sampling_W_stdev,
size=ising_W.shape, dtype=ising_W.dtype)
updates[self.noisy_sampling_W] = noisy_sampling_W
b = updates[self.boltzmann_b]
if self.layer_above is not None:
W_above = updates[self.layer_above.W]
ising_b = 0.5 * b + 0.25 * W.sum(axis=0) \
+ 0.25 * W_above.sum(axis=1)
else:
ising_b = 0.5 * b + 0.25 * W.sum(axis=0)
noisy_sampling_b = \
theano_rng.normal(avg=ising_b.dimshuffle('x', 0),
std=self.sampling_b_stdev,
size=self.noisy_sampling_b.shape,
dtype=ising_b.dtype)
updates[self.noisy_sampling_b] = noisy_sampling_b
def resample_bias_noise(self, batch_size_changed=False):
"""
.. todo::
WRITEME
"""
if batch_size_changed:
self.resample_fn = None
if self.resample_fn is None:
updates = OrderedDict()
if self.sampling_b_stdev is not None:
self.noisy_sampling_b = \
sharedX(np.zeros((self.dbm.batch_size, self.dim)))
if self.noisy_sampling_b is not None:
theano_rng = make_theano_rng(
None,
self.dbm.rng.randint(2**16),
which_method="normal"
)
b = self.boltzmann_b
if self.layer_above is not None:
W_above = self.layer_above.W
ising_b = 0.5 * b + 0.25 * self.W.sum(axis=0) \
+ 0.25 * W_above.sum(axis=1)
else:
ising_b = 0.5 * b + 0.25 * self.W.sum(axis=0)
noisy_sampling_b = \
theano_rng.normal(avg=ising_b.dimshuffle('x', 0),
std=self.sampling_b_stdev,
size=self.noisy_sampling_b.shape,
dtype=ising_b.dtype)
updates[self.noisy_sampling_b] = noisy_sampling_b
self.resample_fn = function([], updates=updates)
self.resample_fn()
def get_total_state_space(self):
"""
.. todo::
WRITEME
"""
return VectorSpace(self.dim)
def get_params(self):
"""
.. todo::
WRITEME
"""
assert self.boltzmann_b.name is not None
W = self.W
assert W.name is not None
rval = [W]
assert not isinstance(rval, set)
rval = list(rval)
assert self.boltzmann_b not in rval
rval.append(self.boltzmann_b)
if self.learn_beta:
rval.append(self.beta)
return rval
    def ising_weights(self, for_sampling=False):
        """
        Weights in Ising parameterization: W_ising = W_boltzmann / 4.
        When for_sampling is True and weight noise is configured, the
        precomputed noisy copy is returned instead.
        """
        if not hasattr(self, 'sampling_W_stdev'):
            # Backfill for models pickled before this attribute existed.
            self.sampling_W_stdev = None
        if for_sampling and self.sampling_W_stdev is not None:
            return self.noisy_sampling_W
        return 0.25 * self.W
    def ising_b(self, for_sampling=False):
        """
        Bias in Ising parameterization:
        b_ising = b/2 + (1/4) * (column sums of W below)
                  [+ (1/4) * (row sums of W above), if a layer above exists].
        When for_sampling is True and bias noise is configured, the
        precomputed noisy copy is returned instead.
        """
        if not hasattr(self, 'sampling_b_stdev'):
            # Backfill for models pickled before this attribute existed.
            self.sampling_b_stdev = None
        if for_sampling and self.sampling_b_stdev is not None:
            return self.noisy_sampling_b
        else:
            if self.layer_above is not None:
                return 0.5 * self.boltzmann_b + \
                    0.25 * self.W.sum(axis=0) + \
                    0.25 * self.layer_above.W.sum(axis=1)
            else:
                return 0.5 * self.boltzmann_b + 0.25 * self.W.sum(axis=0)
    def ising_b_numpy(self):
        """
        Numpy (non-symbolic) version of ising_b(), computed from the
        current shared-variable values; noise is never applied here.
        """
        if self.layer_above is not None:
            return 0.5 * self.boltzmann_b.get_value() + \
                0.25 * self.W.get_value().sum(axis=0) + \
                0.25 * self.layer_above.W.get_value().sum(axis=1)
        else:
            return 0.5 * self.boltzmann_b.get_value() + \
                0.25 * self.W.get_value().sum(axis=0)
def get_weight_decay(self, coeff):
"""
.. todo::
WRITEME
"""
if isinstance(coeff, str):
coeff = float(coeff)
assert isinstance(coeff, float) or hasattr(coeff, 'dtype')
W = self.W
return coeff * T.sqr(W).sum()
def get_weights(self):
"""
.. todo::
WRITEME
"""
warnings.warn("BoltzmannIsingHidden.get_weights returns the " +
"BOLTZMANN weights, is that what we want?")
W = self.W
return W.get_value()
def set_weights(self, weights):
"""
.. todo::
WRITEME
"""
warnings.warn("BoltzmannIsingHidden.set_weights sets the BOLTZMANN " +
"weights, is that what we want?")
W = self.W
W.set_value(weights)
def set_biases(self, biases, recenter=False):
"""
.. todo::
WRITEME
"""
self.boltzmann_b.set_value(biases)
assert not recenter # not really sure what this should do if True
def get_biases(self):
"""
.. todo::
WRITEME
"""
warnings.warn("BoltzmannIsingHidden.get_biases returns the " +
"BOLTZMANN biases, is that what we want?")
return self.boltzmann_b.get_value()
def get_weights_format(self):
"""
.. todo::
WRITEME
"""
return ('v', 'h')
def get_weights_topo(self):
"""
.. todo::
WRITEME
"""
warnings.warn("BoltzmannIsingHidden.get_weights_topo returns the " +
"BOLTZMANN weights, is that what we want?")
if not isinstance(self.input_space, Conv2DSpace):
raise NotImplementedError()
W = self.W
W = W.T
W = W.reshape((self.detector_layer_dim, self.input_space.shape[0],
self.input_space.shape[1], self.input_space.nchannels))
W = Conv2DSpace.convert(W, self.input_space.axes, ('b', 0, 1, 'c'))
return function([], W)()
def upward_state(self, total_state):
"""
.. todo::
WRITEME
"""
return total_state
def downward_state(self, total_state):
"""
.. todo::
WRITEME
"""
return total_state
def get_monitoring_channels(self):
"""
.. todo::
WRITEME
"""
W = self.W
assert W.ndim == 2
sq_W = T.sqr(W)
row_norms = T.sqrt(sq_W.sum(axis=1))
col_norms = T.sqrt(sq_W.sum(axis=0))
rval = OrderedDict([
('boltzmann_row_norms_min', row_norms.min()),
('boltzmann_row_norms_mean', row_norms.mean()),
('boltzmann_row_norms_max', row_norms.max()),
('boltzmann_col_norms_min', col_norms.min()),
('boltzmann_col_norms_mean', col_norms.mean()),
('boltzmann_col_norms_max', col_norms.max()),
])
ising_W = self.ising_weights()
rval['ising_W_min'] = ising_W.min()
rval['ising_W_max'] = ising_W.max()
ising_b = self.ising_b()
rval['ising_b_min'] = ising_b.min()
rval['ising_b_max'] = ising_b.max()
if hasattr(self, 'noisy_sampling_W'):
rval['noisy_sampling_W_min'] = self.noisy_sampling_W.min()
rval['noisy_sampling_W_max'] = self.noisy_sampling_W.max()
rval['noisy_sampling_b_min'] = self.noisy_sampling_b.min()
rval['noisy_sampling_b_max'] = self.noisy_sampling_b.max()
return rval
def get_monitoring_channels_from_state(self, state):
"""
.. todo::
WRITEME
"""
P = state
rval = OrderedDict()
vars_and_prefixes = [(P, '')]
for var, prefix in vars_and_prefixes:
v_max = var.max(axis=0)
v_min = var.min(axis=0)
v_mean = var.mean(axis=0)
v_range = v_max - v_min
# max_x.mean_u is "the mean over *u*nits of the max over
# e*x*amples". The x and u are included in the name because
# otherwise its hard to remember which axis is which when reading
# the monitor I use inner.outer rather than outer_of_inner or
# something like that because I want mean_x.* to appear next to
# each other in the alphabetical list, as these are commonly
# plotted together
for key, val in [
('max_x.max_u', v_max.max()),
('max_x.mean_u', v_max.mean()),
('max_x.min_u', v_max.min()),
('min_x.max_u', v_min.max()),
('min_x.mean_u', v_min.mean()),
('min_x.min_u', v_min.min()),
('range_x.max_u', v_range.max()),
('range_x.mean_u', v_range.mean()),
('range_x.min_u', v_range.min()),
('mean_x.max_u', v_mean.max()),
('mean_x.mean_u', v_mean.mean()),
('mean_x.min_u', v_mean.min())
]:
rval[prefix+key] = val
return rval
def sample(self, state_below=None, state_above=None, layer_above=None,
theano_rng=None):
"""
.. todo::
WRITEME
"""
if theano_rng is None:
raise ValueError("theano_rng is required; it just defaults to " +
"None so that it may appear after layer_above " +
"/ state_above in the list.")
if state_above is not None:
msg = layer_above.downward_message(state_above, for_sampling=True)
else:
msg = None
if self.requires_reformat:
state_below = self.input_space.format_as(state_below,
self.desired_space)
z = T.dot(state_below, self.ising_weights(for_sampling=True)) + \
self.ising_b(for_sampling=True)
if msg is not None:
z = z + msg
on_prob = T.nnet.sigmoid(2. * self.beta * z)
samples = theano_rng.binomial(p=on_prob, n=1, size=on_prob.shape,
dtype=on_prob.dtype) * 2. - 1.
return samples
def downward_message(self, downward_state, for_sampling=False):
"""
.. todo::
WRITEME
"""
rval = T.dot(downward_state,
self.ising_weights(for_sampling=for_sampling).T)
if self.requires_reformat:
rval = self.desired_space.format_as(rval, self.input_space)
return rval
def init_mf_state(self):
"""
.. todo::
WRITEME
"""
# work around theano bug with broadcasted vectors
z = T.alloc(0., self.dbm.batch_size,
self.dim).astype(self.boltzmann_b.dtype) + \
self.ising_b().dimshuffle('x', 0)
rval = T.tanh(self.beta * z)
return rval
def make_state(self, num_examples, numpy_rng):
"""
.. todo::
WRITEME properly
Returns a shared variable containing an actual state
(not a mean field state) for this variable.
"""
driver = numpy_rng.uniform(0., 1., (num_examples, self.dim))
on_prob = sigmoid_numpy(2. * self.beta.get_value() *
self.ising_b_numpy())
sample = 2. * (driver < on_prob) - 1.
rval = sharedX(sample, name='v_sample_shared')
return rval
def make_symbolic_state(self, num_examples, theano_rng):
"""
.. todo::
WRITEME
"""
mean = T.nnet.sigmoid(2. * self.beta * self.ising_b())
rval = theano_rng.binomial(size=(num_examples, self.dim), p=mean)
rval = 2. * (rval) - 1.
return rval
def expected_energy_term(self, state, average, state_below, average_below):
"""
.. todo::
WRITEME
"""
# state = Print('h_state', attrs=['min', 'max'])(state)
self.input_space.validate(state_below)
if self.requires_reformat:
if not isinstance(state_below, tuple):
for sb in get_debug_values(state_below):
if sb.shape[0] != self.dbm.batch_size:
raise ValueError("self.dbm.batch_size is %d but got " +
"shape of %d" % (self.dbm.batch_size,
sb.shape[0]))
assert reduce(operator.mul, sb.shape[1:]) == self.input_dim
state_below = self.input_space.format_as(state_below,
self.desired_space)
# Energy function is linear so it doesn't matter if we're averaging or
# not. Specifically, our terms are -u^T W d - b^T d where u is the
# upward state of layer below and d is the downward state of this layer
bias_term = T.dot(state, self.ising_b())
weights_term = \
(T.dot(state_below, self.ising_weights()) * state).sum(axis=1)
rval = -bias_term - weights_term
rval *= self.beta
assert rval.ndim == 1
return rval
def linear_feed_forward_approximation(self, state_below):
"""
.. todo::
WRITEME properly
Used to implement TorontoSparsity. Unclear exactly what properties of
it are important or how to implement it for other layers.
Properties it must have:
output is same kind of data structure (ie, tuple of theano
2-tensors) as mf_update
Properties it probably should have for other layer types:
An infinitesimal change in state_below or the parameters should
cause the same sign of change in the output of
linear_feed_forward_approximation and in mf_update
Should not have any non-linearities that cause the gradient to
shrink
Should disregard top-down feedback
"""
z = self.beta * (T.dot(state_below,
self.ising_weights()) + self.ising_b())
return z
def mf_update(self, state_below, state_above, layer_above=None,
double_weights=False, iter_name=None):
"""
.. todo::
WRITEME
"""
self.input_space.validate(state_below)
if self.requires_reformat:
if not isinstance(state_below, tuple):
for sb in get_debug_values(state_below):
if sb.shape[0] != self.dbm.batch_size:
raise ValueError("self.dbm.batch_size is %d but got " +
"shape of %d" % (self.dbm.batch_size,
sb.shape[0]))
assert reduce(operator.mul, sb.shape[1:]) == self.input_dim
state_below = self.input_space.format_as(state_below,
self.desired_space)
if iter_name is None:
iter_name = 'anon'
if state_above is not None:
assert layer_above is not None
msg = layer_above.downward_message(state_above)
msg.name = 'msg_from_' + layer_above.layer_name + '_to_' + \
self.layer_name + '[' + iter_name+']'
else:
msg = None
if double_weights:
state_below = 2. * state_below
state_below.name = self.layer_name + '_'+iter_name + '_2state'
z = T.dot(state_below, self.ising_weights()) + self.ising_b()
if self.layer_name is not None and iter_name is not None:
z.name = self.layer_name + '_' + iter_name + '_z'
if msg is not None:
z = z + msg
h = T.tanh(self.beta * z)
return h
def get_l2_act_cost(self, state, target, coeff):
"""
.. todo::
WRITEME
"""
avg = state.mean(axis=0)
diff = avg - target
return coeff * T.sqr(diff).mean()
|
bsd-3-clause
|
landscapeio/astroid
|
__pkginfo__.py
|
2
|
1746
|
# copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# astroid is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
"""astroid packaging information"""
distname = 'astroid'
modname = 'astroid'
numversion = (1, 0, 0)
version = '.'.join([str(num) for num in numversion])
install_requires = ['logilab-common >= 0.60.0']
license = 'LGPL'
author = 'Logilab'
author_email = 'python-projects@lists.logilab.org'
mailinglist = "mailto://%s" % author_email
web = 'http://bitbucket.org/logilab/astroid'
description = "rebuild a new abstract syntax tree from Python's ast"
from os.path import join
include_dirs = ['brain',
join('test', 'regrtest_data'),
join('test', 'data'), join('test', 'data2')]
classifiers = ["Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Software Development :: Quality Assurance",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
]
|
gpl-2.0
|
MingwangLin/automatic-colorization-of-sketch
|
util/helper.py
|
1
|
9155
|
import cv2
import time
import string
import random
from scipy import ndimage
import numpy as np
from datetime import datetime
def get_normal_map(img):
    """Invert *img* and scale to [0, 1] (pixel 255 -> 0.0, pixel 0 -> 1.0).

    `np.float` was removed in NumPy 1.24; `np.float64` is the exact
    equivalent dtype, so behavior is unchanged.
    """
    img = img.astype(np.float64)
    img = img / 255.0
    img = - img + 1
    img[img < 0] = 0
    img[img > 1] = 1
    return img
def get_gray_map(img):
    """Convert BGR *img* to an inverted grayscale map shaped (H, W, 1).

    Values lie in [0, 1] with white -> 0 and black -> 1.
    """
    gray = cv2.cvtColor(img.astype(np.uint8), cv2.COLOR_BGR2GRAY)
    highPass = gray.astype(np.float64)  # np.float removed in NumPy 1.24
    highPass = highPass / 255.0
    highPass = 1 - highPass
    highPass = highPass[None]
    return highPass.transpose((1, 2, 0))
def get_light_map(img):
    """Return the high-pass (detail) component of *img* as (H, W, 1) floats.

    Gray minus its Gaussian blur; negative values are preserved.
    """
    gray = cv2.cvtColor(img.astype(np.uint8), cv2.COLOR_BGR2GRAY)
    blur = cv2.GaussianBlur(gray, (0, 0), 3)
    highPass = gray.astype(int) - blur.astype(int)
    highPass = highPass.astype(np.float64)  # np.float removed in NumPy 1.24
    highPass = highPass[None]
    return highPass.transpose((1, 2, 0))
def get_light_map_single(img):
    """High-pass a single-channel image; the result is scaled by 1/64."""
    gray = img
    gray = gray[None]
    gray = gray.transpose((1, 2, 0))
    blur = cv2.GaussianBlur(gray, (0, 0), 3)
    gray = gray.reshape((gray.shape[0], gray.shape[1]))
    highPass = gray.astype(int) - blur.astype(int)
    highPass = highPass.astype(np.float64)  # np.float removed in NumPy 1.24
    highPass = highPass / 64.0
    return highPass
def normalize_pic(img):
    """Scale *img* so its maximum becomes 1; an all-zero input is returned unchanged."""
    peak = np.max(img)
    if peak != 0:
        return img / peak
    return img
def adjust_and_save_img(img, new_img_size, path):
    """Convert an activation map to an inverted uint8 image and save two copies.

    path is a 2-sequence: path[0] receives the full-size image, path[1] the
    (new_img_size x new_img_size) INTER_AREA-resized one.
    """
    mat = img.astype(np.float64)  # np.float removed in NumPy 1.24
    threshold = 0.0
    mat[mat < threshold] = 0
    mat = - mat + 1
    mat = (mat * 255.0)
    mat[mat < 0] = 0
    mat[mat > 255] = 255
    mat = mat.astype(np.uint8)
    # size-1 median filter is an identity pass; kept for parity with callers
    mat = ndimage.median_filter(mat, 1)
    cv2.imwrite(path[0], mat)
    img = cv2.resize(mat, (new_img_size, new_img_size), interpolation=cv2.INTER_AREA)
    cv2.imwrite(path[1], img)
    return
def get_light_map_drawer(img):
    """High-pass *img* for line extraction; inverted, in [0, 1], shaped (H, W, 1)."""
    gray = cv2.cvtColor(img.astype(np.uint8), cv2.COLOR_BGR2GRAY)
    blur = cv2.GaussianBlur(gray, (0, 0), 3)
    highPass = gray.astype(int) - blur.astype(int) + 255
    highPass[highPass < 0] = 0
    highPass[highPass > 255] = 255
    highPass = highPass.astype(np.float64)  # np.float removed in NumPy 1.24
    highPass = highPass / 255.0
    highPass = 1 - highPass
    highPass = highPass[None]
    return highPass.transpose((1, 2, 0))
def get_light_map_drawer2(img):
    """Per-channel drawer high-pass, reduced to the per-pixel channel maximum."""
    ret = img.copy()
    ret = ret.astype(np.float64)  # np.float removed in NumPy 1.24
    ret[:, :, 0] = get_light_map_drawer3(img[:, :, 0])
    ret[:, :, 1] = get_light_map_drawer3(img[:, :, 1])
    ret[:, :, 2] = get_light_map_drawer3(img[:, :, 2])
    ret = np.amax(ret, 2)
    return ret
def get_light_map_drawer3(img):
    """Box-blur based high-pass of one channel, inverted into [0, 1]."""
    gray = img
    blur = cv2.blur(gray, ksize=(5, 5))
    highPass = gray.astype(int) - blur.astype(int) + 255
    highPass[highPass < 0] = 0
    highPass[highPass > 255] = 255
    highPass = highPass.astype(np.float64)  # np.float removed in NumPy 1.24
    highPass = highPass / 255.0
    highPass = 1 - highPass
    return highPass
def dodgeV2(image, mask):
    """Color-dodge blend: brighten *image* in proportion to *mask*."""
    inverse = 255 - mask
    return cv2.divide(image, inverse, scale=256)
def to_pencil_sketch(img):
    """Classic pencil-sketch effect: grayscale, blur, then color-dodge the blur.

    NOTE(review): the prints look like leftover debug output — presumably this
    is an experimentation helper; confirm before using in production paths.
    """
    img_gray = cv2.cvtColor(img.astype(np.uint8), cv2.COLOR_BGR2GRAY)
    img_blur = cv2.GaussianBlur(img_gray, (3, 3), 0)
    print('gray', img_gray)
    print('blur', img_blur)
    high_pass = dodgeV2(img_gray, img_blur)
    print('highpass', high_pass.shape, high_pass[125:150])
    return high_pass
def high_pass(img):
    """Gray high-pass of *img* offset by 255 (experimental; prints debug info).

    Removed the block of commented-out alternative implementations; the
    active computation is unchanged.
    """
    gray = cv2.cvtColor(img.astype(np.uint8), cv2.COLOR_BGR2GRAY)
    blur = cv2.GaussianBlur(gray, (5, 5), 0)
    print('gray', gray)
    print('blur', blur)
    highPass = (gray.astype(int) - blur.astype(int)) + 255
    print('highpass', highPass.shape, highPass)
    print('highpass', highPass.shape, highPass)
    return highPass
def high_pass_sketchkeras(img):
    """Produce the uint8 high-pass input used by the sketchKeras model.

    Debug prints are preserved; only the removed `np.float` alias was replaced.
    """
    mat_color = get_light_map(img)
    print('mat_color_divide', mat_color.shape, mat_color)
    mat_color = normalize_pic(mat_color)
    print('mat_color_norm', mat_color.shape, mat_color)
    mat = mat_color.astype(np.float64)  # np.float removed in NumPy 1.24
    print('mat_color_float', mat.shape, mat)
    mat = (1 + mat / 128) * 255.0
    print('mat_color_multi', mat.shape, mat)
    mat[mat < 0] = 0
    mat[mat > 255] = 255
    mat = mat.astype(np.uint8)
    print('mat_color', mat_color.shape)
    return mat
def superlize_pic(img):
    """Boost *img* by a fixed gain (~2.33) and clip the result to 1."""
    boosted = img * 2.33333
    boosted[boosted > 1] = 1
    return boosted
def mask_pic(img, mask):
    """Apply a blurred, binarized, re-normalized version of *mask* to *img*.

    The mask is thresholded so that only its exact peaks survive, then
    re-blurred and converted to a [0, 1] gray map before multiplication.
    """
    mask_mat = mask
    mask_mat = mask_mat.astype(np.float64)  # np.float removed in NumPy 1.24
    mask_mat = cv2.GaussianBlur(mask_mat, (0, 0), 1)
    mask_mat = mask_mat / np.max(mask_mat)
    mask_mat = mask_mat * 255
    # keep only pixels that reached the maximum; everything else -> 0
    mask_mat[mask_mat < 255] = 0
    mask_mat = mask_mat.astype(np.uint8)
    mask_mat = cv2.GaussianBlur(mask_mat, (0, 0), 3)
    mask_mat = get_gray_map(mask_mat)
    mask_mat = normalize_pic(mask_mat)
    mask_mat = resize_img_512(mask_mat)
    super_from = np.multiply(img, mask_mat)
    return super_from
def resize_img_512(img):
    """Pad *img* into the top-left corner of a 512x512 zero canvas (same channels)."""
    zeros = np.zeros((512, 512, img.shape[2]), dtype=np.float64)  # np.float removed in NumPy 1.24
    zeros[:img.shape[0], :img.shape[1]] = img
    return zeros
def resize_img_512_3d(img):
    """Embed *img* into a zero (1, 3, 512, 512) volume, returned channel-last.

    NOTE(review): assumes img.shape[0] <= 3 (channel count) — confirm callers.
    """
    zeros = np.zeros((1, 3, 512, 512), dtype=np.float64)  # np.float removed in NumPy 1.24
    zeros[0, 0: img.shape[0], 0: img.shape[1], 0: img.shape[2]] = img
    return zeros.transpose((1, 2, 3, 0))
def broadcast_img_to_3d(img):
    """Add a trailing singleton axis: (H, W, C) -> (H, W, C, 1) as float64."""
    zeros = np.zeros((1, img.shape[0], img.shape[1], img.shape[2]), dtype=np.float64)  # np.float removed in NumPy 1.24
    zeros[0, :, :, :] = img
    return zeros.transpose((1, 2, 3, 0))
def show_active_img_and_save(name, img, path):
    """Invert an activation map to uint8, display it in window *name*, save to *path*."""
    mat = img.astype(np.float64)  # np.float removed in NumPy 1.24
    mat = - mat + 1
    mat = mat * 255.0
    mat[mat < 0] = 0
    mat[mat > 255] = 255
    mat = mat.astype(np.uint8)
    cv2.imshow(name, mat)
    cv2.imwrite(path, mat)
    return
def denoise_mat(img, i):
    """Median-filter *img* with window size *i* (size 1 is the identity)."""
    return ndimage.median_filter(img, size=i)
def show_active_img_and_save_denoise(name, img, path):
    """Like show_active_img_and_save, with a (size-1, identity) median-filter pass."""
    mat = img.astype(np.float64)  # np.float removed in NumPy 1.24
    mat = - mat + 1
    mat = mat * 255.0
    mat[mat < 0] = 0
    mat[mat > 255] = 255
    mat = mat.astype(np.uint8)
    mat = ndimage.median_filter(mat, 1)
    cv2.imshow(name, mat)
    cv2.imwrite(path, mat)
    return
def show_active_img_and_save_denoise_filter(name, img, path):
    """Zero out weak activations (< 0.18), then invert, display and save as uint8."""
    mat = img.astype(np.float64)  # np.float removed in NumPy 1.24
    threshold = 0.18
    mat[mat < threshold] = 0
    mat = - mat + 1
    mat = mat * 255.0
    mat[mat < 0] = 0
    mat[mat > 255] = 255
    mat = mat.astype(np.uint8)
    mat = ndimage.median_filter(mat, 1)
    cv2.imshow(name, mat)
    cv2.imwrite(path, mat)
    return
def show_active_img_and_save_denoise_filter2(name, img, path):
    """Same as ..._denoise_filter but with a weaker threshold (0.1)."""
    mat = img.astype(np.float64)  # np.float removed in NumPy 1.24
    threshold = 0.1
    mat[mat < threshold] = 0
    mat = - mat + 1
    mat = mat * 255.0
    mat[mat < 0] = 0
    mat[mat > 255] = 255
    mat = mat.astype(np.uint8)
    mat = ndimage.median_filter(mat, 1)
    cv2.imshow(name, mat)
    cv2.imwrite(path, mat)
    return
def show_active_img(name, img):
    """Invert an activation map to uint8 and display it in window *name*."""
    mat = img.astype(np.float64)  # np.float removed in NumPy 1.24
    mat = - mat + 1
    mat = mat * 255.0
    mat[mat < 0] = 0
    mat[mat > 255] = 255
    mat = mat.astype(np.uint8)
    cv2.imshow(name, mat)
    return
def get_active_img(img):
    """Return the inverted activation map as uint8 in [0, 255]."""
    mat = img.astype(np.float64)  # np.float removed in NumPy 1.24
    mat = - mat + 1
    mat = mat * 255.0
    mat[mat < 0] = 0
    mat[mat > 255] = 255
    mat = mat.astype(np.uint8)
    return mat
def get_active_img_fil(img):
    """Like get_active_img but zeros out activations below 0.18 first."""
    mat = img.astype(np.float64)  # np.float removed in NumPy 1.24
    mat[mat < 0.18] = 0
    mat = - mat + 1
    mat = mat * 255.0
    mat[mat < 0] = 0
    mat[mat > 255] = 255
    mat = mat.astype(np.uint8)
    return mat
def show_double_active_img(name, img):
    """Display *img* (expected range [-1, 1]) remapped to [0, 255] in window *name*."""
    mat = img.astype(np.float64)  # np.float removed in NumPy 1.24
    mat = mat * 128.0
    mat = mat + 127.0
    mat[mat < 0] = 0
    mat[mat > 255] = 255
    cv2.imshow(name, mat.astype(np.uint8))
    return
def debug_pic_helper():
    """Visually step through the training data pairs for manual inspection.

    NOTE(review): uses Windows-style relative paths ('data\\gray\\...') —
    confirm this helper is only run on Windows from the project root.
    """
    for index in range(1130):
        gray_path = 'data\\gray\\' + str(index) + '.jpg'
        color_path = 'data\\color\\' + str(index) + '.jpg'
        mat_color = cv2.imread(color_path)
        mat_color = get_light_map(mat_color)
        mat_color = normalize_pic(mat_color)
        mat_color = resize_img_512(mat_color)
        show_double_active_img('mat_color', mat_color)
        mat_gray = cv2.imread(gray_path)
        mat_gray = get_gray_map(mat_gray)
        mat_gray = normalize_pic(mat_gray)
        mat_gray = resize_img_512(mat_gray)
        show_active_img('mat_gray', mat_gray)
        cv2.waitKey(1000)
def log(*args):
    """Print *args* prefixed with the current wall-clock time (HH:MM:SS.micro)."""
    timestamp = datetime.now().strftime("%H:%M:%S.%f")
    print(timestamp, *args)
# 2016/6/22 21:40:10.000
def string_generator(length):
    """Return a cryptographically random lowercase-alphanumeric string of *length* chars."""
    alphabet = string.ascii_lowercase + string.digits
    rng = random.SystemRandom()
    return ''.join(rng.choice(alphabet) for _ in range(length))
|
apache-2.0
|
plq/spyne
|
examples/validation.py
|
2
|
2587
|
#!/usr/bin/env python
# encoding: utf8
#
# Copyright © Burak Arslan <burak at arskom dot com dot tr>,
# Arskom Ltd. http://www.arskom.com.tr
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the owner nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""Use:
curl http://localhost:9912/get_name_of_month?month=12
to use this service.
"""
host = '127.0.0.1'
port = 8000
import logging
from datetime import datetime
from spyne import Integer, Unicode, rpc, Service
class NameOfMonthService(Service):
@rpc(Integer(ge=1, le=12), _returns=Unicode)
def get_name_of_month(ctx, month):
return datetime(2000, month, 1).strftime("%B")
from spyne.application import Application
from spyne.protocol.http import HttpRpc
rest = Application([NameOfMonthService],
tns='spyne.examples.multiprot',
in_protocol=HttpRpc(validator='soft'),
out_protocol=HttpRpc()
)
from spyne.server.wsgi import WsgiApplication
from wsgiref.simple_server import make_server
server = make_server(host, port, WsgiApplication(rest))
logging.basicConfig(level=logging.DEBUG)
logging.info("listening to http://%s:%d" % (host, port))
server.serve_forever()
|
lgpl-2.1
|
cgundogan/RIOT
|
tests/gnrc_ipv6_ext_frag/tests-as-root/01-run.py
|
7
|
15314
|
#!/usr/bin/env python3
# Copyright (C) 2019 Freie Universität Berlin
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import re
import os
import pexpect
import socket
import sys
import subprocess
import time
from scapy.all import Ether, ICMPv6PacketTooBig, IPv6, IPv6ExtHdrFragment, \
UDP, raw, sendp, srp1
from testrunner import run, check_unittests
RECV_BUFSIZE = 2 * 1500
TEST_SAMPLE = b"This is a test. Failure might sometimes be an option, but " \
b"not today. "
EXT_HDR_NH = {
IPv6ExtHdrFragment: 44,
}
def pktbuf_empty(child):
    """Assert via the node's `pktbuf` shell command that the whole packet
    buffer is one single unused chunk (i.e. nothing leaked)."""
    child.sendline("pktbuf")
    child.expect(r"packet buffer: first byte: (?P<first_byte>0x[0-9a-fA-F]+), "
                 r"last byte: 0x[0-9a-fA-F]+ \(size: (?P<size>\d+)\)")
    first_byte = child.match.group("first_byte")
    size = child.match.group("size")
    child.expect(
        r"~ unused: {} \(next: (\(nil\)|0), size: {}\) ~".format(
            first_byte, size))
def pktbuf_size(child):
    """Return the configured size (in bytes) of the node's packet buffer."""
    child.sendline("pktbuf")
    child.expect(r"packet buffer: first byte: (?P<first_byte>0x[0-9a-fA-F]+), "
                 r"last byte: 0x[0-9a-fA-F]+ \(size: (?P<size>\d+)\)")
    size = child.match.group("size")
    return int(size)
def start_udp_server(child, port):
    """Start the node's shell UDP server on *port* and wait for confirmation."""
    child.sendline("udp server start {}".format(port))
    child.expect_exact("Success: started UDP server on port {}".format(port))
def stop_udp_server(child):
    """Stop the node's shell UDP server; accepts 'not running' as success."""
    child.sendline("udp server stop")
    # either way: it is stopped
    child.expect(["Success: stopped UDP server",
                  "Error: server was not running"])
def udp_send(child, addr, iface, port, length, num=1, delay=1000000):
    """Have the node send *num* UDP packets of *length* bytes and await confirmation.

    NOTE: `**vars()` expands this function's local variables into the
    format strings (addr, iface, port, length, num, delay).
    """
    child.sendline("udp send {addr}%{iface} {port} {length} {num} {delay}"
                   .format(**vars()))
    child.expect(r"Success: send {length} byte to \[[0-9a-f:]+\]:{port}"
                 .format(**vars()))
def check_and_search_output(cmd, pattern, res_group, *args, **kwargs):
    """Run *cmd*, scan its stdout line by line for *pattern*, and return
    group *res_group* of the first match, or None if nothing matches."""
    output = subprocess.check_output(cmd, *args, **kwargs).decode("utf-8")
    for line in output.splitlines():
        match = re.search(pattern, line)
        if match is not None:
            return match.group(res_group)
    return None
def get_bridge(tap):
    """Return the bridge *tap* is enslaved to, or *tap* itself if unbridged."""
    res = check_and_search_output(
        ["bridge", "link"],
        r"{}.+master\s+(?P<master>[^\s]+)".format(tap),
        "master"
    )
    return tap if res is None else res
def get_host_lladdr(tap):
    """Return the host's IPv6 link-local address on interface *tap*.

    Raises AssertionError if no link-scope address is configured.
    """
    res = check_and_search_output(
        ["ip", "addr", "show", "dev", tap, "scope", "link"],
        r"inet6 (?P<lladdr>[0-9A-Fa-f:]+)/64",
        "lladdr"
    )
    if res is None:
        raise AssertionError(
            "Can't find host link-local address on interface {}".format(tap)
        )
    else:
        return res
def get_host_mtu(tap):
    """Return the MTU of host interface *tap*.

    NOTE: the regex hard-codes 1500, so any other MTU is treated as absent
    and triggers the AssertionError below.
    """
    res = check_and_search_output(
        ["ip", "link", "show", tap],
        r"mtu (?P<mtu>1500)",
        "mtu"
    )
    if res is None:
        # Fixed copy-paste from get_host_lladdr: the old message wrongly
        # claimed a missing link-local address.
        raise AssertionError(
            "Can't find MTU on interface {}".format(tap)
        )
    else:
        return int(res)
def test_reass_successful_udp(child, iface, hw_dst, ll_dst, ll_src):
    """Send a UDP datagram larger than the MTU from the host; the node must
    reassemble the fragments into one datagram of the expected size."""
    port = 1337
    mtu = get_host_mtu(iface)
    byte_max = 0xff
    # payload larger than the MTU so the host kernel fragments it
    payload_len = (byte_max * ((mtu // byte_max) + 1))
    if not (mtu % byte_max):
        payload_len += 1
    start_udp_server(child, port)
    with socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) as s:
        res = socket.getaddrinfo("{}%{}".format(ll_src, iface), None)
        s.bind(res[0][4])
        s.sendto(bytes(i for i in range(byte_max)) * (payload_len // byte_max),
                 (ll_dst, port))
        child.expect(
            r"~~ SNIP 0 - size: {} byte, type: NETTYPE_UNDEF \(\d+\)"
            .format(payload_len)
        )
        # 4 snips: payload, UDP header, IPv6 header, netif header
        # (fragmentation header was removed)
        child.expect(
            r"~~ PKT - 4 snips, total size: (\d+) byte"
        )
        size = int(child.match.group(1))
        # 40 = IPv6 header length; 8 = UDP header length
        # >= since netif header also has a length
        assert size >= (payload_len + 40 + 8)
    stop_udp_server(child)
    pktbuf_empty(child)
def test_reass_too_short_header(child, iface, hw_dst, ll_dst, ll_src):
    """Send a fragment header truncated to one byte; the node must drop it
    without leaking packet buffer space."""
    sendp(Ether(dst=hw_dst) / IPv6(dst=ll_dst, src=ll_src,
                                   nh=EXT_HDR_NH[IPv6ExtHdrFragment]) / "\x11",
          iface=iface, verbose=0)
    pktbuf_empty(child)
def test_reass_offset_too_large(child, iface, hw_dst, ll_dst, ll_src):
    """Send a fragment whose offset lies far beyond the packet buffer size;
    the node must discard it and leave the packet buffer empty."""
    size = pktbuf_size(child)
    # Parenthesized the payload: previously `... / "x" * 128` evaluated as
    # `(pkt / "x") * 128` (scapy's `/` and `*` share precedence and are
    # left-associative), sending 128 copies of a 1-byte-payload fragment
    # instead of one fragment with a 128-byte payload.
    sendp(Ether(dst=hw_dst) / IPv6(dst=ll_dst, src=ll_src) /
          IPv6ExtHdrFragment(offset=((size * 2) // 8)) / ("x" * 128),
          iface=iface, verbose=0)
    pktbuf_empty(child)
def test_ipv6_ext_frag_shell_test_0(child, s, iface, ll_dst):
    """Run shell test 0 on the node and check the host receives TEST_SAMPLE."""
    child.sendline("test {} 0".format(ll_dst))
    data, _ = s.recvfrom(RECV_BUFSIZE)
    assert data == TEST_SAMPLE
    pktbuf_empty(child)
def test_ipv6_ext_frag_shell_test_1(child, s, iface, ll_dst):
    """Run shell test 1: the received payload must be TEST_SAMPLE repeated."""
    child.sendline("test {} 1".format(ll_dst))
    data, _ = s.recvfrom(RECV_BUFSIZE)
    offset = 0
    while (offset < len(data)):
        assert data[offset:(offset + len(TEST_SAMPLE))] == TEST_SAMPLE
        offset += len(TEST_SAMPLE)
    pktbuf_empty(child)
def _check_iface(child):
    """Parse `ifconfig` output and return (ethos_id, mock_id, hwaddr).

    The node is expected to have exactly two interfaces: the ethos one
    (which has a hardware address) and a mock interface (which does not).
    """
    # get TAP MAC address
    child.sendline("ifconfig")
    ethos_id = None
    mock_id = None
    hwaddr = None
    for _ in range(2):
        child.expect(r"Iface\s+(\d+)\s+.*")
        match = re.search(r"HWaddr:\s+([0-9A-F:]{17})\s+",
                          child.match.group(0))
        if match is not None:
            # interface has a hardware address
            ethos_id = int(child.match.group(1))
            hwaddr = match.group(1)
        else:
            mock_id = int(child.match.group(1))
            # consume MTU for later calls of `ifconfig {mock_id}`
            child.expect(r"MTU:(\d+)")
    # check if interface is configured properly
    assert ethos_id is not None
    assert mock_id is not None
    assert hwaddr is not None
    return ethos_id, mock_id, hwaddr
def test_ipv6_ext_frag_send_success(child, s, iface, ll_dst):
    """Node sends an MTU-sized UDP payload; the host must receive it intact."""
    length = get_host_mtu(iface)
    port = s.getsockname()[1]
    ethos_id, _, _ = _check_iface(child)
    udp_send(child, ll_dst, ethos_id, port, length)
    data, _ = s.recvfrom(length)
    assert len(data) == length
    pktbuf_empty(child)
def test_ipv6_ext_frag_send_last_fragment_filled(child, s, iface, ll_dst):
    """Send a payload sized so the second (last) fragment exactly fills the MTU."""
    # every fragment has an IPv6 header and a fragmentation header so subtract
    # them
    mtu = get_host_mtu(iface) - len(IPv6() / IPv6ExtHdrFragment())
    # first fragment has UDP header (so subtract it) and is rounded down to
    # the nearest multiple of 8
    length = (mtu - len(UDP())) & 0xfff8
    # second fragment fills the whole available MTU
    length += mtu
    port = s.getsockname()[1]
    ethos_id, _, _ = _check_iface(child)
    udp_send(child, ll_dst, ethos_id, port, length)
    data, _ = s.recvfrom(length)
    assert len(data) == length
    pktbuf_empty(child)
def test_ipv6_ext_frag_send_last_fragment_only_one_byte(child, s,
                                                        iface, ll_dst):
    """Send a payload one byte larger than fits unfragmented, so the last
    fragment carries exactly one byte."""
    mtu = get_host_mtu(iface)
    # subtract IPv6 and UDP header as they are not part of the UDP payload
    length = (mtu - len(IPv6() / UDP()))
    length += 1
    port = s.getsockname()[1]
    ethos_id, _, _ = _check_iface(child)
    udp_send(child, ll_dst, ethos_id, port, length)
    data, _ = s.recvfrom(length)
    assert len(data) == length
    pktbuf_empty(child)
def test_ipv6_ext_frag_send_full_pktbuf(child, s, iface, ll_dst):
    """Shrink a near-pktbuf-sized payload until it can be sent successfully;
    the loop must need more than one attempt for the test to be meaningful."""
    length = pktbuf_size(child)
    # remove some slack for meta-data and header and 1 addition fragment header
    length -= (len(IPv6() / IPv6ExtHdrFragment() / UDP()) +
               (len(IPv6() / IPv6ExtHdrFragment())) + 96)
    port = s.getsockname()[1]
    ethos_id, _, _ = _check_iface(child)
    # trigger neighbor discovery so it doesn't fill the packet buffer
    udp_send(child, ll_dst, ethos_id, port, 1)
    data, _ = s.recvfrom(1)
    last_nd = time.time()
    count = 0
    while True:
        if (time.time() - last_nd) > 5:
            # trigger neighbor discovery so it doesn't fill the packet buffer
            udp_send(child, ll_dst, ethos_id, port, 1)
            data, _ = s.recvfrom(1)
            last_nd = time.time()
        udp_send(child, ll_dst, ethos_id, port, length)
        count += 1
        try:
            data, _ = s.recvfrom(length)
        except socket.timeout:
            # 8 is the alignment unit of the packet buffer
            # and 20 the size of a packet snip, so take next multiple of 8 to
            # 28
            # NOTE(review): the decrement is 24, not 28 — confirm which is
            # intended.
            length -= 24
        else:
            break
        finally:
            pktbuf_empty(child)
    assert(count > 1)
def _fwd_setup(child, ll_dst, g_src, g_dst):
    """Install routes/neighbor entries so the node forwards g_src -> g_dst
    via the mock interface; returns (mock_id, mock_mtu, hwaddr)."""
    ethos_id, mock_id, hwaddr = _check_iface(child)
    child.sendline("ifconfig {}".format(mock_id))
    child.expect(r"MTU:(\d+)")
    mtu = int(child.match.group(1))
    # configure routes
    child.sendline("nib route add {} {}/128 fe80::1".format(mock_id, g_dst))
    child.sendline("nib route add {} {}/128 {}"
                   .format(ethos_id, g_src, ll_dst))
    child.sendline("nib route")
    child.expect(r"{}/128 via fe80::1 dev #{}".format(g_dst, mock_id))
    child.expect(r"{}/128 via {} dev #{}".format(g_src, ll_dst, ethos_id))
    child.sendline("nib neigh add {} fe80::1".format(mock_id))
    child.sendline("nib neigh")
    child.expect(r"fe80::1 dev #{} lladdr\s+-".format(mock_id))
    return mock_id, mtu, hwaddr
def _fwd_teardown(child, mock_id):
    """Undo the neighbor/route configuration installed by _fwd_setup."""
    # remove route
    child.sendline("nib neigh del {} fe80::1".format(mock_id))
    child.sendline("nib route del {} affe::/64".format(mock_id))
def test_ipv6_ext_frag_fwd_success(child, s, iface, ll_dst):
    """Forward a fragment through the node and verify the hexdump the mock
    device prints matches the packet with a decremented hop limit."""
    mock_id, mtu, dst_mac = _fwd_setup(child, ll_dst, "beef::1", "affe::1")
    payload_fit = mtu - len(IPv6() / IPv6ExtHdrFragment() / UDP())
    pkt = Ether(dst=dst_mac) / IPv6(src="beef::1", dst="affe::1") / \
        IPv6ExtHdrFragment(m=True, id=0x477384a9) / \
        UDP(sport=1337, dport=1337) / ("x" * payload_fit)
    # fill missing fields
    pkt = Ether(raw(pkt))
    sendp(pkt, verbose=0, iface=iface)
    # check hexdump of mock device
    ipv6 = pkt[IPv6]
    ipv6.hlim -= 1  # the packet will have passed a hop
    # segment packet as GNRC does
    segments = [bytes(ipv6)[:40], bytes(ipv6.payload)]
    for seg in segments:
        addr = 0
        for i in range(0, len(seg), 16):
            bs = seg[i:i+16]
            exp_str = ("{:08X}" + (" {:02X}") * len(bs)).format(addr, *bs)
            child.expect_exact(exp_str)
            addr += 16
    _fwd_teardown(child, mock_id)
def test_ipv6_ext_frag_fwd_too_big(child, s, iface, ll_dst):
    """A fragment too large for the egress MTU must trigger an ICMPv6
    Packet Too Big error instead of being re-fragmented."""
    mock_id, mtu, dst_mac = _fwd_setup(child, ll_dst, "beef::1", "affe::1")
    assert(get_host_mtu(iface) > mtu)
    payload_fit = get_host_mtu(iface) - len(IPv6() / IPv6ExtHdrFragment() /
                                            UDP())
    pkt = srp1(Ether(dst=dst_mac) / IPv6(src="beef::1", dst="affe::1") /
               IPv6ExtHdrFragment(m=True, id=0x477384a9) /
               UDP(sport=1337, dport=1337) / ("x" * payload_fit),
               timeout=2, verbose=0, iface=iface)
    # packet should not be fragmented further but an ICMPv6 error should be
    # returned instead
    assert(pkt is not None)
    assert(ICMPv6PacketTooBig in pkt)
    assert(IPv6ExtHdrFragment in pkt)
    assert(pkt[IPv6ExtHdrFragment].id == 0x477384a9)
    _fwd_teardown(child, mock_id)
def testfunc(child):
    """Top-level test driver run by testrunner: unittests first, then the
    sending tests, then (on native only) the reception tests."""
    tap = get_bridge(os.environ["TAP"])
    child.sendline("unittests")
    # wait for and check result of unittests
    print("." * check_unittests(child), end="", flush=True)
    lladdr_src = get_host_lladdr(tap)
    def run_sock_test(func, s):
        # helper: run one socket-based test, printing progress dots unless
        # output is already going to stdout
        if child.logfile == sys.stdout:
            func(child, s, tap, lladdr_src)
        else:
            try:
                func(child, s, tap, lladdr_src)
                print(".", end="", flush=True)
            except PermissionError:
                print("\n\x1b[1;33mSkipping {} because of missing "
                      "privileges\x1b[0m".format(func.__name__))
            except Exception as e:
                print("FAILED")
                raise e
    child.sendline("send-test-pkt")
    child.expect(r"Sending UDP test packets to port (\d+)\r\n")
    port = int(child.match.group(1))
    with socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) as s:
        res = socket.getaddrinfo("{}%{}".format(lladdr_src, tap), port)
        s.bind(res[0][4])
        s.settimeout(.3)
        run_sock_test(test_ipv6_ext_frag_shell_test_0, s)
        run_sock_test(test_ipv6_ext_frag_shell_test_1, s)
        run_sock_test(test_ipv6_ext_frag_send_success, s)
        run_sock_test(test_ipv6_ext_frag_send_last_fragment_filled, s)
        run_sock_test(test_ipv6_ext_frag_send_last_fragment_only_one_byte, s)
        run_sock_test(test_ipv6_ext_frag_send_full_pktbuf, s)
        run_sock_test(test_ipv6_ext_frag_fwd_success, s)
        run_sock_test(test_ipv6_ext_frag_fwd_too_big, s)
    if os.environ.get("BOARD", "") != "native":
        # ethos currently can't handle the larger, rapidly sent packets by the
        # IPv6 fragmentation of the Linux Kernel
        print("SUCCESS")
        print("Skipping datagram reception tests due to ethos bug.")
        return
    # datagram reception tests
    res = 1
    count = 0
    while res:
        # check `ifconfig` and also get addresses from it until
        # link-local address becomes valid
        time.sleep(1)
        child.sendline("ifconfig")
        child.expect(r"HWaddr: (?P<hwaddr>[A-Fa-f:0-9]+)\s")
        hwaddr_dst = child.match.group("hwaddr").lower()
        res = child.expect([
            r"(?P<lladdr>fe80::[A-Fa-f:0-9]+)\s+scope:\s+link\s+VAL",
            pexpect.TIMEOUT
        ])
        count += 1
        if res and (count > 5):
            raise pexpect.TIMEOUT("Link-local address did not become valid")
    lladdr_dst = child.match.group("lladdr").lower()
    def run(func):
        # NOTE: shadows testrunner's `run` inside this function only; the
        # module-level `run(testfunc, ...)` call is unaffected.
        if child.logfile == sys.stdout:
            func(child, tap, hwaddr_dst, lladdr_dst, lladdr_src)
        else:
            try:
                func(child, tap, hwaddr_dst, lladdr_dst, lladdr_src)
                print(".", end="", flush=True)
            except PermissionError:
                print("\n\x1b[1;33mSkipping {} because of missing "
                      "privileges\x1b[0m".format(func.__name__))
            except Exception as e:
                print("FAILED")
                raise e
    run(test_reass_successful_udp)
    run(test_reass_too_short_header)
    run(test_reass_offset_too_large)
    print("SUCCESS")
if __name__ == "__main__":
if os.geteuid() != 0:
print("\x1b[1;31mThis test requires root privileges.\n"
"It's constructing and sending Ethernet frames.\x1b[0m\n",
file=sys.stderr)
sys.exit(1)
sys.exit(run(testfunc, timeout=2, echo=False))
|
lgpl-2.1
|
chunywang/crosswalk-test-suite
|
tools/lint/sourcefile.py
|
12
|
9976
|
import os
import urlparse
from fnmatch import fnmatch
try:
from xml.etree import cElementTree as ElementTree
except ImportError:
from xml.etree import ElementTree
import html5lib
import vcs
from item import Stub, ManualTest, WebdriverSpecTest, RefTest, TestharnessTest
from utils import rel_path_to_url, ContextManagerStringIO, cached_property
wd_pattern = "*.py"
class SourceFile(object):
parsers = {"html":lambda x:html5lib.parse(x, treebuilder="etree"),
"xhtml":ElementTree.parse,
"svg":ElementTree.parse}
def __init__(self, tests_root, rel_path, url_base, use_committed=False):
"""Object representing a file in a source tree.
:param tests_root: Path to the root of the source tree
:param rel_path: File path relative to tests_root
:param url_base: Base URL used when converting file paths to urls
:param use_committed: Work with the last committed version of the file
rather than the on-disk version.
"""
self.tests_root = tests_root
self.rel_path = rel_path
self.url_base = url_base
self.use_committed = use_committed
self.url = rel_path_to_url(rel_path, url_base)
self.path = os.path.join(tests_root, rel_path)
self.dir_path, self.filename = os.path.split(self.path)
self.name, self.ext = os.path.splitext(self.filename)
self.type_flag = None
if "-" in self.name:
self.type_flag = self.name.rsplit("-", 1)[1]
self.meta_flags = self.name.split(".")[1:]
def __getstate__(self):
# Remove computed properties if we pickle this class
rv = self.__dict__.copy()
if "__cached_properties__" in rv:
cached_properties = rv["__cached_properties__"]
for key in rv.keys():
if key in cached_properties:
del rv[key]
del rv["__cached_properties__"]
return rv
def name_prefix(self, prefix):
"""Check if the filename starts with a given prefix
:param prefix: The prefix to check"""
return self.filename.startswith(prefix)
def open(self):
"""Return a File object opened for reading the file contents,
or the contents of the file when last committed, if
use_comitted is true."""
if self.use_committed:
git = vcs.get_git_func(os.path.dirname(__file__))
blob = git("show", "HEAD:%s" % self.rel_path)
file_obj = ContextManagerStringIO(blob)
else:
file_obj = open(self.path)
return file_obj
@property
def name_is_non_test(self):
"""Check if the file name matches the conditions for the file to
be a non-test file"""
return (os.path.isdir(self.path) or
self.name_prefix("MANIFEST") or
self.filename.startswith("."))
@property
def name_is_stub(self):
"""Check if the file name matches the conditions for the file to
be a stub file"""
return self.name_prefix("stub-")
@property
def name_is_manual(self):
"""Check if the file name matches the conditions for the file to
be a manual test file"""
return self.type_flag == "manual"
@property
def name_is_worker(self):
"""Check if the file name matches the conditions for the file to
be a worker js test file"""
return "worker" in self.meta_flags and self.ext == ".js"
@property
def name_is_webdriver(self):
"""Check if the file name matches the conditions for the file to
be a webdriver spec test file"""
# wdspec tests are in subdirectories of /webdriver excluding __init__.py
# files.
rel_dir_tree = self.rel_path.split(os.path.sep)
return (rel_dir_tree[0] == "webdriver" and
len(rel_dir_tree) > 2 and
self.filename != "__init__.py" and
fnmatch(self.filename, wd_pattern))
@property
def name_is_reference(self):
"""Check if the file name matches the conditions for the file to
be a reference file (not a reftest)"""
return self.type_flag in ("ref", "notref")
@property
def markup_type(self):
"""Return the type of markup contained in a file, based on its extension,
or None if it doesn't contain markup"""
ext = self.ext
if not ext:
return None
if ext[0] == ".":
ext = ext[1:]
if ext in ["html", "htm"]:
return "html"
if ext in ["xhtml", "xht"]:
return "xhtml"
if ext == "svg":
return "svg"
return None
@cached_property
def root(self):
"""Return an ElementTree Element for the root node of the file if it contains
markup, or None if it does not"""
if not self.markup_type:
return None
parser = self.parsers[self.markup_type]
with self.open() as f:
try:
tree = parser(f)
except Exception:
return None
if hasattr(tree, "getroot"):
root = tree.getroot()
else:
root = tree
return root
@cached_property
def timeout_nodes(self):
"""List of ElementTree Elements corresponding to nodes in a test that
specify timeouts"""
return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='timeout']")
@cached_property
def timeout(self):
"""The timeout of a test or reference file. "long" if the file has an extended timeout
or None otherwise"""
if not self.root:
return
if self.timeout_nodes:
timeout_str = self.timeout_nodes[0].attrib.get("content", None)
if timeout_str and timeout_str.lower() == "long":
return timeout_str
@cached_property
def testharness_nodes(self):
"""List of ElementTree Elements corresponding to nodes representing a
testharness.js script"""
nodes = []
lists = self.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src]")
for item in lists:
if item.attrib['src'].endswith('testharness.js'):
nodes.append(item)
return nodes
@cached_property
def content_is_testharness(self):
"""Boolean indicating whether the file content represents a
testharness.js test"""
if not self.root:
return None
return bool(self.testharness_nodes)
@cached_property
def testharnessreport_nodes(self):
"""List of ElementTree Elements corresponding to nodes representing a
testharnessreport.js script"""
nodes = []
lists = self.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src]")
for item in lists:
if item.attrib['src'].endswith('testharnessreport.js'):
nodes.append(item)
return nodes
@cached_property
def variant_nodes(self):
"""List of ElementTree Elements corresponding to nodes representing a
test variant"""
return self.root.findall(".//{http://www.w3.org/1999/xhtml}meta[@name='variant']")
@cached_property
def test_variants(self):
rv = []
for element in self.variant_nodes:
if "content" in element.attrib:
variant = element.attrib["content"]
assert variant == "" or variant[0] in ["#", "?"]
rv.append(variant)
if not rv:
rv = [""]
return rv
@cached_property
def reftest_nodes(self):
"""List of ElementTree Elements corresponding to nodes representing a
to a reftest <link>"""
if not self.root:
return []
match_links = self.root.findall(".//{http://www.w3.org/1999/xhtml}link[@rel='match']")
mismatch_links = self.root.findall(".//{http://www.w3.org/1999/xhtml}link[@rel='mismatch']")
return match_links + mismatch_links
@cached_property
def references(self):
    """(ref_url, relation) tuples for every reftest reference in the file,
    where relation is "==" for rel=match and "!=" for rel=mismatch."""
    rel_map = {"match": "==", "mismatch": "!="}
    refs = []
    for node in self.reftest_nodes:
        href = node.attrib.get("href")
        if href is None:
            continue
        ref_url = urlparse.urljoin(self.url, href)
        refs.append((ref_url, rel_map[node.attrib["rel"]]))
    return refs
@cached_property
def content_is_ref_node(self):
    """Boolean indicating whether the file is a non-leaf node in a reftest
    graph (i.e. if it contains any <link rel=[mis]match>"""
    return bool(self.references)
def manifest_items(self):
    """List of manifest items corresponding to the file. There is typically one
    per test, but in the case of reftests a node may have corresponding manifest
    items without being a test itself.

    Note: the name-based checks deliberately take precedence over the
    content-based ones, so the branch order matters."""
    if self.name_is_non_test:
        # Support file whose name marks it as never being a test.
        rv = []
    elif self.name_is_stub:
        rv = [Stub(self, self.url)]
    elif self.name_is_manual:
        rv = [ManualTest(self, self.url)]
    elif self.name_is_worker:
        # Strip the trailing ".js" from the URL for a worker test.
        rv = [TestharnessTest(self, self.url[:-3])]
    elif self.name_is_webdriver:
        rv = [WebdriverSpecTest(self)]
    elif self.content_is_testharness:
        # One manifest item per declared variant (always at least one,
        # since test_variants returns [""] when nothing is declared).
        rv = []
        for variant in self.test_variants:
            url = self.url + variant
            rv.append(TestharnessTest(self, url, timeout=self.timeout))
    elif self.content_is_ref_node:
        rv = [RefTest(self, self.url, self.references, timeout=self.timeout)]
    else:
        # If nothing else it's a helper file, which we don't have a specific type for
        rv = []
    return rv
|
bsd-3-clause
|
noelbk/neutron-juniper
|
neutron/tests/unit/nec/test_portbindings.py
|
9
|
15003
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 NEC Corporation
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Akihiro Motoki, NEC Corporation
from testtools import matchers
from webob import exc
from neutron.common import exceptions as q_exc
from neutron import context
from neutron.extensions import portbindings
from neutron.tests.unit import _test_extension_portbindings as test_bindings
from neutron.tests.unit.nec import test_nec_plugin
from neutron.tests.unit import test_security_groups_rpc as test_sg_rpc
class TestNecPortBinding(test_bindings.PortBindingsTestCase,
                         test_nec_plugin.NecPluginV2TestCase):
    """Port binding tests for the NEC plugin using the hybrid firewall
    driver (OVS VIF type with port filtering enabled)."""
    VIF_TYPE = portbindings.VIF_TYPE_OVS
    HAS_PORT_FILTER = True
    FIREWALL_DRIVER = test_sg_rpc.FIREWALL_HYBRID_DRIVER

    def setUp(self):
        # The firewall driver must be configured before the plugin starts.
        test_sg_rpc.set_firewall_driver(self.FIREWALL_DRIVER)
        super(TestNecPortBinding, self).setUp()
class TestNecPortBindingNoSG(TestNecPortBinding):
    """Same binding tests with the noop firewall (no port filtering)."""
    HAS_PORT_FILTER = False
    FIREWALL_DRIVER = test_sg_rpc.FIREWALL_NOOP_DRIVER
class TestNecPortBindingHost(
    test_bindings.PortBindingsHostTestCaseMixin,
    test_nec_plugin.NecPluginV2TestCase):
    """Host binding tests; all behavior comes from the mixin."""
    pass
class TestNecPortBindingPortInfo(test_nec_plugin.NecPluginV2TestCase):
    """Tests for switch port info (datapath_id/port_no) passed through the
    binding:profile attribute, and for how the plugin creates/deletes
    OFC ports in response to portinfo changes."""

    def _get_portinfo(self, datapath_id=None, port_no=None, prefix=None):
        """Build a portinfo dict; the defaults form a valid profile."""
        if datapath_id is None:
            datapath_id = '0xabc'
        if port_no is None:
            port_no = 1
        if prefix is None:
            prefix = 'portinfo:'
        return {prefix + 'datapath_id': datapath_id,
                prefix + 'port_no': port_no}

    def _check_response_portbinding_profile(self, port, datapath_id=None,
                                            port_no=None):
        # The response profile keeps the 'portinfo:' prefix; compare each
        # entry against the unprefixed expected values.
        expected = self._get_portinfo(datapath_id, port_no, prefix='')
        profile = port[portbindings.PROFILE]
        self.assertEqual(len(profile), 2)
        self.assertEqual(profile['portinfo:datapath_id'],
                         expected['datapath_id'])
        self.assertEqual(profile['portinfo:port_no'],
                         expected['port_no'])

    def _check_response_portbinding_no_profile(self, port):
        # A valid port response without the (admin-only) binding profile.
        self.assertIn('status', port)
        self.assertNotIn(portbindings.PROFILE, port)

    def _get_non_admin_context(self):
        """Return a request context for a regular (non-admin) user."""
        return context.Context(user_id=None,
                               tenant_id=self._tenant_id,
                               is_admin=False,
                               read_deleted="no")

    def test_port_create_portinfo(self):
        """Creating a port with portinfo creates one OFC port and the
        profile is only visible to admin users."""
        profile_arg = {portbindings.PROFILE: self._get_portinfo()}
        with self.port(arg_list=(portbindings.PROFILE,),
                       **profile_arg) as port:
            port_id = port['port']['id']
            # Check a response of create_port
            self._check_response_portbinding_profile(port['port'])
            self.assertEqual(self.ofc.create_ofc_port.call_count, 1)
            # Check a response of get_port
            ctx = context.get_admin_context()
            port = self._show('ports', port_id, neutron_context=ctx)['port']
            self._check_response_portbinding_profile(port)
            # By default user is admin - now test non admin user
            ctx = self._get_non_admin_context()
            non_admin_port = self._show(
                'ports', port_id, neutron_context=ctx)['port']
            self._check_response_portbinding_no_profile(non_admin_port)
            # port-update with non admin user should fail
            self._update('ports', port_id,
                         {'port': profile_arg},
                         expected_code=404,
                         neutron_context=ctx)

    def test_port_update_portinfo(self):
        """Adding portinfo via port-update triggers OFC port creation."""
        profile_arg = {portbindings.PROFILE: self._get_portinfo()}
        with self.port() as port:
            self.assertEqual(self.ofc.create_ofc_port.call_count, 0)
            port_id = port['port']['id']
            # Check a response of create_port
            self._check_response_portbinding_no_profile(port['port'])
            # Check a response of update_port
            ctx = context.get_admin_context()
            port = self._update('ports', port_id, {'port': profile_arg},
                                neutron_context=ctx)['port']
            self.assertEqual(self.ofc.create_ofc_port.call_count, 1)
            self._check_response_portbinding_profile(port)
            port = self._show('ports', port_id, neutron_context=ctx)['port']
            self._check_response_portbinding_profile(port)

    def test_port_update_portinfo_detail(self):
        """OFC port create/delete counts across add, no-op update, modify
        and removal (empty dict) of the portinfo profile."""
        with self.port() as port:
            self.assertEqual(self.ofc.create_ofc_port.call_count, 0)
            self.assertEqual(self.ofc.delete_ofc_port.call_count, 0)
            port_id = port['port']['id']
            ctx = context.get_admin_context()
            # add portinfo
            profile_arg = {portbindings.PROFILE: self._get_portinfo()}
            port = self._update('ports', port_id, {'port': profile_arg},
                                neutron_context=ctx)['port']
            self.assertEqual(self.ofc.create_ofc_port.call_count, 1)
            self.assertEqual(self.ofc.delete_ofc_port.call_count, 0)
            # portinfo unchanged
            port = self._update('ports', port_id, {'port': profile_arg},
                                neutron_context=ctx)['port']
            self.assertEqual(self.ofc.create_ofc_port.call_count, 1)
            self.assertEqual(self.ofc.delete_ofc_port.call_count, 0)
            # modify portinfo
            profile_arg = {portbindings.PROFILE:
                           self._get_portinfo(datapath_id='0x1234567890',
                                              port_no=99)}
            port = self._update('ports', port_id, {'port': profile_arg},
                                neutron_context=ctx)['port']
            self.assertEqual(self.ofc.create_ofc_port.call_count, 2)
            self.assertEqual(self.ofc.delete_ofc_port.call_count, 1)
            # delete portinfo with an empty dict
            profile_arg = {portbindings.PROFILE: {}}
            port = self._update('ports', port_id, {'port': profile_arg},
                                neutron_context=ctx)['port']
            self.assertEqual(self.ofc.create_ofc_port.call_count, 2)
            self.assertEqual(self.ofc.delete_ofc_port.call_count, 2)

    def test_port_update_portinfo_detail_clear_with_none(self):
        """Setting the profile to None clears portinfo (deletes OFC port)."""
        with self.port() as port:
            self.assertEqual(self.ofc.create_ofc_port.call_count, 0)
            self.assertEqual(self.ofc.delete_ofc_port.call_count, 0)
            port_id = port['port']['id']
            ctx = context.get_admin_context()
            # add portinfo
            profile_arg = {portbindings.PROFILE: self._get_portinfo()}
            port = self._update('ports', port_id, {'port': profile_arg},
                                neutron_context=ctx)['port']
            self.assertEqual(self.ofc.create_ofc_port.call_count, 1)
            self.assertEqual(self.ofc.delete_ofc_port.call_count, 0)
            # delete portinfo with None
            profile_arg = {portbindings.PROFILE: None}
            port = self._update('ports', port_id, {'port': profile_arg},
                                neutron_context=ctx)['port']
            self.assertEqual(self.ofc.create_ofc_port.call_count, 1)
            self.assertEqual(self.ofc.delete_ofc_port.call_count, 1)

    def test_port_create_portinfo_with_empty_dict(self):
        """An empty profile at create time means no portinfo; it can be
        added later via port-update."""
        profile_arg = {portbindings.PROFILE: {}}
        with self.port(arg_list=(portbindings.PROFILE,),
                       **profile_arg) as port:
            port_id = port['port']['id']
            # Check a response of create_port
            self._check_response_portbinding_no_profile(port['port'])
            self.assertEqual(self.ofc.create_ofc_port.call_count, 0)
            # add portinfo
            ctx = context.get_admin_context()
            profile_arg = {portbindings.PROFILE: self._get_portinfo()}
            port = self._update('ports', port_id, {'port': profile_arg},
                                neutron_context=ctx)['port']
            self._check_response_portbinding_profile(port)
            self.assertEqual(self.ofc.create_ofc_port.call_count, 1)
            self.assertEqual(self.ofc.delete_ofc_port.call_count, 0)

    def test_port_create_portinfo_with_none(self):
        """A None profile at create time behaves like no portinfo."""
        profile_arg = {portbindings.PROFILE: None}
        with self.port(arg_list=(portbindings.PROFILE,),
                       **profile_arg) as port:
            port_id = port['port']['id']
            # Check a response of create_port
            self._check_response_portbinding_no_profile(port['port'])
            self.assertEqual(self.ofc.create_ofc_port.call_count, 0)
            # add portinfo
            ctx = context.get_admin_context()
            profile_arg = {portbindings.PROFILE: self._get_portinfo()}
            port = self._update('ports', port_id, {'port': profile_arg},
                                neutron_context=ctx)['port']
            self._check_response_portbinding_profile(port)
            self.assertEqual(self.ofc.create_ofc_port.call_count, 1)
            self.assertEqual(self.ofc.delete_ofc_port.call_count, 0)

    def test_port_create_portinfo_non_admin(self):
        """Non-admin users may not supply binding:profile on create (403)."""
        with self.network(set_context=True, tenant_id='test') as net1:
            with self.subnet(network=net1) as subnet1:
                profile_arg = {portbindings.PROFILE: self._get_portinfo()}
                try:
                    with self.port(subnet=subnet1,
                                   expected_res_status=403,
                                   arg_list=(portbindings.PROFILE,),
                                   set_context=True, tenant_id='test',
                                   **profile_arg):
                        pass
                except exc.HTTPClientError:
                    pass
                self.assertEqual(self.ofc.create_ofc_port.call_count, 0)

    def test_port_update_portinfo_non_admin(self):
        """Non-admin users may not update binding:profile."""
        profile_arg = {portbindings.PROFILE: self._get_portinfo()}
        with self.network() as net1:
            with self.subnet(network=net1) as subnet1:
                with self.port(subnet=subnet1) as port:
                    # By default user is admin - now test non admin user
                    # Note that 404 is returned when prohibit by policy.
                    # See comment for PolicyNotAuthorized except clause
                    # in update() in neutron.api.v2.base.Controller.
                    port_id = port['port']['id']
                    ctx = self._get_non_admin_context()
                    port = self._update('ports', port_id,
                                        {'port': profile_arg},
                                        expected_code=404,
                                        neutron_context=ctx)
                    self.assertEqual(self.ofc.create_ofc_port.call_count, 0)

    def test_port_create_portinfo_validation_called(self):
        # Check validate_portinfo is called.
        # port_no above the 16-bit maximum must be rejected with 400.
        profile_arg = {portbindings.PROFILE:
                       {'portinfo:datapath_id': '0xabc',
                        'portinfo:port_no': 0xffff + 1}}
        try:
            with self.port(arg_list=(portbindings.PROFILE,),
                           expected_res_status=400,
                           **profile_arg):
                pass
        except exc.HTTPClientError:
            pass
class TestNecPortBindingValidatePortInfo(test_nec_plugin.NecPluginV2TestCase):
    """Unit tests for the plugin's _validate_portinfo helper."""

    def test_validate_portinfo_ok(self):
        profile = {'portinfo:datapath_id': '0x1234567890abcdef',
                   'portinfo:port_no': 123}
        portinfo = self.plugin._validate_portinfo(profile)
        # NOTE(mriedem): Handle long integer conversion universally.
        self.assertEqual(
            0x1234567890abcdef,
            int(portinfo['datapath_id'].replace('L', ''), 16)
        )
        self.assertEqual(portinfo['port_no'], 123)

    def test_validate_portinfo_ok_without_0x(self):
        # The '0x' prefix on datapath_id is optional.
        profile = {'portinfo:datapath_id': '1234567890abcdef',
                   'portinfo:port_no': 123}
        portinfo = self.plugin._validate_portinfo(profile)
        # NOTE(mriedem): Handle long integer conversion universally.
        self.assertEqual(
            0x1234567890abcdef,
            int(portinfo['datapath_id'].replace('L', ''), 16)
        )
        self.assertEqual(portinfo['port_no'], 123)

    def _test_validate_exception(self, profile, expected_msg):
        # Invalid profiles raise InvalidInput whose message starts with
        # expected_msg.
        e = self.assertRaises(q_exc.InvalidInput,
                              self.plugin._validate_portinfo, profile)
        self.assertThat(str(e), matchers.StartsWith(expected_msg))

    def test_validate_portinfo_dict_validation(self):
        # Both datapath_id and port_no keys are mandatory.
        expected_msg = ("Invalid input for operation: "
                        "Validation of dictionary's keys failed.")
        profile = {'portinfo:port_no': 123}
        self._test_validate_exception(profile, expected_msg)
        profile = {'portinfo:datapath_id': '0xabcdef'}
        self._test_validate_exception(profile, expected_msg)

    def test_validate_portinfo_negative_port_number(self):
        profile = {'portinfo:datapath_id': '0x1234567890abcdef',
                   'portinfo:port_no': -1}
        expected_msg = ("Invalid input for operation: "
                        "'-1' should be non-negative.")
        self._test_validate_exception(profile, expected_msg)

    def test_validate_portinfo_invalid_datapath_id(self):
        expected_msg = ("Invalid input for operation: "
                        "portinfo:datapath_id should be a hex string")
        # non hexidecimal datapath_id
        profile = {'portinfo:datapath_id': 'INVALID',
                   'portinfo:port_no': 123}
        self._test_validate_exception(profile, expected_msg)
        # Too big datapath_id
        profile = {'portinfo:datapath_id': '0x10000000000000000',
                   'portinfo:port_no': 123}
        self._test_validate_exception(profile, expected_msg)

    def test_validate_portinfo_too_big_port_number(self):
        profile = {'portinfo:datapath_id': '0x1234567890abcdef',
                   'portinfo:port_no': 65536}
        expected_msg = ("Invalid input for operation: "
                        "portinfo:port_no should be [0:65535]")
        self._test_validate_exception(profile, expected_msg)
|
apache-2.0
|
40223211/2015cd_midterm-
|
wsgi.py
|
1
|
27730
|
#@+leo-ver=5-thin
#@+node:2014fall.20141212095015.1775: * @file wsgi.py
# coding=utf-8
# 上面的程式內容編碼必須在程式的第一或者第二行才會有作用
################# (1) 模組導入區
# 導入 cherrypy 模組, 為了在 OpenShift 平台上使用 cherrypy 模組, 必須透過 setup.py 安裝
#@@language python
#@@tabwidth -4
#@+<<declarations>>
#@+node:2014fall.20141212095015.1776: ** <<declarations>> (wsgi)
import cherrypy
# 導入 Python 內建的 os 模組, 因為 os 模組為 Python 內建, 所以無需透過 setup.py 安裝
import os
# 導入 random 模組
import random
# 導入 gear 模組
import gear
################# (2) Global configuration
# Directory containing this program file (on Windows the result keeps a
# trailing backslash).
_curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
# Choose the data/download directories for cloud vs. local execution.
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
    # Running on OpenShift (cloud): use the platform data directory.
    download_root_dir = os.environ['OPENSHIFT_DATA_DIR']
    data_dir = os.environ['OPENSHIFT_DATA_DIR']
else:
    # Running locally: keep data next to the program file.
    download_root_dir = _curdir + "/local_data/"
    data_dir = _curdir + "/local_data/"
'''以下為近端 input() 與 for 迴圈應用的程式碼, 若要將程式送到 OpenShift 執行, 除了採用 CherryPy 網際框架外, 還要轉為 html 列印
# 利用 input() 取得的資料型別為字串
toprint = input("要印甚麼內容?")
# 若要將 input() 取得的字串轉為整數使用, 必須利用 int() 轉換
repeat_no = int(input("重複列印幾次?"))
for i in range(repeat_no):
    print(toprint)
'''
#@-<<declarations>>
#@+others
#@+node:2014fall.20141212095015.1777: ** class Hello
################# (3) 程式類別定義區
# 以下改用 CherryPy 網際框架程式架構
# 以下為 Hello 類別的設計內容, 其中的 object 使用, 表示 Hello 類別繼承 object 的所有特性, 包括方法與屬性設計
class Hello(object):
    """CherryPy application serving gear-drawing demos (Brython canvas
    pages) and a number-guessing game.

    Session state (the guessing-game answer and guess count) is stored in
    files under data_dir/tmp. NOTE(review): the dump this was recovered
    from stripped indentation, so string-literal layout is reconstructed.
    """
    # CherryPy per-application configuration: UTF-8 encoding and
    # file-backed sessions with a 60-minute timeout.
    _cp_config = {
        'tools.encode.encoding': 'utf-8',
        'tools.sessions.on' : True,
        'tools.sessions.storage_type' : 'file',
        #'tools.sessions.locking' : 'explicit',
        # session 以檔案儲存, 而且位於 data_dir 下的 tmp 目錄
        'tools.sessions.storage_path' : data_dir+'/tmp',
        # session 有效時間設為 60 分鐘
        'tools.sessions.timeout' : 60
    }
    #@+others
    #@+node:2014fall.20141212095015.2004: *3* __init__
    def __init__(self):
        """Create the working directories (tmp/downloads/images) if needed."""
        # 配合透過案例啟始建立所需的目錄
        if not os.path.isdir(data_dir+'/tmp'):
            os.mkdir(data_dir+'/tmp')
        if not os.path.isdir(data_dir+"/downloads"):
            os.mkdir(data_dir+"/downloads")
        if not os.path.isdir(data_dir+"/images"):
            os.mkdir(data_dir+"/images")
    #@+node:2014fall.20141212095015.1778: *3* index_orig
    # 以 @ 開頭的 cherrypy.expose 為 decorator, 用來表示隨後的成員方法, 可以直接讓使用者以 URL 連結執行
    @cherrypy.expose
    # index 方法為 CherryPy 各類別成員方法中的內建(default)方法, 當使用者執行時未指定方法, 系統將會優先執行 index 方法
    # 有 self 的方法為類別中的成員方法, Python 程式透過此一 self 在各成員方法間傳遞物件內容
    def index_orig(self, toprint="40223211"):
        """Echo *toprint* (the original default page)."""
        return toprint
    #@+node:2014fall.20141212095015.1779: *3* hello
    @cherrypy.expose
    def hello(self, toprint="40223211"):
        """Echo *toprint*."""
        return toprint
    #@+node:2014fall.20141215194146.1791: *3* index
    @cherrypy.expose
    def index(self, guess=None):
        """Start a new guessing game and return the page with the guess
        form plus Brython canvas demos."""
        # 將標準答案存入 answer session 對應區
        theanswer = random.randint(1, 100)
        thecount = 0
        # 將答案與計算次數變數存進 session 對應變數
        cherrypy.session['answer'] = theanswer
        cherrypy.session['count'] = thecount
        # 印出讓使用者輸入的超文件表單
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=POST action=doCheck>
請輸入您所猜的整數:<input type=text name=guess><br />
<input type=submit value=send>
</form>
<hr>
<!-- 以下在網頁內嵌 Brython 程式 -->
<script type="text/python">
from browser import document, alert
def echo(ev):
    alert(document["zone"].value)
# 將文件中名稱為 mybutton 的物件, 透過 click 事件與 echo 函式 bind 在一起
document['mybutton'].bind('click',echo)
</script>
<input id="zone"><button id="mybutton">click !</button>
<hr>
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
import math
# 畫布指定在名稱為 plotarea 的 canvas 上
# 以下使用中文變數名稱
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 用紅色畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(0, 500)
ctx.strokeStyle = "red"
ctx.stroke()
# 用藍色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 0)
ctx.strokeStyle = "blue"
ctx.stroke()
# 用綠色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 500)
ctx.strokeStyle = "green"
ctx.stroke()
# 用黑色畫一個圓
ctx.beginPath()
ctx.lineWidth = 3
ctx.strokeStyle = "black"
ctx.arc(250,250,50,0,2*math.pi)
ctx.stroke()
</script>
<canvas id="plotarea" width="800" height="600"></canvas>
</body>
</html>
'''
        return outstring
    #@+node:2015.20150330144929.1713: *3* twoDgear
    @cherrypy.expose
    # N 為齒數, M 為模數, P 為壓力角
    def twoDgear(self, N=20, M=5, P=15):
        """Return the input form for the 2D gear demo (posts to mygeartest)."""
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=POST action=mygeartest>
齒數:<input type=text name=N><br />
模數:<input type=text name=M><br />
壓力角:<input type=text name=P><br />
<input type=submit value=send>
</form>
</body>
</html>
'''
        return outstring
    #@+node:2015.20150331094055.1733: *3* threeDgear
    @cherrypy.expose
    # N 為齒數, M 為模數, P 為壓力角
    def threeDgear(self, N=20, M=5, P=15):
        """Return the input form for the 3D gear demo (posts to do3Dgear)."""
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=POST action=do3Dgear>
齒數:<input type=text name=N><br />
模數:<input type=text name=M><br />
壓力角:<input type=text name=P><br />
<input type=submit value=send>
</form>
</body>
</html>
'''
        return outstring
    #@+node:2015.20150330144929.1762: *3* do2Dgear
    @cherrypy.expose
    # N 為齒數, M 為模數, P 為壓力角
    def do2Dgear(self, N=20, M=5, P=15):
        """Draw demo lines on a canvas; N and M are spliced in as the
        starting point of the red line."""
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
import math
# 畫布指定在名稱為 plotarea 的 canvas 上
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 用紅色畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
'''
        outstring += '''
ctx.moveTo('''+str(N)+","+str(M)+")"
        outstring += '''
ctx.lineTo(0, 500)
ctx.strokeStyle = "red"
ctx.stroke()
# 用藍色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 0)
ctx.strokeStyle = "blue"
ctx.stroke()
# 用綠色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 500)
ctx.strokeStyle = "green"
ctx.stroke()
# 用黑色畫一個圓
ctx.beginPath()
ctx.lineWidth = 3
ctx.strokeStyle = "black"
ctx.arc(250,250,50,0,2*math.pi)
ctx.stroke()
</script>
<canvas id="plotarea" width="800" height="600"></canvas>
</body>
</html>
'''
        return outstring
    #@+node:2015.20150331094055.1735: *3* do3Dgear
    @cherrypy.expose
    # N 為齒數, M 為模數, P 為壓力角
    def do3Dgear(self, N=20, M=5, P=15):
        """Same drawing as do2Dgear (placeholder for the 3D version)."""
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
import math
# 畫布指定在名稱為 plotarea 的 canvas 上
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 用紅色畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
'''
        outstring += '''
ctx.moveTo('''+str(N)+","+str(M)+")"
        outstring += '''
ctx.lineTo(0, 500)
ctx.strokeStyle = "red"
ctx.stroke()
# 用藍色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 0)
ctx.strokeStyle = "blue"
ctx.stroke()
# 用綠色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 500)
ctx.strokeStyle = "green"
ctx.stroke()
# 用黑色畫一個圓
ctx.beginPath()
ctx.lineWidth = 3
ctx.strokeStyle = "black"
ctx.arc(250,250,50,0,2*math.pi)
ctx.stroke()
</script>
<canvas id="plotarea" width="800" height="600"></canvas>
</body>
</html>
'''
        return outstring
    #@+node:2015.20150330144929.1765: *3* mygeartest
    @cherrypy.expose
    # N 為齒數, M 為模數, P 為壓力角
    def mygeartest(self, N=20, M=15, P=9):
        """Draw an involute spur gear on a canvas; tooth count and module
        are read via Brython input() in the browser."""
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
from math import *
n = int(input("齒數"))
a = int(input("模數"))
# 準備在 id="plotarea" 的 canvas 中繪圖
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
def create_line(x1, y1, x2, y2, width=3, fill="red"):
    ctx.beginPath()
    ctx.lineWidth = width
    ctx.moveTo(x1, y1)
    ctx.lineTo(x2, y2)
    ctx.strokeStyle = fill
    ctx.stroke()
# 導入數學函式後, 圓周率為 pi
# deg 為角度轉為徑度的轉換因子
deg = pi/180.
#
# 以下分別為正齒輪繪圖與主 tkinter 畫布繪圖
#
# 定義一個繪正齒輪的繪圖函式
# midx 為齒輪圓心 x 座標
# midy 為齒輪圓心 y 座標
# rp 為節圓半徑, n 為齒數
def 齒輪(midx, midy, rp, n, 顏色):
    # 將角度轉換因子設為全域變數
    global deg
    # 齒輪漸開線分成 15 線段繪製
    imax = 15
    # 在輸入的畫布上繪製直線, 由圓心到節圓 y 軸頂點畫一直線
    create_line(midx, midy, midx, midy-rp)
    # 畫出 rp 圓, 畫圓函式尚未定義
    #create_oval(midx-rp, midy-rp, midx+rp, midy+rp, width=2)
    # a 為模數 (代表公制中齒的大小), 模數為節圓直徑(稱為節徑)除以齒數
    # 模數也就是齒冠大小
    #a=2*rp/n
    # d 為齒根大小, 為模數的 1.157 或 1.25倍, 這裡採 1.25 倍
    d=2.5*rp/n
    # ra 為齒輪的外圍半徑
    ra=rp+a
    print("ra:", ra)
    # 畫出 ra 圓, 畫圓函式尚未定義
    #create_oval(midx-ra, midy-ra, midx+ra, midy+ra, width=1)
    # rb 則為齒輪的基圓半徑
    # 基圓為漸開線長齒之基準圓
    rb=rp*cos(20*deg)
    print("rp:", rp)
    print("rb:", rb)
    # 畫出 rb 圓 (基圓), 畫圓函式尚未定義
    #create_oval(midx-rb, midy-rb, midx+rb, midy+rb, width=1)
    # rd 為齒根圓半徑
    rd=rp-d
    # 當 rd 大於 rb 時
    print("rd:", rd)
    # 畫出 rd 圓 (齒根圓), 畫圓函式尚未定義
    #create_oval(midx-rd, midy-rd, midx+rd, midy+rd, width=1)
    # dr 則為基圓到齒頂圓半徑分成 imax 段後的每段半徑增量大小
    # 將圓弧分成 imax 段來繪製漸開線
    dr=(ra-rb)/imax
    # tan(20*deg)-20*deg 為漸開線函數
    sigma=pi/(2*n)+tan(20*deg)-20*deg
    for j in range(n):
        ang=-2.*j*pi/n+sigma
        ang2=2.*j*pi/n+sigma
        lxd=midx+rd*sin(ang2-2.*pi/n)
        lyd=midy-rd*cos(ang2-2.*pi/n)
        #for(i=0;i<=imax;i++):
        for i in range(imax+1):
            r=rb+i*dr
            theta=sqrt((r*r)/(rb*rb)-1.)
            alpha=theta-atan(theta)
            xpt=r*sin(alpha-ang)
            ypt=r*cos(alpha-ang)
            xd=rd*sin(-ang)
            yd=rd*cos(-ang)
            # i=0 時, 繪線起點由齒根圓上的點, 作為起點
            if(i==0):
                last_x = midx+xd
                last_y = midy-yd
            # 由左側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
            create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
            # 最後一點, 則為齒頂圓
            if(i==imax):
                lfx=midx+xpt
                lfy=midy-ypt
            last_x = midx+xpt
            last_y = midy-ypt
        # the line from last end of dedendum point to the recent
        # end of dedendum point
        # lxd 為齒根圓上的左側 x 座標, lyd 則為 y 座標
        # 下列為齒根圓上用來近似圓弧的直線
        create_line((lxd),(lyd),(midx+xd),(midy-yd),fill=顏色)
        #for(i=0;i<=imax;i++):
        for i in range(imax+1):
            r=rb+i*dr
            theta=sqrt((r*r)/(rb*rb)-1.)
            alpha=theta-atan(theta)
            xpt=r*sin(ang2-alpha)
            ypt=r*cos(ang2-alpha)
            xd=rd*sin(ang2)
            yd=rd*cos(ang2)
            # i=0 時, 繪線起點由齒根圓上的點, 作為起點
            if(i==0):
                last_x = midx+xd
                last_y = midy-yd
            # 由右側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
            create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
            # 最後一點, 則為齒頂圓
            if(i==imax):
                rfx=midx+xpt
                rfy=midy-ypt
            last_x = midx+xpt
            last_y = midy-ypt
        # lfx 為齒頂圓上的左側 x 座標, lfy 則為 y 座標
        # 下列為齒頂圓上用來近似圓弧的直線
        create_line(lfx,lfy,rfx,rfy,fill=顏色)
齒輪(400,400,300,n,"blue")
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
</body>
</html>
'''
        return outstring
    #@+node:amd.20150415215023.1: *3* mygeartest2
    @cherrypy.expose
    # N 為齒數, M 為模數, P 為壓力角
    def mygeartest2(self, N=20, M=5, P=15):
        """Draw a spur gear using the spur.py helper module (Brython)."""
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
from math import *
# 請注意, 這裡導入位於 Lib/site-packages 目錄下的 spur.py 檔案
import spur
# 準備在 id="plotarea" 的 canvas 中繪圖
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 以下利用 spur.py 程式進行繪圖, 接下來的協同設計運算必須要配合使用者的需求進行設計運算與繪圖
# 其中並將工作分配給其他組員建立類似 spur.py 的相關零件繪圖模組
# midx, midy 為齒輪圓心座標, rp 為節圓半徑, n 為齒數, pa 為壓力角, color 為線的顏色
# Gear(midx, midy, rp, n=20, pa=20, color="black"):
# 模數決定齒的尺寸大小, 囓合齒輪組必須有相同的模數與壓力角
n_g1 = int(input("齒數"))
m = int(input("模數"))
pa = int(input("壓力角"))
rp_g1 = m*n_g1/2
ctx.save()
# translate to the origin of second gear
ctx.translate(400,400)
# rotate to engage
ctx.rotate(pi/2)
# put it back
ctx.translate(-400,-400)
spur.Spur(ctx).Gear(400,400,rp_g1,n_g1, pa, "blue")
ctx.restore()
</script>
<canvas id="plotarea" width="1200" height="1200"></canvas>
</body>
</html>
'''
        return outstring
    #@+node:2015.20150331094055.1737: *3* my3Dgeartest
    @cherrypy.expose
    # N 為齒數, M 為模數, P 為壓力角
    def my3Dgeartest(self, N=20, M=5, P=15):
        """Draw a 41-tooth involute gear with an inline Brython routine."""
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
from math import *
# 準備在 id="plotarea" 的 canvas 中繪圖
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
def create_line(x1, y1, x2, y2, width=3, fill="red"):
    ctx.beginPath()
    ctx.lineWidth = width
    ctx.moveTo(x1, y1)
    ctx.lineTo(x2, y2)
    ctx.strokeStyle = fill
    ctx.stroke()
# 導入數學函式後, 圓周率為 pi
# deg 為角度轉為徑度的轉換因子
deg = pi/180.
#
# 以下分別為正齒輪繪圖與主 tkinter 畫布繪圖
#
# 定義一個繪正齒輪的繪圖函式
# midx 為齒輪圓心 x 座標
# midy 為齒輪圓心 y 座標
# rp 為節圓半徑, n 為齒數
def gear(midx, midy, rp, n, 顏色):
    # 將角度轉換因子設為全域變數
    global deg
    # 齒輪漸開線分成 15 線段繪製
    imax = 15
    # 在輸入的畫布上繪製直線, 由圓心到節圓 y 軸頂點畫一直線
    create_line(midx, midy, midx, midy-rp)
    # 畫出 rp 圓, 畫圓函式尚未定義
    #create_oval(midx-rp, midy-rp, midx+rp, midy+rp, width=2)
    # a 為模數 (代表公制中齒的大小), 模數為節圓直徑(稱為節徑)除以齒數
    # 模數也就是齒冠大小
    a=2*rp/n
    # d 為齒根大小, 為模數的 1.157 或 1.25倍, 這裡採 1.25 倍
    d=2.5*rp/n
    # ra 為齒輪的外圍半徑
    ra=rp+a
    print("ra:", ra)
    # 畫出 ra 圓, 畫圓函式尚未定義
    #create_oval(midx-ra, midy-ra, midx+ra, midy+ra, width=1)
    # rb 則為齒輪的基圓半徑
    # 基圓為漸開線長齒之基準圓
    rb=rp*cos(20*deg)
    print("rp:", rp)
    print("rb:", rb)
    # 畫出 rb 圓 (基圓), 畫圓函式尚未定義
    #create_oval(midx-rb, midy-rb, midx+rb, midy+rb, width=1)
    # rd 為齒根圓半徑
    rd=rp-d
    # 當 rd 大於 rb 時
    print("rd:", rd)
    # 畫出 rd 圓 (齒根圓), 畫圓函式尚未定義
    #create_oval(midx-rd, midy-rd, midx+rd, midy+rd, width=1)
    # dr 則為基圓到齒頂圓半徑分成 imax 段後的每段半徑增量大小
    # 將圓弧分成 imax 段來繪製漸開線
    dr=(ra-rb)/imax
    # tan(20*deg)-20*deg 為漸開線函數
    sigma=pi/(2*n)+tan(20*deg)-20*deg
    for j in range(n):
        ang=-2.*j*pi/n+sigma
        ang2=2.*j*pi/n+sigma
        lxd=midx+rd*sin(ang2-2.*pi/n)
        lyd=midy-rd*cos(ang2-2.*pi/n)
        #for(i=0;i<=imax;i++):
        for i in range(imax+1):
            r=rb+i*dr
            theta=sqrt((r*r)/(rb*rb)-1.)
            alpha=theta-atan(theta)
            xpt=r*sin(alpha-ang)
            ypt=r*cos(alpha-ang)
            xd=rd*sin(-ang)
            yd=rd*cos(-ang)
            # i=0 時, 繪線起點由齒根圓上的點, 作為起點
            if(i==0):
                last_x = midx+xd
                last_y = midy-yd
            # 由左側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
            create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
            # 最後一點, 則為齒頂圓
            if(i==imax):
                lfx=midx+xpt
                lfy=midy-ypt
            last_x = midx+xpt
            last_y = midy-ypt
        # the line from last end of dedendum point to the recent
        # end of dedendum point
        # lxd 為齒根圓上的左側 x 座標, lyd 則為 y 座標
        # 下列為齒根圓上用來近似圓弧的直線
        create_line((lxd),(lyd),(midx+xd),(midy-yd),fill=顏色)
        #for(i=0;i<=imax;i++):
        for i in range(imax+1):
            r=rb+i*dr
            theta=sqrt((r*r)/(rb*rb)-1.)
            alpha=theta-atan(theta)
            xpt=r*sin(ang2-alpha)
            ypt=r*cos(ang2-alpha)
            xd=rd*sin(ang2)
            yd=rd*cos(ang2)
            # i=0 時, 繪線起點由齒根圓上的點, 作為起點
            if(i==0):
                last_x = midx+xd
                last_y = midy-yd
            # 由右側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
            create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
            # 最後一點, 則為齒頂圓
            if(i==imax):
                rfx=midx+xpt
                rfy=midy-ypt
            last_x = midx+xpt
            last_y = midy-ypt
        # lfx 為齒頂圓上的左側 x 座標, lfy 則為 y 座標
        # 下列為齒頂圓上用來近似圓弧的直線
        create_line(lfx,lfy,rfx,rfy,fill=顏色)
gear(400,400,300,41,"blue")
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
</body>
</html>
'''
        return outstring
    #@+node:2014fall.20141215194146.1793: *3* doCheck
    @cherrypy.expose
    def doCheck(self, guess=None):
        """Compare the user's guess with the session answer and report
        big/small/exact, re-rendering the guess form."""
        # 假如使用者直接執行 doCheck, 則設法轉回根方法
        if guess is None:
            raise cherrypy.HTTPRedirect("/")
        # 從 session 取出 answer 對應資料, 且處理直接執行 doCheck 時無法取 session 值情況
        try:
            theanswer = int(cherrypy.session.get('answer'))
        except:
            raise cherrypy.HTTPRedirect("/")
        # 經由表單所取得的 guess 資料型別為 string
        try:
            theguess = int(guess)
        except:
            return "error " + self.guessform()
        # 每執行 doCheck 一次,次數增量一次
        cherrypy.session['count'] += 1
        # 答案與所猜數字進行比對
        if theanswer < theguess:
            return "big " + self.guessform()
        elif theanswer > theguess:
            return "small " + self.guessform()
        else:
            # 已經猜對, 從 session 取出累計猜測次數
            # NOTE(review): thecount is currently unused - the final guess
            # count is never shown to the user.
            thecount = cherrypy.session.get('count')
            return "exact: <a href=''>再猜</a>"
    #@+node:2014fall.20141215194146.1789: *3* guessform
    def guessform(self):
        """Return the guess form HTML, prefixed (for debugging) with the
        current answer and guess count from the session."""
        # 印出讓使用者輸入的超文件表單
        outstring = str(cherrypy.session.get('answer')) + "/" + str(cherrypy.session.get('count')) + '''<form method=POST action=doCheck>
請輸入您所猜的整數:<input type=text name=guess><br />
<input type=submit value=send>
</form>'''
        return outstring
    #@-others
#@-others
################# (4) Application start-up
# Static file/directory configuration relative to the program directory.
application_conf = {'/static': {
        'tools.staticdir.on': True,
        # A 'static' directory must exist under the program directory.
        'tools.staticdir.dir': _curdir+"/static"},
    '/downloads': {
        'tools.staticdir.on': True,
        'tools.staticdir.dir': data_dir+"/downloads"},
    '/images': {
        'tools.staticdir.on': True,
        'tools.staticdir.dir': data_dir+"/images"}
}
root = Hello()
# Mount the gear sub-application under /gear.
root.gear = gear.Gear()
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
    # Running on OpenShift: expose a WSGI application object.
    application = cherrypy.Application(root, config=application_conf)
else:
    # Running locally: start the built-in CherryPy server.
    cherrypy.quickstart(root, config=application_conf)
#@-leo
|
gpl-3.0
|
thaim/ansible
|
lib/ansible/module_utils/facts/network/base.py
|
88
|
2400
|
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.facts.collector import BaseFactCollector
class Network:
    """Base class for platform-specific network fact gathering.

    Subclass this once per platform.  A subclass must provide:

      - ``interfaces``: a list of interface names
      - ``interface_<name>``: a dict of ipv4, ipv6 and mac address info

    Every subclass must also override ``platform``.
    """

    platform = 'Generic'

    # FIXME: remove load_on_init when we can
    def __init__(self, module, load_on_init=False):
        # load_on_init is accepted for API compatibility but is unused here.
        self.module = module

    # TODO: more or less abstract/NotImplemented
    def populate(self, collected_facts=None):
        """Return a dict of network facts; the generic base knows none."""
        return {}
class NetworkCollector(BaseFactCollector):
    """Fact collector that delegates to the platform Network class."""
    # MAYBE: we could try to build this based on the arch specific
    # implementation of Network() or its kin
    name = 'network'
    _fact_class = Network
    _fact_ids = {'interfaces',
                 'default_ipv4',
                 'default_ipv6',
                 'all_ipv4_addresses',
                 'all_ipv6_addresses'}

    IPV6_SCOPE = {'0': 'global',
                  '10': 'host',
                  '20': 'link',
                  '40': 'admin',
                  '50': 'site',
                  '80': 'organization'}

    def collect(self, module=None, collected_facts=None):
        """Build the platform Network object and return its facts dict.

        Returns {} when no module is available.  NOTE: Network may modify
        the facts mapping it receives by side effect.
        """
        if not module:
            return {}
        facts_obj = self._fact_class(module)
        return facts_obj.populate(collected_facts=collected_facts or {})
|
mit
|
ourbest/sns_app
|
backend/dates.py
|
1
|
1240
|
from datetime import datetime, timedelta
from dj import times
def get_date(date=None):
    """Normalize *date* to a localized datetime at midnight.

    Accepts an ISO 'YYYY-MM-DD' string (only the first 10 chars are read),
    a datetime, or None (meaning "now").
    """
    if isinstance(date, str) and date:
        # strptime already yields midnight, so no replace() is needed here.
        return times.localtime(datetime.strptime(date[:10], '%Y-%m-%d'))
    base = date if date else datetime.now()
    return times.localtime(base).replace(hour=0, minute=0, second=0, microsecond=0)
def today():
    # Convenience wrapper: start of the current day as a localized datetime.
    return get_date()
def yesterday():
    """Return the start of the previous day as a localized datetime."""
    one_day = timedelta(days=1)
    return today() - one_day
def delta(date_str, days):
    """Shift an ISO 'YYYY-MM-DD' string by *days* and return the result
    in the same format.  *days* may be negative."""
    shifted = datetime.strptime(date_str, '%Y-%m-%d') + timedelta(days=days)
    return shifted.strftime('%Y-%m-%d')
def current_week():
    """Return (sunday, saturday) datetimes bounding the current week.

    Weeks run Sunday..Saturday: weekday() is Monday=0..Sunday=6, so
    ``now - (weekday() + 1) days`` lands on the preceding Sunday.
    """
    now = today()
    # NOTE(review): datetime.weekday() never returns 7 (its range is 0-6),
    # so this branch is dead code -- possibly isoweekday() (Sunday == 7)
    # was intended.  TODO confirm intended Sunday behaviour before fixing.
    if now.weekday() == 7:
        return now, now + timedelta(days=7)
    sunday = now - timedelta(now.weekday() + 1)
    return sunday, sunday + timedelta(6)
def plus_week(delta):
    """Shift the current week's (start, end) pair by *delta* whole weeks."""
    start, end = current_week()
    shift = timedelta(7 * delta)
    return start + shift, end + shift
def to_str(week, format='%Y-%m-%d'):
    """Format a (from, to) datetime pair as a single 'from - to' string."""
    from_dt, to_dt = week
    return from_dt.strftime(format) + ' - ' + to_dt.strftime(format)
def to_date_str_range(week, format='%Y-%m-%d'):
    """Return the (from, to) datetime pair formatted as two strings."""
    return tuple(day.strftime(format) for day in week)
|
lgpl-3.0
|
julianwang/cinder
|
cinder/api/views/backups.py
|
8
|
3833
|
# Copyright (C) 2012 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from cinder.api import common
LOG = logging.getLogger(__name__)
class ViewBuilder(common.ViewBuilder):
    """Model backup API responses as a python dictionary."""

    _collection_name = "backups"

    def __init__(self):
        """Initialize view builder."""
        super(ViewBuilder, self).__init__()

    def summary_list(self, request, backups, origin_backup_count):
        """Show a list of backups without many details."""
        return self._list_view(self.summary, request, backups,
                               origin_backup_count)

    def detail_list(self, request, backups, origin_backup_count):
        """Detailed view of a list of backups."""
        return self._list_view(self.detail, request, backups,
                               origin_backup_count)

    def summary(self, request, backup):
        """Generic, non-detailed view of a backup."""
        links = self._get_links(request, backup['id'])
        return {'backup': {'id': backup['id'],
                           'name': backup['display_name'],
                           'links': links}}

    def restore_summary(self, request, restore):
        """Generic, non-detailed view of a restore."""
        view = {'backup_id': restore['backup_id'],
                'volume_id': restore['volume_id']}
        return {'restore': view}

    def detail(self, request, backup):
        """Detailed view of a single backup."""
        view = {'id': backup.get('id'),
                'status': backup.get('status'),
                'size': backup.get('size'),
                'object_count': backup.get('object_count'),
                'availability_zone': backup.get('availability_zone'),
                'container': backup.get('container'),
                'created_at': backup.get('created_at'),
                'name': backup.get('display_name'),
                'description': backup.get('display_description'),
                'fail_reason': backup.get('fail_reason'),
                'volume_id': backup.get('volume_id'),
                'links': self._get_links(request, backup['id'])}
        return {'backup': view}

    def _list_view(self, func, request, backups, origin_backup_count):
        """Provide a view for a list of backups."""
        views = [func(request, b)['backup'] for b in backups]
        links = self._get_collection_links(request,
                                           backups,
                                           self._collection_name,
                                           origin_backup_count)
        result = dict(backups=views)
        # Pagination links are only attached when there is a next page.
        if links:
            result['backups_links'] = links
        return result

    def export_summary(self, request, export):
        """Generic view of an export."""
        record = {'backup_service': export['backup_service'],
                  'backup_url': export['backup_url']}
        return {'backup-record': record}
|
apache-2.0
|
rofehr/enigma2
|
lib/python/Components/Timeshift.py
|
1
|
69940
|
# -*- coding: utf-8 -*-
# InfoBarTimeshift requires InfoBarSeek, instantiated BEFORE!
# Hrmf.
#
# Timeshift works the following way:
# demux0 demux1 "TimeshiftActions" "TimeshiftActivateActions" "SeekActions"
# - normal playback TUNER unused PLAY enable disable disable
# - user presses "yellow" button. FILE record PAUSE enable disable enable
# - user presess pause again FILE record PLAY enable disable enable
# - user fast forwards FILE record FF enable disable enable
# - end of timeshift buffer reached TUNER record PLAY enable enable disable
# - user backwards FILE record BACK # !! enable disable enable
#
# in other words:
# - when a service is playing, pressing the "timeshiftStart" button ("yellow") enables recording ("enables timeshift"),
# freezes the picture (to indicate timeshift), sets timeshiftMode ("activates timeshift")
# now, the service becomes seekable, so "SeekActions" are enabled, "TimeshiftEnableActions" are disabled.
# - the user can now PVR around
# - if it hits the end, the service goes into live mode ("deactivates timeshift", it's of course still "enabled")
# the service looses it's "seekable" state. It can still be paused, but just to activate timeshift right
# after!
# the seek actions will be disabled, but the timeshiftActivateActions will be enabled
# - if the user rewinds, or press pause, timeshift will be activated again
# note that a timeshift can be enabled ("recording") and
# activated (currently time-shifting).
from Components.ActionMap import ActionMap, HelpableActionMap
from Components.ServiceEventTracker import ServiceEventTracker
from Components.config import config
from Components.SystemInfo import SystemInfo
from Components.Task import job_manager as JobManager
from Screens.ChoiceBox import ChoiceBox
from Screens.MessageBox import MessageBox
import Screens.Standby
from ServiceReference import ServiceReference
from RecordTimer import RecordTimerEntry, parseEvent
from timer import TimerEntry
from Tools import ASCIItranslit, Notifications
from Tools.BoundFunction import boundFunction
from Tools.Directories import pathExists, fileExists, getRecordingFilename, copyfile, resolveFilename, SCOPE_TIMESHIFT, SCOPE_AUTORECORD
from Tools.TimeShift import CopyTimeshiftJob, MergeTimeshiftJob, CreateAPSCFilesJob
from enigma import eBackgroundFileEraser, eTimer, eServiceCenter, iServiceInformation, iPlayableService, eEPGCache
from boxbranding import getBoxType, getBrandOEM
from time import time, localtime, strftime
from random import randint
import os
class InfoBarTimeshift:
ts_disabled = False
def __init__(self):
    """Wire up timeshift key maps, service-event hooks, the PTS timers
    and the per-event bookkeeping attributes.

    NOTE(review): relies on InfoBarSeek (isSeekable/setSeekState/getSeek)
    and a live ``self.session`` being provided by the mixing class.
    """
    self["TimeshiftActions"] = HelpableActionMap(self, "InfobarTimeshiftActions",
        {
            "timeshiftStart": (self.startTimeshift, _("Start timeshift")), # the "yellow key"
            "timeshiftStop": (self.stopTimeshift, _("Stop timeshift")), # currently undefined :), probably 'TV'
            "instantRecord": self.instantRecord,
            "restartTimeshift": self.restartTimeshift
        }, prio=1)
    self["TimeshiftActivateActions"] = ActionMap(["InfobarTimeshiftActivateActions"],
        {
            "timeshiftActivateEnd": self.activateTimeshiftEnd, # something like "rewind key"
            "timeshiftActivateEndAndPause": self.activateTimeshiftEndAndPause # something like "pause key"
        }, prio=-1) # priority over record
    self["TimeshiftSeekPointerActions"] = ActionMap(["InfobarTimeshiftSeekPointerActions"],
        {
            "SeekPointerOK": self.ptsSeekPointerOK,
            "SeekPointerLeft": self.ptsSeekPointerLeft,
            "SeekPointerRight": self.ptsSeekPointerRight
        }, prio=-1)
    self["TimeshiftFileActions"] = ActionMap(["InfobarTimeshiftActions"],
        {
            "jumpPreviousFile": self.__evSOF,
            "jumpNextFile": self.__evEOF
        }, prio=-1) # priority over history
    # All maps start disabled; they are enabled once timeshift state allows.
    self["TimeshiftActions"].setEnabled(False)
    self["TimeshiftActivateActions"].setEnabled(False)
    self["TimeshiftSeekPointerActions"].setEnabled(False)
    self["TimeshiftFileActions"].setEnabled(False)
    self.switchToLive = True
    self.ptsStop = False
    self.ts_rewind_timer = eTimer()
    self.ts_rewind_timer.callback.append(self.rewindService)
    self.save_timeshift_file = False
    self.saveTimeshiftEventPopupActive = False
    self.__event_tracker = ServiceEventTracker(screen = self, eventmap =
        {
            iPlayableService.evStart: self.__serviceStarted,
            iPlayableService.evSeekableStatusChanged: self.__seekableStatusChanged,
            iPlayableService.evEnd: self.__serviceEnd,
            iPlayableService.evSOF: self.__evSOF,
            iPlayableService.evUpdatedInfo: self.__evInfoChanged,
            iPlayableService.evUpdatedEventInfo: self.__evEventInfoChanged,
            iPlayableService.evUser+1: self.ptsTimeshiftFileChanged
        })
    # Playback bookkeeping for the numbered pts_livebuffer_<n> files.
    self.pts_begintime = 0
    self.pts_switchtolive = False
    self.pts_firstplayable = 1
    self.pts_currplaying = 1
    self.pts_nextplaying = 0
    self.pts_lastseekspeed = 0
    self.pts_service_changed = False
    self.pts_file_changed = False
    self.pts_record_running = self.session.nav.RecordTimer.isRecording()
    self.save_current_timeshift = False
    self.save_timeshift_postaction = None
    self.service_changed = 0
    self.event_changed = False
    self.checkEvents_value = int(config.timeshift.timeshiftCheckEvents.value)
    self.pts_starttime = time()
    self.ptsAskUser_wait = False
    # Init Global Variables
    self.session.ptsmainloopvalue = 0
    config.timeshift.isRecording.value = False
    # Init eBackgroundFileEraser
    self.BgFileEraser = eBackgroundFileEraser.getInstance()
    # Init PTS Delay-Timer
    self.pts_delay_timer = eTimer()
    self.pts_delay_timer.callback.append(self.autostartAutorecordTimeshift)
    # Init PTS MergeRecords-Timer
    self.pts_mergeRecords_timer = eTimer()
    self.pts_mergeRecords_timer.callback.append(self.ptsMergeRecords)
    # Init PTS Merge Cleanup-Timer
    self.pts_mergeCleanUp_timer = eTimer()
    self.pts_mergeCleanUp_timer.callback.append(self.ptsMergePostCleanUp)
    # Init PTS QuitMainloop-Timer
    self.pts_QuitMainloop_timer = eTimer()
    self.pts_QuitMainloop_timer.callback.append(self.ptsTryQuitMainloop)
    # Init PTS CleanUp-Timer
    self.pts_cleanUp_timer = eTimer()
    self.pts_cleanUp_timer.callback.append(self.ptsCleanTimeshiftFolder)
    # Init PTS CleanEvent-Timer
    self.pts_cleanEvent_timer = eTimer()
    self.pts_cleanEvent_timer.callback.append(self.ptsEventCleanTimeshiftFolder)
    # Init PTS SeekBack-Timer
    self.pts_SeekBack_timer = eTimer()
    self.pts_SeekBack_timer.callback.append(self.ptsSeekBackTimer)
    self.pts_StartSeekBackTimer = eTimer()
    self.pts_StartSeekBackTimer.callback.append(self.ptsStartSeekBackTimer)
    # Init PTS CheckFileChanged-Timer
    self.pts_CheckFileChanged_timer = eTimer()
    self.pts_CheckFileChanged_timer.callback.append(self.ptsCheckFileChanged)
    # Init Block-Zap Timer
    self.pts_blockZap_timer = eTimer()
    # Record Event Tracker
    self.session.nav.RecordTimer.on_state_change.append(self.ptsTimerEntryStateChange)
    # Keep Current Event Info for recordings
    self.pts_eventcount = 0
    self.pts_curevent_begin = int(time())
    self.pts_curevent_end = 0
    self.pts_curevent_name = _("Timeshift")
    self.pts_curevent_description = ""
    self.pts_curevent_servicerefname = ""
    self.pts_curevent_station = ""
    self.pts_curevent_eventid = None
    # Init PTS Infobar
def __seekableStatusChanged(self):
    """React to seekability changes: toggle the timeshift action maps
    and, when playback returns to live, re-point the next-playback
    file at the current live buffer."""
    # print '__seekableStatusChanged'
    self["TimeshiftActivateActions"].setEnabled(not self.isSeekable() and self.timeshiftEnabled())
    state = self.getSeek() is not None and self.timeshiftEnabled()
    self["SeekActionsPTS"].setEnabled(state)
    self["TimeshiftFileActions"].setEnabled(state)
    # print ('__seekableStatusChanged - state %s, seekstate %s' % (state, self.seekstate))
    if not state and self.pts_currplaying == self.pts_eventcount:
        self.setSeekState(self.SEEK_STATE_PLAY)
        if self.pts_eventcount < self.pts_firstplayable:
            self.pts_firstplayable = self.pts_eventcount
        self.restartSubtitle()
    # print ('[TIMESHIFT] - pts_currplaying %s, pts_nextplaying %s, pts_eventcount %s, pts_firstplayable %s' % (self.pts_currplaying, self.pts_nextplaying, self.pts_eventcount, self.pts_firstplayable))
    if self.timeshiftEnabled() and not self.isSeekable():
        self.ptsSeekPointerReset()
        if int(config.timeshift.startdelay.value):
            if self.pts_starttime <= (time()-5):
                self.pts_blockZap_timer.start(3000, True)
            self.pts_currplaying = self.pts_eventcount
            self.pts_nextplaying = 0
            self.pts_file_changed = True
            self.ptsSetNextPlaybackFile("pts_livebuffer_%s" % self.pts_eventcount)
    # print ('[TIMESHIFT] - pts_currplaying %s, pts_nextplaying %s, pts_eventcount %s, pts_firstplayable %s' % (self.pts_currplaying, self.pts_nextplaying, self.pts_eventcount, self.pts_firstplayable))
def __serviceStarted(self):
    """Service-start hook: flag the service change and (re)arm the
    delayed autostart of timeshift when a start delay is configured."""
    # print '__serviceStarted'
    self.service_changed = 1
    self.pts_service_changed = True
    # print 'self.timeshiftEnabled1',self.timeshiftEnabled()
    if self.pts_delay_timer.isActive():
        # print 'TS AUTO START TEST1'
        self.pts_delay_timer.stop()
    if int(config.timeshift.startdelay.value):
        # print 'TS AUTO START TEST2'
        self.pts_delay_timer.start(int(config.timeshift.startdelay.value) * 1000, True)
    self.__seekableStatusChanged()
def __serviceEnd(self):
    """Service-end hook: persist the current buffer if the user asked
    for it (also scheduling a follow-up recording when the event is
    still running), then refresh seekable state."""
    # print '!!!!! __serviceEnd'
    if self.save_current_timeshift:
        if self.pts_curevent_end > time():
            # Event still in progress: merge later and record the rest.
            self.SaveTimeshift("pts_livebuffer_%s" % self.pts_eventcount, mergelater=True)
            self.ptsRecordCurrentEvent()
        else:
            self.SaveTimeshift("pts_livebuffer_%s" % self.pts_eventcount)
    self.service_changed = 0
    if not config.timeshift.isRecording.value:
        self.__seekableStatusChanged()
def __evSOF(self):
    """Start-of-file hook: jump back to the previous timeshift buffer
    file, or restart the current one when already at the oldest."""
    # print '!!!!! jumpToPrevTimeshiftedEvent'
    if not self.timeshiftEnabled() or self.pts_CheckFileChanged_timer.isActive():
        return
    # print ('[TIMESHIFT] - pts_currplaying %s, pts_nextplaying %s, pts_eventcount %s, pts_firstplayable %s' % (self.pts_currplaying, self.pts_nextplaying, self.pts_eventcount, self.pts_firstplayable))
    self.pts_switchtolive = False
    self.pts_nextplaying = 0
    if self.pts_currplaying > self.pts_firstplayable:
        self.pts_currplaying -= 1
    else:
        # Already at the oldest buffer: just replay it from the start.
        self.setSeekState(self.SEEK_STATE_PLAY)
        self.doSeek(0)
        return
    # Switch to previous TS file by seeking forward to next file
    # print 'self.pts_currplaying2',self.pts_currplaying
    # print ("'!!!!! %spts_livebuffer_%s" % (config.usage.timeshift_path.value, self.pts_currplaying))
    if fileExists("%spts_livebuffer_%s" % (config.usage.timeshift_path.value, self.pts_currplaying), 'r'):
        self.ptsSetNextPlaybackFile("pts_livebuffer_%s" % self.pts_currplaying)
        self.setSeekState(self.SEEK_STATE_PLAY)
        # Seeking far past the end forces the player onto the queued file.
        self.doSeek(3600 * 24 * 90000)
        self.pts_CheckFileChanged_timer.start(1000, False)
        self.pts_file_changed = False
    else:
        print ('[TIMESHIFT] - "pts_livebuffer_%s" file was not found -> put pointer to the first (current) "pts_livebuffer_%s" file' % (self.pts_currplaying, self.pts_currplaying + 1))
        self.pts_currplaying += 1
        self.pts_firstplayable += 1
        self.setSeekState(self.SEEK_STATE_PLAY)
        self.doSeek(0)
    # print ('[TIMESHIFT] - pts_currplaying %s, pts_nextplaying %s, pts_eventcount %s, pts_firstplayable %s' % (self.pts_currplaying, self.pts_nextplaying, self.pts_eventcount, self.pts_firstplayable))
def __evEOF(self):
    """End-of-file hook: advance to the next timeshift buffer file, or
    switch back to live TV when no newer buffer exists."""
    # print '!!!!! jumpToNextTimeshiftedEvent'
    if not self.timeshiftEnabled() or self.pts_CheckFileChanged_timer.isActive():
        return
    # print ('[TIMESHIFT] - pts_currplaying %s, pts_nextplaying %s, pts_eventcount %s, pts_firstplayable %s' % (self.pts_currplaying, self.pts_nextplaying, self.pts_eventcount, self.pts_firstplayable))
    self.pts_switchtolive = False
    self.pts_nextplaying = 0
    self.pts_currplaying += 1
    # Switch to next TS file by seeking forward to next file
    # print 'self.pts_currplaying2',self.pts_currplaying
    # print ("'!!!!! %spts_livebuffer_%s" % (config.usage.timeshift_path.value, self.pts_currplaying))
    if fileExists("%spts_livebuffer_%s" % (config.usage.timeshift_path.value, self.pts_currplaying), 'r'):
        self.ptsSetNextPlaybackFile("pts_livebuffer_%s" % self.pts_currplaying)
        self.setSeekState(self.SEEK_STATE_PLAY)
        # Seeking far past the end forces the player onto the queued file.
        self.doSeek(3600 * 24 * 90000)
    else:
        # No newer buffer -> empty playback file means "go live".
        self.pts_switchtolive = True
        self.pts_currplaying -= 1
        self.ptsSetNextPlaybackFile("")
        self.setSeekState(self.SEEK_STATE_PLAY)
        self.doSeek(3600 * 24 * 90000)
    self.pts_CheckFileChanged_timer.start(1000, False)
    self.pts_file_changed = False
    # print ('[TIMESHIFT] - pts_currplaying %s, pts_nextplaying %s, pts_eventcount %s, pts_firstplayable %s' % (self.pts_currplaying, self.pts_nextplaying, self.pts_eventcount, self.pts_firstplayable))
def __evInfoChanged(self):
    """Service-info hook fired after a zap: save a pending buffer if
    the user requested it, and optionally discard old buffers."""
    # print '__evInfoChanged'
    # print 'service_changed',self.service_changed
    # print ('[TIMESHIFT] - pts_currplaying %s, pts_nextplaying %s, pts_eventcount %s, pts_firstplayable %s' % (self.pts_currplaying, self.pts_nextplaying, self.pts_eventcount, self.pts_firstplayable))
    if self.service_changed:
        self.service_changed = 0
        # We zapped away before saving the file, save it now!
        if self.save_current_timeshift:
            self.SaveTimeshift("pts_livebuffer_%s" % self.pts_eventcount)
        # Delete Timeshift Records on zap
        if config.timeshift.deleteAfterZap.value:
            self.ptsEventCleanTimerSTOP()
            self.pts_firstplayable = self.pts_eventcount + 1
            if self.pts_eventcount == 0 and not int(config.timeshift.startdelay.value):
                self.pts_cleanUp_timer.start(1000, True)
    # print ('[TIMESHIFT] - pts_currplaying %s, pts_nextplaying %s, pts_eventcount %s, pts_firstplayable %s' % (self.pts_currplaying, self.pts_nextplaying, self.pts_eventcount, self.pts_firstplayable))
def __evEventInfoChanged(self):
    """EPG-event-change hook: persist the running buffer if requested
    (adding a margin recording when configured) and schedule a
    timeshift (re)start for the new event."""
    # print '__evEventInfoChanged'
    # Get Current Event Info
    service = self.session.nav.getCurrentService()
    old_begin_time = self.pts_begintime
    info = service and service.info()
    ptr = info and info.getEvent(0)
    self.pts_begintime = ptr and ptr.getBeginTime() or 0
    # Save current TimeShift permanently now ...
    if info.getInfo(iServiceInformation.sVideoPID) != -1:
        # Take care of Record Margin Time ...
        if self.save_current_timeshift and self.timeshiftEnabled():
            if config.recording.margin_after.value > 0 and len(self.recording) == 0:
                self.SaveTimeshift(mergelater=True)
                recording = RecordTimerEntry(ServiceReference(self.session.nav.getCurrentlyPlayingServiceOrGroup()), time(), time()+(config.recording.margin_after.value * 60), self.pts_curevent_name, self.pts_curevent_description, self.pts_curevent_eventid, dirname = config.usage.autorecord_path.value)
                recording.dontSave = True
                self.session.nav.RecordTimer.record(recording)
                self.recording.append(recording)
            else:
                self.SaveTimeshift()
            if not config.timeshift.filesplitting.value:
                self.stopTimeshiftcheckTimeshiftRunningCallback(True)
        #(Re)Start TimeShift
        # print 'self.pts_delay_timer.isActive',self.pts_delay_timer.isActive()
        if not self.pts_delay_timer.isActive():
            # print 'TS AUTO START TEST4'
            if old_begin_time != self.pts_begintime or old_begin_time == 0:
                # print 'TS AUTO START TEST5'
                if int(config.timeshift.startdelay.value) or self.timeshiftEnabled():
                    self.event_changed = True
                    self.pts_delay_timer.start(1000, True)
def getTimeshift(self):
    """Return the timeshift interface of the running service.

    Returns None when timeshift is globally disabled or when no
    service is currently playing.
    """
    if self.ts_disabled:
        return None
    svc = self.session.nav.getCurrentService()
    if not svc:
        return svc
    return svc.timeshift()
def timeshiftEnabled(self):
    """Return whether timeshift recording is currently enabled.

    Falsy (None) when no timeshift interface is available.
    """
    ts = self.getTimeshift()
    if not ts:
        return ts
    return ts.isTimeshiftEnabled()
def startTimeshift(self):
    """Yellow-key handler: start timeshift and pause at the buffer end.

    Returns 0 when no timeshift interface is available.
    """
    ts = self.getTimeshift()
    if ts is None:
        # self.session.open(MessageBox, _("Timeshift not possible!"), MessageBox.TYPE_ERROR, timeout=5)
        return 0
    if ts.isTimeshiftEnabled():
        print "hu, timeshift already enabled?"
    else:
        self.activateAutorecordTimeshift()
        self.activateTimeshiftEndAndPause()
def stopTimeshift(self):
    """Stop-key handler: ask the running-timeshift check whether (and
    how) to leave timeshift.  Returns 0 when there is nothing to stop."""
    # print 'stopTimeshift'
    ts = self.getTimeshift()
    if ts and ts.isTimeshiftEnabled():
        # print 'TEST1'
        if int(config.timeshift.startdelay.value) and self.isSeekable():
            # print 'TEST2'
            self.switchToLive = True
            self.ptsStop = True
            self.checkTimeshiftRunning(self.stopTimeshiftcheckTimeshiftRunningCallback)
        elif not int(config.timeshift.startdelay.value):
            # print 'TEST2b'
            self.checkTimeshiftRunning(self.stopTimeshiftcheckTimeshiftRunningCallback)
        else:
            # print 'TES2c'
            return 0
    else:
        # print 'TEST3'
        return 0
def stopTimeshiftcheckTimeshiftRunningCallback(self, answer):
    """Callback from the stop-timeshift confirmation: either switch
    playback back to live (autorecord mode) or actually stop the
    timeshift recorder."""
    # print 'stopTimeshiftcheckTimeshiftRunningCallback'
    # print ' answer', answer
    if answer and int(config.timeshift.startdelay.value) and self.switchToLive and self.isSeekable():
        # print 'TEST4'
        # Autorecord mode: just jump back to live, keep recording.
        self.ptsStop = False
        self.pts_nextplaying = 0
        self.pts_switchtolive = True
        self.setSeekState(self.SEEK_STATE_PLAY)
        self.ptsSetNextPlaybackFile("")
        self.doSeek(3600 * 24 * 90000)
        self.__seekableStatusChanged()
        return 0
    was_enabled = False
    ts = self.getTimeshift()
    if ts and ts.isTimeshiftEnabled():
        # print 'TEST5'
        was_enabled = ts.isTimeshiftEnabled()
    if answer and ts:
        # print 'TEST6'
        if int(config.timeshift.startdelay.value):
            # print 'TEST7'
            ts.stopTimeshift(self.switchToLive)
        else:
            # print 'TEST8', str(self.event_changed)
            ts.stopTimeshift(not self.event_changed)
        self.__seekableStatusChanged()
# activates timeshift, and seeks to (almost) the end
def activateTimeshiftEnd(self, back = True):
    """Activate timeshift playback and seek to (almost) the buffer end;
    optionally schedule a rewind shortly afterwards."""
    ts = self.getTimeshift()
    if ts is None:
        return
    if ts.isTimeshiftActive():
        self.pauseService()
    else:
        ts.activateTimeshift() # activate timeshift will automatically pause
        self.setSeekState(self.SEEK_STATE_PAUSE)
        seekable = self.getSeek()
        if seekable is not None:
            seekable.seekTo(-90000) # seek approx. 1 sec before end
    if back:
        # xtrend boxes need a longer settle time before rewinding.
        if getBrandOEM() == 'xtrend':
            self.ts_rewind_timer.start(1000, 1)
        else:
            self.ts_rewind_timer.start(100, 1)
def rewindService(self):
    """Kick playback into fast rewind after timeshift activation."""
    brand = getBrandOEM()
    # These brands need an explicit PLAY first or the backward state is ignored.
    if brand in ('gigablue', 'xp'):
        self.setSeekState(self.SEEK_STATE_PLAY)
    speed = int(config.seek.enter_backward.value)
    self.setSeekState(self.makeStateBackward(speed))
def callServiceStarted(self):
    # Public wrapper so outside code can invoke the (name-mangled)
    # private service-started handler.
    self.__serviceStarted()
# same as activateTimeshiftEnd, but pauses afterwards.
def activateTimeshiftEndAndPause(self):
    # back=False skips the scheduled rewind, leaving playback paused
    # at the end of the timeshift buffer.
    self.activateTimeshiftEnd(False)
def checkTimeshiftRunning(self, returnFunction):
    """Decide how to leave a running timeshift, asking the user when
    configured, then invoke *returnFunction(proceed)*.

    Dialog answers flow through checkTimeshiftRunningCallback.
    """
    # print 'checkTimeshiftRunning'
    # print 'self.switchToLive',self.switchToLive
    if self.ptsStop:
        returnFunction(True)
    elif (self.isSeekable() and self.timeshiftEnabled() or self.save_current_timeshift) and config.usage.check_timeshift.value:
        # print 'TEST1'
        if config.timeshift.favoriteSaveAction.value == "askuser":
            # print 'TEST2'
            if self.save_current_timeshift:
                # print 'TEST3'
                message = _("You have chosen to save the current timeshift event, but the event has not yet finished\nWhat do you want to do ?")
                choice = [(_("Save timeshift as movie and continue recording"), "savetimeshiftandrecord"),
                          (_("Save timeshift as movie and stop recording"), "savetimeshift"),
                          (_("Cancel save timeshift as movie"), "noSave"),
                          (_("Nothing, just leave this menu"), "no")]
                self.session.openWithCallback(boundFunction(self.checkTimeshiftRunningCallback, returnFunction), MessageBox, message, simple = True, list = choice, timeout=30)
            else:
                # print 'TEST4'
                message = _("You seem to be in timeshift, Do you want to leave timeshift ?")
                choice = [(_("Yes, but don't save timeshift as movie"), "noSave"),
                          (_("Yes, but save timeshift as movie and continue recording"), "savetimeshiftandrecord"),
                          (_("Yes, but save timeshift as movie and stop recording"), "savetimeshift"),
                          (_("No"), "no")]
                self.session.openWithCallback(boundFunction(self.checkTimeshiftRunningCallback, returnFunction), MessageBox, message, simple = True, list = choice, timeout=30)
        else:
            # print 'TEST5'
            if self.save_current_timeshift:
                # print 'TEST6'
                # the user has previously activated "Timeshift save recording" of current event - so must be necessarily saved of the timeshift!
                # workaround - without the message box can the box no longer be operated when goes in standby(no freezing - no longer can use - unhandled key screen comes when key press -)
                message = _("You have chosen to save the current timeshift")
                choice = [(_("Now save timeshift as movie and continues recording"), "savetimeshiftandrecord")]
                self.session.openWithCallback(boundFunction(self.checkTimeshiftRunningCallback, returnFunction), MessageBox, message, simple = True, list = choice, timeout=1)
                #InfoBarTimeshift.saveTimeshiftActions(self, "savetimeshiftandrecord", returnFunction)
            else:
                # print 'TEST7'
                message = _("You seem to be in timeshift, Do you want to leave timeshift ?")
                choice = [(_("Yes"), config.timeshift.favoriteSaveAction.value), (_("No"), "no")]
                self.session.openWithCallback(boundFunction(self.checkTimeshiftRunningCallback, returnFunction), MessageBox, message, simple = True, list = choice, timeout=30)
    elif self.save_current_timeshift:
        # the user has chosen "no warning" when timeshift is stopped (config.usage.check_timeshift=False)
        # but the user has previously activated "Timeshift save recording" of current event
        # so we silently do "savetimeshiftandrecord" when switching channel independent of config.timeshift.favoriteSaveAction
        # workaround - without the message box can the box no longer be operated when goes in standby(no freezing - no longer can use - unhandled key screen comes when key press -)
        message = _("You have chosen to save the current timeshift")
        choice = [(_("Now save timeshift as movie and continues recording"), "savetimeshiftandrecord")]
        self.session.openWithCallback(boundFunction(self.checkTimeshiftRunningCallback, returnFunction), MessageBox, message, simple = True, list = choice, timeout=1)
        #InfoBarTimeshift.saveTimeshiftActions(self, "savetimeshiftandrecord", returnFunction)
    else:
        returnFunction(True)
def checkTimeshiftRunningCallback(self, returnFunction, answer):
    """Record the user's choice from the leave-timeshift dialog, then
    run the shared save/stop handling with it."""
    if answer in ("savetimeshift", "savetimeshiftandrecord"):
        self.save_current_timeshift = True
    elif answer == "noSave":
        self.save_current_timeshift = False
    # "no" (or a dismissed dialog) leaves the save flag untouched.
    InfoBarTimeshift.saveTimeshiftActions(self, answer, returnFunction)
def eraseTimeshiftFile(self):
    """Queue every live 'timeshift.*' buffer file for background erase,
    skipping files already marked as deleted or copied."""
    ts_dir = config.usage.timeshift_path.value
    for name in os.listdir(ts_dir):
        if name.startswith("timeshift.") and not name.endswith((".del", ".copy")):
            self.BgFileEraser.erase("%s%s" % (ts_dir, name))
def autostartAutorecordTimeshift(self):
    """Delay-timer callback: enable the timeshift keys and start
    autorecord timeshift when the start delay or an event change
    requires it.  Returns 0 when no timeshift interface exists."""
    # print '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!autostartAutorecordTimeshift'
    self["TimeshiftActions"].setEnabled(True)
    ts = self.getTimeshift()
    if ts is None:
        # print '[TimeShift] tune lock failed, so could not start.'
        return 0
    if self.pts_delay_timer.isActive():
        self.pts_delay_timer.stop()
    if (int(config.timeshift.startdelay.value) and not self.timeshiftEnabled()) or self.event_changed:
        self.activateAutorecordTimeshift()
def activateAutorecordTimeshift(self):
    """(Re)start the autorecord timeshift recorder for the current
    event: prepare the buffer folder, start a new pts_livebuffer_<n>
    recording and update event bookkeeping, or report failure."""
    # print 'activateAutorecordTimeshift'
    # print ('[TIMESHIFT] - pts_currplaying %s, pts_nextplaying %s, pts_eventcount %s, pts_firstplayable %s' % (self.pts_currplaying, self.pts_nextplaying, self.pts_eventcount, self.pts_firstplayable))
    self.createTimeshiftFolder()
    if self.pts_eventcount == 0: #only cleanup folder after switching channels, not when a new event starts, to allow saving old events from timeshift buffer
        self.ptsCleanTimeshiftFolder(justZapped = True) #remove all timeshift files
    else:
        self.ptsCleanTimeshiftFolder(justZapped = False) #only delete very old timeshift files based on config.usage.timeshiftMaxHours
    if self.ptsCheckTimeshiftPath() is False or self.session.screen["Standby"].boolean is True or self.ptsLiveTVStatus() is False or (config.timeshift.stopwhilerecording.value and self.pts_record_running):
        return
    # setNextPlaybackFile() on event change while timeshifting
    if self.isSeekable():
        self.pts_nextplaying = self.pts_currplaying + 1
        self.ptsSetNextPlaybackFile("pts_livebuffer_%s" % self.pts_nextplaying)
        # Do not switch back to LiveTV while timeshifting
        self.switchToLive = False
    else:
        self.switchToLive = True
    # (Re)start Timeshift now
    if config.timeshift.filesplitting.value:
        self.stopTimeshiftcheckTimeshiftRunningCallback(True)
    self.event_changed = False
    ts = self.getTimeshift()
    if ts and (not ts.startTimeshift() or self.pts_eventcount == 0):
        # Update internal Event Counter
        self.pts_eventcount += 1
        if (getBoxType() == 'vuuno' or getBoxType() == 'vuduo') and os.path.exists("/proc/stb/lcd/symbol_timeshift"):
            if self.session.nav.RecordTimer.isRecording():
                f = open("/proc/stb/lcd/symbol_timeshift", "w")
                f.write("0")
                f.close()
        self.pts_starttime = time()
        self.save_timeshift_postaction = None
        self.ptsGetEventInfo()
        self.ptsCreateHardlink()
        self.__seekableStatusChanged()
        self.ptsEventCleanTimerSTART()
    elif ts and ts.startTimeshift():
        # Recorder already running: refresh the metadata for the new event.
        self.ptsGetEventInfo()
        try:
            # rewrite .meta and .eit files
            metafile = open("%spts_livebuffer_%s.meta" % (config.usage.timeshift_path.value,self.pts_eventcount), "w")
            metafile.write("%s\n%s\n%s\n%i\n" % (self.pts_curevent_servicerefname,self.pts_curevent_name.replace("\n", ""),self.pts_curevent_description.replace("\n", ""),int(self.pts_starttime)))
            metafile.close()
            self.ptsCreateEITFile("%spts_livebuffer_%s" % (config.usage.timeshift_path.value,self.pts_eventcount))
        except:
            print "[Timeshift] - failure rewrite meta and eit files."
        self.ptsEventCleanTimerSTART()
    else:
        self.ptsEventCleanTimerSTOP()
        try:
            self.session.open(MessageBox, _("Timeshift not possible!"), MessageBox.TYPE_ERROR, timeout=2)
        except:
            print '[TIMESHIFT] Failed to open MessageBox, Timeshift not possible, probably another MessageBox was active.'
    # print ('[TIMESHIFT] - pts_currplaying %s, pts_nextplaying %s, pts_eventcount %s, pts_firstplayable %s' % (self.pts_currplaying, self.pts_nextplaying, self.pts_eventcount, self.pts_firstplayable))
def createTimeshiftFolder(self):
timeshiftdir = resolveFilename(SCOPE_TIMESHIFT)
if not pathExists(timeshiftdir):
try:
os.makedirs(timeshiftdir)
except:
print "[TimeShift] Failed to create %s !!" %timeshiftdir
def restartTimeshift(self):
    # Manually re-arm autorecord timeshift and notify the user.
    self.activateAutorecordTimeshift()
    Notifications.AddNotification(MessageBox, _("[TimeShift] Restarting Timeshift!"), MessageBox.TYPE_INFO, timeout=5)
def saveTimeshiftEventPopup(self):
    """Open a ChoiceBox listing the current event plus every settled
    pts_livebuffer_<n> buffer so the user can pick one to keep
    permanently (answer handled by recordQuestionCallback)."""
    self.saveTimeshiftEventPopupActive = True
    filecount = 0
    entrylist = [(_("Current Event:") + " %s" % self.pts_curevent_name, "savetimeshift")]
    filelist = os.listdir(config.usage.timeshift_path.value)
    if filelist is not None:
        try:
            # Numeric sort of pts_livebuffer_<n>; plain string sort would put 10 before 2.
            filelist = sorted(filelist, key=lambda x: int(x.split('pts_livebuffer_')[1]) if x.startswith("pts_livebuffer") and not os.path.splitext(x)[1] else x)
        except:
            print '[TIMESHIFT] - file sorting error, use standard sorting method'
            filelist.sort()
        # print filelist
        for filename in filelist:
            if filename.startswith("pts_livebuffer") and not os.path.splitext(filename)[1]:
                # print "TRUE"
                statinfo = os.stat("%s%s" % (config.usage.timeshift_path.value,filename))
                # Skip buffers touched within the last 5s (still being written).
                if statinfo.st_mtime < (time()-5.0):
                    # Get Event Info from meta file
                    readmetafile = open("%s%s.meta" % (config.usage.timeshift_path.value,filename), "r")
                    servicerefname = readmetafile.readline()[0:-1]
                    eventname = readmetafile.readline()[0:-1]
                    description = readmetafile.readline()[0:-1]
                    begintime = readmetafile.readline()[0:-1]
                    readmetafile.close()
                    # Add Event to list
                    filecount += 1
                    if config.timeshift.deleteAfterZap.value and servicerefname == self.pts_curevent_servicerefname:
                        entrylist.append((_("Record") + " #%s (%s): %s" % (filecount,strftime("%H:%M",localtime(int(begintime))),eventname), "%s" % filename))
                    else:
                        servicename = ServiceReference(servicerefname).getServiceName()
                        #entrylist.append((_("Record") + " #%s (%s,%s): %s" % (filecount,strftime("%H:%M",localtime(int(begintime))),servicename,eventname), "%s" % filename))
                        entrylist.append(("[%s] %s : %s" % (strftime("%H:%M",localtime(int(begintime))),servicename,eventname), "%s" % filename))
    self.session.openWithCallback(self.recordQuestionCallback, ChoiceBox, title=_("Which event do you want to save permanently?"), list=entrylist)
def saveTimeshiftActions(self, action=None, returnFunction=None):
# print 'saveTimeshiftActions'
# print 'action',action
if action == "savetimeshift":
self.SaveTimeshift()
elif action == "savetimeshiftandrecord":
if self.pts_curevent_end > time():
self.SaveTimeshift(mergelater=True)
self.ptsRecordCurrentEvent()
else:
self.SaveTimeshift()
elif action == "noSave":
config.timeshift.isRecording.value = False
self.save_current_timeshift = False
elif action == "no":
pass
# Get rid of old timeshift file before E2 truncates its filesize
if returnFunction is not None and action != "no":
self.eraseTimeshiftFile()
# print 'action returnFunction'
returnFunction(action and action != "no")
	def SaveTimeshift(self, timeshiftfile=None, mergelater=False):
		"""Save a timeshift buffer as a permanent recording.

		timeshiftfile: name of a stored buffer ("pts_livebuffer_*") to save,
		or None to save the currently running "timeshift.*" file.
		mergelater: tag the recording so ptsMergeRecords() can append the
		rest of the event to it later.

		Strategy: first try to hardlink the buffer into the recording path
		(instant, but only works on the same filesystem); if that raises,
		fall back to a background copy job via JobManager. Any failure of
		both paths is reported to the user via a MessageBox notification.
		"""
		# print 'SaveTimeshift'
		self.save_current_timeshift = False
		savefilename = None
		if timeshiftfile is not None:
			savefilename = timeshiftfile
		# print 'savefilename',savefilename
		if savefilename is None:
			# print 'TEST1'
			# Locate the live timeshift file: the "timeshift.*" file that was
			# written to within the last 5 seconds.
			for filename in os.listdir(config.usage.timeshift_path.value):
				# print 'filename',filename
				if filename.startswith("timeshift.") and not filename.endswith(".del") and not filename.endswith(".copy") and not filename.endswith(".sc"):
					statinfo = os.stat("%s%s" % (config.usage.timeshift_path.value,filename))
					if statinfo.st_mtime > (time()-5.0):
						savefilename=filename
		# print 'savefilename',savefilename
		if savefilename is None:
			Notifications.AddNotification(MessageBox, _("No Timeshift found to save as recording!"), MessageBox.TYPE_ERROR, timeout=30)
		else:
			timeshift_saved = True
			timeshift_saveerror1 = ""
			timeshift_saveerror2 = ""
			metamergestring = ""
			config.timeshift.isRecording.value = True
			if mergelater:
				# Kick off the merge attempt in 2 minutes; the tag below marks
				# the recording for ptsMergeRecords().
				self.pts_mergeRecords_timer.start(120000, True)
				metamergestring = "pts_merge\n"
			try:
				if timeshiftfile is None:
					# Save Current Event by creating hardlink to ts file
					if self.pts_starttime >= (time()-60):
						# Very young buffer: back-date by a minute so the
						# filename timestamp is not "in the future".
						self.pts_starttime -= 60
					ptsfilename = "%s - %s - %s" % (strftime("%Y%m%d %H%M",localtime(self.pts_starttime)),self.pts_curevent_station,self.pts_curevent_name.replace("\n", ""))
					try:
						# Honour the user's recording filename composition scheme
						# (only offered at setup level "expert" or above).
						if config.usage.setup_level.index >= 2:
							if config.recording.filename_composition.value == "long" and self.pts_curevent_name.replace("\n", "") != self.pts_curevent_description.replace("\n", ""):
								ptsfilename = "%s - %s - %s - %s" % (strftime("%Y%m%d %H%M",localtime(self.pts_starttime)),self.pts_curevent_station,self.pts_curevent_name.replace("\n", ""),self.pts_curevent_description.replace("\n", ""))
							elif config.recording.filename_composition.value == "short":
								ptsfilename = "%s - %s" % (strftime("%Y%m%d",localtime(self.pts_starttime)),self.pts_curevent_name.replace("\n", ""))
							elif config.recording.filename_composition.value == "veryshort":
								ptsfilename = "%s - %s" % (self.pts_curevent_name.replace("\n", ""),strftime("%Y%m%d %H%M",localtime(self.pts_starttime)))
							elif config.recording.filename_composition.value == "veryveryshort":
								ptsfilename = "%s - %s" % (self.pts_curevent_name.replace("\n", ""),strftime("%Y%m%d %H%M",localtime(self.pts_starttime)))
					except Exception, errormsg:
						print "[TimeShift] Using default filename"
					if config.recording.ascii_filenames.value:
						ptsfilename = ASCIItranslit.legacyEncode(ptsfilename)
					# print 'ptsfilename',ptsfilename
					fullname = getRecordingFilename(ptsfilename,config.usage.autorecord_path.value)
					# print 'fullname',fullname
					os.link("%s%s" % (config.usage.timeshift_path.value,savefilename), "%s.ts" % fullname)
					# Meta file: serviceref, name, description, begin time, merge tag.
					metafile = open("%s.ts.meta" % fullname, "w")
					metafile.write("%s\n%s\n%s\n%i\n%s" % (self.pts_curevent_servicerefname,self.pts_curevent_name.replace("\n", ""),self.pts_curevent_description.replace("\n", ""),int(self.pts_starttime),metamergestring))
					metafile.close()
					self.ptsCreateEITFile(fullname)
				elif timeshiftfile.startswith("pts_livebuffer"):
					# Save stored timeshift by creating hardlink to ts file
					readmetafile = open("%s%s.meta" % (config.usage.timeshift_path.value,timeshiftfile), "r")
					servicerefname = readmetafile.readline()[0:-1]
					eventname = readmetafile.readline()[0:-1]
					description = readmetafile.readline()[0:-1]
					begintime = readmetafile.readline()[0:-1]
					readmetafile.close()
					if config.timeshift.deleteAfterZap.value and servicerefname == self.pts_curevent_servicerefname:
						servicename = self.pts_curevent_station
					else:
						servicename = ServiceReference(servicerefname).getServiceName()
					ptsfilename = "%s - %s - %s" % (strftime("%Y%m%d %H%M",localtime(int(begintime))),servicename,eventname)
					try:
						if config.usage.setup_level.index >= 2:
							if config.recording.filename_composition.value == "long" and eventname != description:
								ptsfilename = "%s - %s - %s - %s" % (strftime("%Y%m%d %H%M",localtime(int(begintime))),servicename,eventname,description)
							elif config.recording.filename_composition.value == "short":
								ptsfilename = "%s - %s" % (strftime("%Y%m%d",localtime(int(begintime))),eventname)
							elif config.recording.filename_composition.value == "veryshort":
								ptsfilename = "%s - %s" % (eventname,strftime("%Y%m%d %H%M",localtime(int(begintime))))
							elif config.recording.filename_composition.value == "veryveryshort":
								ptsfilename = "%s - %s" % (eventname,strftime("%Y%m%d %H%M",localtime(int(begintime))))
					except Exception, errormsg:
						print "[TimeShift] Using default filename"
					if config.recording.ascii_filenames.value:
						ptsfilename = ASCIItranslit.legacyEncode(ptsfilename)
					fullname=getRecordingFilename(ptsfilename,config.usage.autorecord_path.value)
					os.link("%s%s" % (config.usage.timeshift_path.value,timeshiftfile),"%s.ts" % fullname)
					os.link("%s%s.meta" % (config.usage.timeshift_path.value,timeshiftfile),"%s.ts.meta" % fullname)
					if os.path.exists("%s%s.eit" % (config.usage.timeshift_path.value,timeshiftfile)):
						os.link("%s%s.eit" % (config.usage.timeshift_path.value,timeshiftfile),"%s.eit" % fullname)
					# Add merge-tag to metafile
					if mergelater:
						metafile = open("%s.ts.meta" % fullname, "a")
						metafile.write("%s\n" % metamergestring)
						metafile.close()
				# Create AP and SC Files when not merging
				if not mergelater:
					self.ptsCreateAPSCFiles(fullname+".ts")
			except Exception, errormsg:
				timeshift_saved = False
				timeshift_saveerror1 = errormsg
			# Hmpppf! Saving Timeshift via Hardlink-Method failed. Probably other device?
			# Let's try to copy the file in background now! This might take a while ...
			if not timeshift_saved:
				try:
					stat = os.statvfs(config.usage.autorecord_path.value)
					# Free space in MB (two-step /1000 mirrors the original rounding).
					freespace = stat.f_bfree / 1000 * stat.f_bsize / 1000
					randomint = randint(1, 999)
					if timeshiftfile is None:
						# Get Filesize for Free Space Check
						filesize = int(os.path.getsize("%s%s" % (config.usage.timeshift_path.value,savefilename)) / (1024*1024))
						# Save Current Event by copying it to the other device
						if filesize <= freespace:
							# ".<random>.copy" marks the source as an in-flight copy so the
							# cleanup code leaves it alone.
							os.link("%s%s" % (config.usage.timeshift_path.value,savefilename), "%s%s.%s.copy" % (config.usage.timeshift_path.value,savefilename,randomint))
							copy_file = savefilename
							# NOTE(review): relies on `fullname` assigned in the try-block
							# above; if the failure happened before that assignment this
							# raises NameError, caught by the outer except — TODO confirm.
							metafile = open("%s.ts.meta" % fullname, "w")
							metafile.write("%s\n%s\n%s\n%i\n%s" % (self.pts_curevent_servicerefname,self.pts_curevent_name.replace("\n", ""),self.pts_curevent_description.replace("\n", ""),int(self.pts_starttime),metamergestring))
							metafile.close()
							self.ptsCreateEITFile(fullname)
					elif timeshiftfile.startswith("pts_livebuffer"):
						# Get Filesize for Free Space Check
						filesize = int(os.path.getsize("%s%s" % (config.usage.timeshift_path.value, timeshiftfile)) / (1024*1024))
						# Save stored timeshift by copying it to the other device
						if filesize <= freespace:
							os.link("%s%s" % (config.usage.timeshift_path.value,timeshiftfile), "%s%s.%s.copy" % (config.usage.timeshift_path.value,timeshiftfile,randomint))
							copyfile("%s%s.meta" % (config.usage.timeshift_path.value,timeshiftfile),"%s.ts.meta" % fullname)
							if os.path.exists("%s%s.eit" % (config.usage.timeshift_path.value,timeshiftfile)):
								copyfile("%s%s.eit" % (config.usage.timeshift_path.value,timeshiftfile),"%s.eit" % fullname)
							copy_file = timeshiftfile
					# Add merge-tag to metafile
					if mergelater:
						metafile = open("%s.ts.meta" % fullname, "a")
						metafile.write("%s\n" % metamergestring)
						metafile.close()
					# Only copy file when enough disk-space available!
					if filesize <= freespace:
						timeshift_saved = True
						copy_file = copy_file+"."+str(randomint)
						# Get Event Info from meta file
						if os.path.exists("%s.ts.meta" % fullname):
							readmetafile = open("%s.ts.meta" % fullname, "r")
							servicerefname = readmetafile.readline()[0:-1]
							eventname = readmetafile.readline()[0:-1]
							readmetafile.close()
						else:
							eventname = ""
						# Background move of the ".copy" hardlink into the final .ts.
						JobManager.AddJob(CopyTimeshiftJob(self, "mv \"%s%s.copy\" \"%s.ts\"" % (config.usage.timeshift_path.value,copy_file,fullname), copy_file, fullname, eventname))
						if not Screens.Standby.inTryQuitMainloop and not Screens.Standby.inStandby and not mergelater and self.save_timeshift_postaction != "standby":
							Notifications.AddNotification(MessageBox, _("Saving timeshift as movie now. This might take a while!"), MessageBox.TYPE_INFO, timeout=30)
					else:
						timeshift_saved = False
						timeshift_saveerror1 = ""
						timeshift_saveerror2 = _("Not enough free Diskspace!\n\nFilesize: %sMB\nFree Space: %sMB\nPath: %s" % (filesize,freespace,config.usage.autorecord_path.value))
				except Exception, errormsg:
					timeshift_saved = False
					timeshift_saveerror2 = errormsg
			if not timeshift_saved:
				# Both the hardlink and the copy fallback failed: report why.
				config.timeshift.isRecording.value = False
				self.save_timeshift_postaction = None
				errormessage = str(timeshift_saveerror1) + "\n" + str(timeshift_saveerror2)
				Notifications.AddNotification(MessageBox, _("Timeshift save failed!")+"\n\n%s" % errormessage, MessageBox.TYPE_ERROR, timeout=30)
	# print 'SAVE COMPLETED'
def ptsAskUser(self, what):
if self.ptsAskUser_wait:
return
message_time = _("The buffer time for timeshift exceeds the specified limit in the settings.\nWhat do you want to do ?")
message_space = _("The available disk space for timeshift is less than specified in the settings.\nWhat do you want to do ?")
choice_restart = [(_("Delete the current timeshift buffer and restart timeshift"), "restarttimeshift"),
(_("Nothing, just leave this menu"), "no")]
choice_save = [(_("Stop timeshift and save timeshift buffer as movie and start recording of current event"), "savetimeshiftandrecord"),
(_("Stop timeshift and save timeshift buffer as movie"), "savetimeshift"),
(_("Stop timeshift"), "noSave"),
(_("Nothing, just leave this menu"), "no")]
if what == "time":
message = message_time
choice = choice_restart
elif what == "space":
message = message_space
choice = choice_restart
elif what == "time_and_save":
message = message_time
choice = choice_save
elif what == "space_and_save":
message = message_space
choice = choice_save
else:
return
self.ptsAskUser_wait = True
self.session.openWithCallback(self.ptsAskUserCallback, MessageBox, message, simple = True, list = choice, timeout=30)
def ptsAskUserCallback(self, answer):
self.ptsAskUser_wait = False
if answer:
if answer == "restarttimeshift":
self.ptsEventCleanTimerSTOP()
self.save_current_timeshift = False
self.stopTimeshiftAskUserCallback(True)
self.restartTimeshift()
elif answer == "noSave":
self.ptsEventCleanTimerSTOP()
self.save_current_timeshift = False
self.stopTimeshiftAskUserCallback(True)
elif answer == "savetimeshift" or answer == "savetimeshiftandrecord":
self.ptsEventCleanTimerSTOP()
self.save_current_timeshift = True
InfoBarTimeshift.saveTimeshiftActions(self, answer, self.stopTimeshiftAskUserCallback)
def stopTimeshiftAskUserCallback(self, answer):
ts = self.getTimeshift()
if answer and ts:
ts.stopTimeshift(True)
self.__seekableStatusChanged()
	def ptsEventCleanTimerSTOP(self, justStop = False):
		"""Stop the periodic cleanup timer.

		Unless justStop is True, also reset the live-buffer event counter.
		"""
		if justStop is False:
			self.pts_eventcount = 0
		if self.pts_cleanEvent_timer.isActive():
			self.pts_cleanEvent_timer.stop()
			print "[TIMESHIFT] - 'cleanEvent_timer' is stopped"
def ptsEventCleanTimerSTART(self):
if not self.pts_cleanEvent_timer.isActive() and int(config.timeshift.timeshiftCheckEvents.value):
self.pts_cleanEvent_timer.start(60000*int(config.timeshift.timeshiftCheckEvents.value), False)
print "[TIMESHIFT] - 'cleanEvent_timer' is starting"
	def ptsEventCleanTimeshiftFolder(self):
		"""Timer callback: run the folder cleanup without wiping the live buffers."""
		print "[TIMESHIFT] - 'cleanEvent_timer' is running"
		self.ptsCleanTimeshiftFolder(justZapped = False)
	def ptsCleanTimeshiftFolder(self, justZapped = True):
		"""Erase stale timeshift files from the timeshift directory.

		justZapped=True (after a channel change): remove all regular
		timeshift files. justZapped=False (periodic cleanup): remove only
		complete sets of old/over-limit live buffers, keeping files that are
		locked (currently playing up to the newest event) or about to be
		saved, and warn the user when time or disk-space limits are hit.
		"""
		# print '!!!!!!!!!!!!!!!!!!!!! ptsCleanTimeshiftFolder'
		if self.ptsCheckTimeshiftPath() is False or self.session.screen["Standby"].boolean is True:
			self.ptsEventCleanTimerSTOP()
			return
		freespace = int(config.timeshift.timeshiftCheckFreeSpace.value)
		timeshiftEnabled = self.timeshiftEnabled()
		isSeekable = self.isSeekable()
		filecounter = 0
		filesize = 0
		lockedFiles = []
		removeFiles = []
		if timeshiftEnabled:
			# Protect buffers still needed for playback: while seeking, every
			# buffer from the one being played up to the newest; otherwise just
			# the current one (unless the event already changed).
			if isSeekable:
				for i in range(self.pts_currplaying,self.pts_eventcount + 1):
					lockedFiles.append(("pts_livebuffer_%s") % i)
			else:
				if not self.event_changed:
					lockedFiles.append(("pts_livebuffer_%s") % self.pts_currplaying)
		if freespace:
			try:
				stat = os.statvfs(config.usage.timeshift_path.value)
				freespace = stat.f_bavail * stat.f_bsize / 1024 / 1024
			except:
				print "[TIMESHIFT] - error reading disk space - function 'checking for free space' can't used"
			if freespace < int(config.timeshift.timeshiftCheckFreeSpace.value):
				# Disk nearly full: mark every live buffer for deletion.
				for i in range(1,self.pts_eventcount + 1):
					removeFiles.append(("pts_livebuffer_%s") % i)
				print "[TIMESHIFT] - less than %s MByte disk space available - try to the deleting all unused timeshift files" % config.timeshift.timeshiftCheckFreeSpace.value
			elif self.pts_eventcount - config.timeshift.timeshiftMaxEvents.value >= 0:
				# Too many stored events: mark the oldest ones for deletion.
				if self.event_changed or len(lockedFiles) == 0:
					for i in range(1,self.pts_eventcount - config.timeshift.timeshiftMaxEvents.value + 2):
						removeFiles.append(("pts_livebuffer_%s") % i)
				else:
					for i in range(1,self.pts_eventcount - config.timeshift.timeshiftMaxEvents.value + 1):
						removeFiles.append(("pts_livebuffer_%s") % i)
		for filename in os.listdir(config.usage.timeshift_path.value):
			if (os.path.exists("%s%s" % (config.usage.timeshift_path.value,filename))) and ((filename.startswith("timeshift.") or filename.startswith("pts_livebuffer_"))):
				# print 'filename:',filename
				statinfo = os.stat("%s%s" % (config.usage.timeshift_path.value,filename))
				if (justZapped is True) and (filename.endswith(".del") is False) and (filename.endswith(".copy") is False):
					# after zapping, remove all regular timeshift files
					# print "[TimeShift] Erasing stranded timeshift file %s" % filename
					filesize += os.path.getsize("%s%s" % (config.usage.timeshift_path.value,filename))
					self.BgFileEraser.erase("%s%s" % (config.usage.timeshift_path.value,filename))
				elif (filename.endswith(".eit") is False) and (filename.endswith(".meta") is False) and (filename.endswith(".sc") is False) and (filename.endswith(".del") is False) and (filename.endswith(".copy") is False):
					# remove old files, but only complete sets of files (base file, .eit, .meta, .sc),
					# and not while saveTimeshiftEventPopup is active (avoid deleting files about to be saved)
					# and don't delete files from currently playing up to the last event
					if not filename.startswith("timeshift."):
						filecounter += 1
					if ((statinfo.st_mtime < (time()-3600*config.timeshift.timeshiftMaxHours.value)) or any(filename in s for s in removeFiles)) and (self.saveTimeshiftEventPopupActive is False) and not any(filename in s for s in lockedFiles):
						# print "[TimeShift] Erasing set of old timeshift files (base file, .eit, .meta, .sc) %s" % filename
						filesize += os.path.getsize("%s%s" % (config.usage.timeshift_path.value,filename))
						self.BgFileEraser.erase("%s%s" % (config.usage.timeshift_path.value,filename))
						if os.path.exists("%s%s.eit" % (config.usage.timeshift_path.value,filename)):
							filesize += os.path.getsize("%s%s.eit" % (config.usage.timeshift_path.value,filename))
							self.BgFileEraser.erase("%s%s.eit" % (config.usage.timeshift_path.value,filename))
						if os.path.exists("%s%s.meta" % (config.usage.timeshift_path.value,filename)):
							filesize += os.path.getsize("%s%s.meta" % (config.usage.timeshift_path.value,filename))
							self.BgFileEraser.erase("%s%s.meta" % (config.usage.timeshift_path.value,filename))
						if os.path.exists("%s%s.sc" % (config.usage.timeshift_path.value,filename)):
							filesize += os.path.getsize("%s%s.sc" % (config.usage.timeshift_path.value,filename))
							self.BgFileEraser.erase("%s%s.sc" % (config.usage.timeshift_path.value,filename))
						if not filename.startswith("timeshift."):
							filecounter -= 1
				else:
					# remove anything still left over another 24h later
					if statinfo.st_mtime < (time()-3600*(24+config.timeshift.timeshiftMaxHours.value)):
						# print "[TimeShift] Erasing very old timeshift file %s" % filename
						if filename.endswith(".del") is True:
							filesize += os.path.getsize("%s%s" % (config.usage.timeshift_path.value,filename))
							try:
								# Rename first so the eraser does not collide with an
								# earlier pending delete of the same name.
								os.rename("%s%s" % (config.usage.timeshift_path.value,filename), "%s%s.del_again" % (config.usage.timeshift_path.value,filename))
								self.BgFileEraser.erase("%s%s.del_again" % (config.usage.timeshift_path.value,filename))
							except:
								print "[TimeShift] - can't rename %s%s." % (config.usage.timeshift_path.value,filename)
								self.BgFileEraser.erase("%s%s" % (config.usage.timeshift_path.value,filename))
						else:
							filesize += os.path.getsize("%s%s" % (config.usage.timeshift_path.value,filename))
							self.BgFileEraser.erase("%s%s" % (config.usage.timeshift_path.value,filename))
		if filecounter == 0:
			self.ptsEventCleanTimerSTOP()
		else:
			# Limits still exceeded after cleanup: ask the user what to do.
			if timeshiftEnabled and not isSeekable:
				if freespace + (filesize / 1024 / 1024) < int(config.timeshift.timeshiftCheckFreeSpace.value):
					self.ptsAskUser("space")
				elif time() - self.pts_starttime > 3600 * config.timeshift.timeshiftMaxHours.value:
					self.ptsAskUser("time")
			elif isSeekable:
				if freespace + (filesize / 1024 / 1024) < int(config.timeshift.timeshiftCheckFreeSpace.value):
					self.ptsAskUser("space_and_save")
				elif time() - self.pts_starttime > 3600 * config.timeshift.timeshiftMaxHours.value:
					self.ptsAskUser("time_and_save")
		# React to a changed check-interval setting: restart or disable the timer.
		if self.checkEvents_value != int(config.timeshift.timeshiftCheckEvents.value):
			if self.pts_cleanEvent_timer.isActive():
				print "[TIMESHIFT] - 'cleanEvent_timer' was changed"
				self.pts_cleanEvent_timer.stop()
			if int(config.timeshift.timeshiftCheckEvents.value):
				self.ptsEventCleanTimerSTART()
			else:
				print "[TIMESHIFT] - 'cleanEvent_timer' is deactivated"
			self.checkEvents_value = int(config.timeshift.timeshiftCheckEvents.value)
	def ptsGetEventInfo(self):
		"""Cache the currently playing service and EPG event into pts_curevent_* attributes.

		Fills pts_curevent_servicerefname/station from the playing service and,
		if an EPG event is available, begin/end/name/description/eventid from it.
		Failures are reported via a MessageBox notification.
		"""
		event = None
		try:
			serviceref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			serviceHandler = eServiceCenter.getInstance()
			info = serviceHandler.info(serviceref)
			self.pts_curevent_servicerefname = serviceref.toString()
			self.pts_curevent_station = info.getName(serviceref)
			service = self.session.nav.getCurrentService()
			# `info` is rebound here to the *service* info (not the serviceref info).
			info = service and service.info()
			event = info and info.getEvent(0)
		except Exception, errormsg:
			Notifications.AddNotification(MessageBox, _("Getting Event Info failed!")+"\n\n%s" % errormsg, MessageBox.TYPE_ERROR, timeout=10)
		if event is not None:
			# parseEvent returns (begin, end, name, description, eventid, ...).
			curEvent = parseEvent(event)
			self.pts_curevent_begin = int(curEvent[0])
			self.pts_curevent_end = int(curEvent[1])
			self.pts_curevent_name = curEvent[2]
			self.pts_curevent_description = curEvent[3]
			self.pts_curevent_eventid = curEvent[4]
def ptsFrontpanelActions(self, action=None):
if self.session.nav.RecordTimer.isRecording() or SystemInfo.get("NumFrontpanelLEDs", 0) == 0:
return
if action == "start":
if os.path.exists("/proc/stb/fp/led_set_pattern"):
f = open("/proc/stb/fp/led_set_pattern", "w")
f.write("0xa7fccf7a")
f.close()
elif os.path.exists("/proc/stb/fp/led0_pattern"):
f = open("/proc/stb/fp/led0_pattern", "w")
f.write("0x55555555")
f.close()
if os.path.exists("/proc/stb/fp/led_pattern_speed"):
f = open("/proc/stb/fp/led_pattern_speed", "w")
f.write("20")
f.close()
elif os.path.exists("/proc/stb/fp/led_set_speed"):
f = open("/proc/stb/fp/led_set_speed", "w")
f.write("20")
f.close()
elif action == "stop":
if os.path.exists("/proc/stb/fp/led_set_pattern"):
f = open("/proc/stb/fp/led_set_pattern", "w")
f.write("0")
f.close()
elif os.path.exists("/proc/stb/fp/led0_pattern"):
f = open("/proc/stb/fp/led0_pattern", "w")
f.write("0")
f.close()
	def ptsCreateHardlink(self):
		"""Snapshot the live timeshift file as "pts_livebuffer_<eventcount>".

		Creates hardlinks for the .ts and .sc files, writes a fresh .meta file
		and an .eit file, after erasing any previous files with that index.
		If config.timeshift.autorecord is on, additionally hardlinks the
		buffer into the recording path as an auto-saved movie.
		"""
		# print 'ptsCreateHardlink'
		for filename in os.listdir(config.usage.timeshift_path.value):
			# if filename.startswith("timeshift") and not os.path.splitext(filename)[1]:
			if filename.startswith("timeshift") and not filename.endswith(".sc") and not filename.endswith(".del") and not filename.endswith(".copy"):
				# Clear out any leftover files for this buffer index first.
				if os.path.exists("%spts_livebuffer_%s.eit" % (config.usage.timeshift_path.value,self.pts_eventcount)):
					self.BgFileEraser.erase("%spts_livebuffer_%s.eit" % (config.usage.timeshift_path.value,self.pts_eventcount))
				if os.path.exists("%spts_livebuffer_%s.meta" % (config.usage.timeshift_path.value,self.pts_eventcount)):
					self.BgFileEraser.erase("%spts_livebuffer_%s.meta" % (config.usage.timeshift_path.value,self.pts_eventcount))
				if os.path.exists("%spts_livebuffer_%s" % (config.usage.timeshift_path.value,self.pts_eventcount)):
					self.BgFileEraser.erase("%spts_livebuffer_%s" % (config.usage.timeshift_path.value,self.pts_eventcount))
				if os.path.exists("%spts_livebuffer_%s.sc" % (config.usage.timeshift_path.value,self.pts_eventcount)):
					self.BgFileEraser.erase("%spts_livebuffer_%s.sc" % (config.usage.timeshift_path.value,self.pts_eventcount))
				try:
					# Create link to pts_livebuffer file
					os.link("%s%s" % (config.usage.timeshift_path.value,filename), "%spts_livebuffer_%s" % (config.usage.timeshift_path.value,self.pts_eventcount))
					os.link("%s%s.sc" % (config.usage.timeshift_path.value,filename), "%spts_livebuffer_%s.sc" % (config.usage.timeshift_path.value,self.pts_eventcount))
					# Create a Meta File
					metafile = open("%spts_livebuffer_%s.meta" % (config.usage.timeshift_path.value,self.pts_eventcount), "w")
					metafile.write("%s\n%s\n%s\n%i\n" % (self.pts_curevent_servicerefname,self.pts_curevent_name.replace("\n", ""),self.pts_curevent_description.replace("\n", ""),int(self.pts_starttime)))
					metafile.close()
				except Exception, errormsg:
					# Hardlinks require the same filesystem; warn the user.
					Notifications.AddNotification(MessageBox, _("Creating Hardlink to Timeshift file failed!")+"\n"+_("The Filesystem on your Timeshift-Device does not support hardlinks.\nMake sure it is formatted in EXT2 or EXT3!")+"\n\n%s" % errormsg, MessageBox.TYPE_ERROR, timeout=30)
				# Create EIT File
				self.ptsCreateEITFile("%spts_livebuffer_%s" % (config.usage.timeshift_path.value,self.pts_eventcount))
				# Autorecord
				if config.timeshift.autorecord.value:
					try:
						fullname = getRecordingFilename("%s - %s - %s" % (strftime("%Y%m%d %H%M",localtime(self.pts_starttime)),self.pts_curevent_station,self.pts_curevent_name),config.usage.autorecord_path.value)
						os.link("%s%s" % (config.usage.timeshift_path.value,filename), "%s.ts" % fullname)
						# Create a Meta File
						metafile = open("%s.ts.meta" % fullname, "w")
						metafile.write("%s\n%s\n%s\n%i\nautosaved\n" % (self.pts_curevent_servicerefname,self.pts_curevent_name.replace("\n", ""),self.pts_curevent_description.replace("\n", ""),int(self.pts_starttime)))
						metafile.close()
					except Exception, errormsg:
						print "[Timeshift] %s" % errormsg
	def ptsRecordCurrentEvent(self):
		"""Start a real recording of the current event from now until its end.

		The timer entry is not persisted (dontSave) and is tracked in
		self.recording (presumably the instant-record list — TODO confirm).
		"""
		recording = RecordTimerEntry(ServiceReference(self.session.nav.getCurrentlyPlayingServiceOrGroup()), time(), self.pts_curevent_end, self.pts_curevent_name, self.pts_curevent_description, self.pts_curevent_eventid, dirname = config.usage.autorecord_path.value)
		recording.dontSave = True
		self.session.nav.RecordTimer.record(recording)
		self.recording.append(recording)
	def ptsMergeRecords(self):
		"""Merge split recordings tagged with "pts_merge" in their meta file.

		Scans the recording path's .meta files in sorted order: a file tagged
		"pts_merge" becomes the merge destination, the next matching file
		(same event name) is appended to it via a background cat job.
		Postponed by 2 minutes while a recording runs or the destination is
		still being written.
		"""
		if self.session.nav.RecordTimer.isRecording():
			self.pts_mergeRecords_timer.start(120000, True)
			return
		ptsmergeSRC = ""
		ptsmergeDEST = ""
		ptsmergeeventname = ""
		ptsgetnextfile = False
		ptsfilemerged = False
		filelist = os.listdir(config.usage.autorecord_path.value)
		if filelist is not None:
			filelist.sort()
			for filename in filelist:
				if filename.endswith(".meta"):
					# Get Event Info from meta file
					readmetafile = open("%s%s" % (config.usage.autorecord_path.value,filename), "r")
					servicerefname = readmetafile.readline()[0:-1]
					eventname = readmetafile.readline()[0:-1]
					eventtitle = readmetafile.readline()[0:-1]
					eventtime = readmetafile.readline()[0:-1]
					eventtag = readmetafile.readline()[0:-1]
					readmetafile.close()
					if ptsgetnextfile:
						# This is the candidate source for the pending destination.
						ptsgetnextfile = False
						ptsmergeSRC = filename[0:-5]
						if ASCIItranslit.legacyEncode(eventname) == ASCIItranslit.legacyEncode(ptsmergeeventname):
							# Copy EIT File
							if fileExists("%s%s.eit" % (config.usage.autorecord_path.value, ptsmergeSRC[0:-3])):
								copyfile("%s%s.eit" % (config.usage.autorecord_path.value, ptsmergeSRC[0:-3]),"%s%s.eit" % (config.usage.autorecord_path.value, ptsmergeDEST[0:-3]))
							# Delete AP and SC Files
							if os.path.exists("%s%s.ap" % (config.usage.autorecord_path.value, ptsmergeDEST)):
								self.BgFileEraser.erase("%s%s.ap" % (config.usage.autorecord_path.value, ptsmergeDEST))
							if os.path.exists("%s%s.sc" % (config.usage.autorecord_path.value, ptsmergeDEST)):
								self.BgFileEraser.erase("%s%s.sc" % (config.usage.autorecord_path.value, ptsmergeDEST))
							# Add Merge Job to JobManager
							JobManager.AddJob(MergeTimeshiftJob(self, "cat \"%s%s\" >> \"%s%s\"" % (config.usage.autorecord_path.value,ptsmergeSRC,config.usage.autorecord_path.value,ptsmergeDEST), ptsmergeSRC, ptsmergeDEST, eventname))
							config.timeshift.isRecording.value = True
							ptsfilemerged = True
						else:
							# Name mismatch: keep looking for the matching source.
							ptsgetnextfile = True
					if eventtag == "pts_merge" and not ptsgetnextfile:
						# Found a tagged destination; the next meta file is its source.
						ptsgetnextfile = True
						ptsmergeDEST = filename[0:-5]
						ptsmergeeventname = eventname
						ptsfilemerged = False
						# If still recording or transfering, try again later ...
						if fileExists("%s%s" % (config.usage.autorecord_path.value,ptsmergeDEST)):
							statinfo = os.stat("%s%s" % (config.usage.autorecord_path.value,ptsmergeDEST))
							if statinfo.st_mtime > (time()-10.0):
								self.pts_mergeRecords_timer.start(120000, True)
								return
						# Rewrite Meta File to get rid of pts_merge tag
						# NOTE(review): uses the servicerefname/eventname/... read in
						# this loop iteration (the destination's own meta data).
						metafile = open("%s%s.meta" % (config.usage.autorecord_path.value,ptsmergeDEST), "w")
						metafile.write("%s\n%s\n%s\n%i\n" % (servicerefname,eventname.replace("\n", ""),eventtitle.replace("\n", ""),int(eventtime)))
						metafile.close()
		# Merging failed :(
		if not ptsfilemerged and ptsgetnextfile:
			Notifications.AddNotification(MessageBox,_("[Timeshift] Merging records failed!"), MessageBox.TYPE_ERROR, timeout=30)
def ptsCreateAPSCFiles(self, filename):
if fileExists(filename, 'r'):
if fileExists(filename+".meta", 'r'):
# Get Event Info from meta file
readmetafile = open(filename+".meta", "r")
servicerefname = readmetafile.readline()[0:-1]
eventname = readmetafile.readline()[0:-1]
readmetafile.close()
else:
eventname = ""
JobManager.AddJob(CreateAPSCFilesJob(self, "/usr/lib/enigma2/python/Components/createapscfiles \"%s\" > /dev/null" % filename, eventname))
else:
self.ptsSaveTimeshiftFinished()
	def ptsCreateEITFile(self, filename):
		"""Write "<filename>.eit" with the EPG data of the current event, if known."""
		if self.pts_curevent_eventid is not None:
			try:
				serviceref = ServiceReference(self.session.nav.getCurrentlyPlayingServiceOrGroup()).ref.toString()
				eEPGCache.getinstance().saveEventToFile(filename+".eit", serviceref, self.pts_curevent_eventid, -1, -1)
			except Exception, errormsg:
				# Best effort: a missing .eit only loses EPG metadata.
				print "[Timeshift] %s" % errormsg
	def ptsCopyFilefinished(self, srcfile, destfile):
		"""Callback after a background copy job: erase the source and continue.

		If a merge is pending, restart the merge timer (shortened to 15s);
		otherwise create the .ap/.sc index files for the new recording.
		"""
		# Erase Source File
		if fileExists(srcfile):
			self.BgFileEraser.erase(srcfile)
		# Restart Merge Timer
		if self.pts_mergeRecords_timer.isActive():
			self.pts_mergeRecords_timer.stop()
			self.pts_mergeRecords_timer.start(15000, True)
		else:
			# Create AP and SC Files
			self.ptsCreateAPSCFiles(destfile)
	def ptsMergeFilefinished(self, srcfile, destfile):
		"""Callback after a merge job: dispose of the merged-in source recording.

		If the box is busy (recording or other jobs), only mark the source
		with a ".pts.del" file and let ptsMergePostCleanUp() delete it later;
		otherwise erase it and its sidecar files right away and re-run the
		merge scan for further candidates.
		"""
		if self.session.nav.RecordTimer.isRecording() or len(JobManager.getPendingJobs()) >= 1:
			# Rename files and delete them later ...
			self.pts_mergeCleanUp_timer.start(120000, True)
			# srcfile[0:-3] strips the ".ts" suffix for the marker name.
			os.system("echo \"\" > \"%s.pts.del\"" % (srcfile[0:-3]))
		else:
			# Delete Instant Record permanently now ... R.I.P.
			self.BgFileEraser.erase("%s" % srcfile)
			self.BgFileEraser.erase("%s.ap" % srcfile)
			self.BgFileEraser.erase("%s.sc" % srcfile)
			self.BgFileEraser.erase("%s.meta" % srcfile)
			self.BgFileEraser.erase("%s.cuts" % srcfile)
			self.BgFileEraser.erase("%s.eit" % (srcfile[0:-3]))
			# Create AP and SC Files
			self.ptsCreateAPSCFiles(destfile)
			# Run Merge-Process one more time to check if there are more records to merge
			self.pts_mergeRecords_timer.start(10000, True)
def ptsSaveTimeshiftFinished(self):
if not self.pts_mergeCleanUp_timer.isActive():
self.ptsFrontpanelActions("stop")
config.timeshift.isRecording.value = False
if Screens.Standby.inTryQuitMainloop:
self.pts_QuitMainloop_timer.start(30000, True)
else:
Notifications.AddNotification(MessageBox, _("Timeshift saved to your harddisk!"), MessageBox.TYPE_INFO, timeout=30)
	def ptsMergePostCleanUp(self):
		"""Delete recordings that were marked with ".pts.del" after merging.

		Postponed by 2 minutes while a recording or other jobs are running.
		Restarts the quit timer afterwards so the background eraser can finish
		before a pending shutdown.
		"""
		if self.session.nav.RecordTimer.isRecording() or len(JobManager.getPendingJobs()) >= 1:
			config.timeshift.isRecording.value = True
			self.pts_mergeCleanUp_timer.start(120000, True)
			return
		self.ptsFrontpanelActions("stop")
		config.timeshift.isRecording.value = False
		filelist = os.listdir(config.usage.autorecord_path.value)
		for filename in filelist:
			if filename.endswith(".pts.del"):
				# Reconstruct the base recording name from the marker file.
				srcfile = config.usage.autorecord_path.value + "/" + filename[0:-8] + ".ts"
				self.BgFileEraser.erase("%s" % srcfile)
				self.BgFileEraser.erase("%s.ap" % srcfile)
				self.BgFileEraser.erase("%s.sc" % srcfile)
				self.BgFileEraser.erase("%s.meta" % srcfile)
				self.BgFileEraser.erase("%s.cuts" % srcfile)
				self.BgFileEraser.erase("%s.eit" % (srcfile[0:-3]))
				self.BgFileEraser.erase("%s.pts.del" % (srcfile[0:-3]))
				# Restart QuitMainloop Timer to give BgFileEraser enough time
				if Screens.Standby.inTryQuitMainloop and self.pts_QuitMainloop_timer.isActive():
					self.pts_QuitMainloop_timer.start(60000, True)
	def ptsTryQuitMainloop(self):
		"""Retry a deferred shutdown/restart once background timeshift work is done.

		While jobs or the merge cleanup are still pending, re-arm the timer;
		otherwise re-open TryQuitMainloop with the originally requested mode
		(stored in session.ptsmainloopvalue), clearing the dialog stacks first.
		"""
		if Screens.Standby.inTryQuitMainloop and (len(JobManager.getPendingJobs()) >= 1 or self.pts_mergeCleanUp_timer.isActive()):
			self.pts_QuitMainloop_timer.start(60000, True)
			return
		if Screens.Standby.inTryQuitMainloop and self.session.ptsmainloopvalue:
			self.session.dialog_stack = []
			self.session.summary_stack = [None]
			self.session.open(Screens.Standby.TryQuitMainloop, self.session.ptsmainloopvalue)
def ptsGetSeekInfo(self):
s = self.session.nav.getCurrentService()
return s and s.seek()
def ptsGetPosition(self):
seek = self.ptsGetSeekInfo()
if seek is None:
return None
pos = seek.getPlayPosition()
if pos[0]:
return 0
return pos[1]
def ptsGetLength(self):
seek = self.ptsGetSeekInfo()
if seek is None:
return None
length = seek.getLength()
if length[0]:
return 0
return length[1]
def ptsGetTimeshiftStatus(self):
if (self.isSeekable() and self.timeshiftEnabled() or self.save_current_timeshift) and config.usage.check_timeshift.value:
return True
else:
return False
def ptsSeekPointerOK(self):
if self.pvrStateDialog.has_key("PTSSeekPointer") and self.timeshiftEnabled() and self.isSeekable():
if not self.pvrStateDialog.shown:
if self.seekstate != self.SEEK_STATE_PLAY or self.seekstate == self.SEEK_STATE_PAUSE:
self.setSeekState(self.SEEK_STATE_PLAY)
self.doShow()
return
length = self.ptsGetLength()
position = self.ptsGetPosition()
if length is None or position is None:
return
cur_pos = self.pvrStateDialog["PTSSeekPointer"].position
jumptox = int(cur_pos[0]) - (int(self.pvrStateDialog["PTSSeekBack"].instance.position().x())+8)
jumptoperc = round((jumptox / float(self.pvrStateDialog["PTSSeekBack"].instance.size().width())) * 100, 0)
jumptotime = int((length / 100) * jumptoperc)
jumptodiff = position - jumptotime
self.doSeekRelative(-jumptodiff)
else:
return
def ptsSeekPointerLeft(self):
if self.pvrStateDialog.has_key("PTSSeekPointer") and self.pvrStateDialog.shown and self.timeshiftEnabled() and self.isSeekable():
self.ptsMoveSeekPointer(direction="left")
else:
return
def ptsSeekPointerRight(self):
if self.pvrStateDialog.has_key("PTSSeekPointer") and self.pvrStateDialog.shown and self.timeshiftEnabled() and self.isSeekable():
self.ptsMoveSeekPointer(direction="right")
else:
return
	def ptsSeekPointerReset(self):
		"""Move the seek pointer back to the seekbar's left edge (+8px inset)."""
		if self.pvrStateDialog.has_key("PTSSeekPointer") and self.timeshiftEnabled():
			self.pvrStateDialog["PTSSeekPointer"].setPosition(int(self.pvrStateDialog["PTSSeekBack"].instance.position().x())+8,self.pvrStateDialog["PTSSeekPointer"].position[1])
	def ptsSeekPointerSetCurrentPos(self):
		"""Place the seek pointer to reflect the current play position as a
		fraction of the buffer length."""
		if not self.pvrStateDialog.has_key("PTSSeekPointer") or not self.timeshiftEnabled() or not self.isSeekable():
			return
		position = self.ptsGetPosition()
		length = self.ptsGetLength()
		# The `length >= 1` check also guards the division below against 0.
		# NOTE(review): a None length (or position) relies on Python 2
		# None-comparison semantics here; this would raise on Python 3.
		if length >= 1:
			tpixels = int((float(int((position*100)/length))/100)*self.pvrStateDialog["PTSSeekBack"].instance.size().width())
			self.pvrStateDialog["PTSSeekPointer"].setPosition(int(self.pvrStateDialog["PTSSeekBack"].instance.position().x())+8+tpixels, self.pvrStateDialog["PTSSeekPointer"].position[1])
	def ptsMoveSeekPointer(self, direction=None):
		"""Nudge the seek pointer 15 pixels along the seek bar.

		direction: "left" or "right"; anything else is a no-op. The pointer is
		clamped to the bar boundary when the step would overshoot it.
		"""
		if direction is None or not self.pvrStateDialog.has_key("PTSSeekPointer"):
			return
		isvalidjump = False
		cur_pos = self.pvrStateDialog["PTSSeekPointer"].position
		self.doShow()
		if direction == "left":
			# Leftmost allowed x: start of the seek bar plus the 8 px margin.
			minmaxval = int(self.pvrStateDialog["PTSSeekBack"].instance.position().x())+8
			movepixels = -15
			if cur_pos[0]+movepixels > minmaxval:
				isvalidjump = True
		elif direction == "right":
			# Rightmost allowed x: 96% of the bar width.
			minmaxval = int(self.pvrStateDialog["PTSSeekBack"].instance.size().width()*0.96)
			movepixels = 15
			if cur_pos[0]+movepixels < minmaxval:
				isvalidjump = True
		else:
			return 0
		if isvalidjump:
			self.pvrStateDialog["PTSSeekPointer"].setPosition(cur_pos[0]+movepixels,cur_pos[1])
		else:
			# Step would overshoot the bar: clamp to the boundary instead.
			self.pvrStateDialog["PTSSeekPointer"].setPosition(minmaxval,cur_pos[1])
def ptsCheckFileChanged(self):
# print '!!!!! ptsCheckFileChanged'
# print ('[TIMESHIFT] - pts_currplaying %s, pts_nextplaying %s, pts_eventcount %s, pts_firstplayable %s' % (self.pts_currplaying, self.pts_nextplaying, self.pts_eventcount, self.pts_firstplayable))
# print 'self.pts_file_changed',self.pts_file_changed
if self.pts_file_changed or not self.timeshiftEnabled():
self.pts_CheckFileChanged_timer.stop()
if not self.pts_currplaying == self.pts_eventcount:
self.pts_SeekBack_timer.start(1000, True)
else:
self.doSeek(3600 * 24 * 90000)
	def ptsTimeshiftFileChanged(self):
		"""Callback fired when playback switched to the queued timeshift file:
		advance the current/next buffer bookkeeping and queue the following
		livebuffer file - or live TV when no further buffer file exists."""
		self.pts_file_changed = True
		# Reset Seek Pointer
		self.ptsSeekPointerReset()
		if self.pts_switchtolive:
			# Just crossed over to live: we are now playing the newest event.
			self.pts_switchtolive = False
			self.pts_nextplaying = 0
			self.pts_currplaying = self.pts_eventcount
			return
		if self.pts_nextplaying:
			self.pts_currplaying = self.pts_nextplaying
		self.pts_nextplaying = self.pts_currplaying + 1
		# Queue the next pts buffer file if it exists on disk ...
		if fileExists("%spts_livebuffer_%s" % (config.usage.timeshift_path.value,self.pts_nextplaying), 'r'):
			self.ptsSetNextPlaybackFile("pts_livebuffer_%s" % self.pts_nextplaying)
			self.pts_switchtolive = False
		else:
			# ... otherwise fall through to live after the current buffer.
			self.ptsSetNextPlaybackFile("")
			self.pts_switchtolive = True
def ptsSetNextPlaybackFile(self, nexttsfile):
# print '!!!!! ptsSetNextPlaybackFile'
ts = self.getTimeshift()
if ts is None:
return
# print ("!!! SET NextPlaybackFile%s%s" % (config.usage.timeshift_path.value,nexttsfile))
ts.setNextPlaybackFile("%s%s" % (config.usage.timeshift_path.value,nexttsfile))
	def ptsSeekBackTimer(self):
		"""After a file change, position near the end of the buffer, pause, and
		schedule the rewind restart one second later."""
		self.doSeek(-90000*10) # seek ~10s before end
		self.setSeekState(self.SEEK_STATE_PAUSE)
		self.pts_StartSeekBackTimer.start(1000, True)
def ptsStartSeekBackTimer(self):
# print '!!!!! ptsStartSeekBackTimer RUN'
if self.pts_lastseekspeed == 0:
self.setSeekState(self.makeStateBackward(int(config.seek.enter_backward.value)))
else:
self.setSeekState(self.makeStateBackward(int(-self.pts_lastseekspeed)))
def ptsCheckTimeshiftPath(self):
if fileExists(config.usage.timeshift_path.value, 'w'):
return True
else:
# Notifications.AddNotification(MessageBox, _("Could not activate Autorecord-Timeshift!\nTimeshift-Path does not exist"), MessageBox.TYPE_ERROR, timeout=15)
if self.pts_delay_timer.isActive():
self.pts_delay_timer.stop()
if self.pts_cleanUp_timer.isActive():
self.pts_cleanUp_timer.stop()
return False
	def ptsTimerEntryStateChange(self, timer):
		"""RecordTimer state-change hook: with 'stop timeshift while recording'
		enabled, stop timeshift when a recording starts and restart it (plus
		any pending merge job) once all recordings have ended."""
		if not config.timeshift.stopwhilerecording.value:
			return
		self.pts_record_running = self.session.nav.RecordTimer.isRecording()
		# Abort here when box is in standby mode
		if self.session.screen["Standby"].boolean is True:
			return
		# Stop Timeshift when Record started ...
		if timer.state == TimerEntry.StateRunning and self.timeshiftEnabled() and self.pts_record_running:
			# Return to normal play first so the stop happens from a sane state.
			if self.seekstate != self.SEEK_STATE_PLAY:
				self.setSeekState(self.SEEK_STATE_PLAY)
			if self.isSeekable():
				Notifications.AddNotification(MessageBox,_("Record started! Stopping timeshift now ..."), MessageBox.TYPE_INFO, timeout=30)
			self.switchToLive = False
			self.stopTimeshiftcheckTimeshiftRunningCallback(True)
		# Restart Timeshift when all records stopped
		if timer.state == TimerEntry.StateEnded and not self.timeshiftEnabled() and not self.pts_record_running:
			self.autostartAutorecordTimeshift()
		# Restart Merge-Timer when all records stopped
		if timer.state == TimerEntry.StateEnded and self.pts_mergeRecords_timer.isActive():
			self.pts_mergeRecords_timer.stop()
			self.pts_mergeRecords_timer.start(15000, True)
		# Restart FrontPanel LED when still copying or merging files
		# ToDo: Only do this on PTS Events and not events from other jobs
		if timer.state == TimerEntry.StateEnded and (len(JobManager.getPendingJobs()) >= 1 or self.pts_mergeRecords_timer.isActive()):
			self.ptsFrontpanelActions("start")
			config.timeshift.isRecording.value = True
def ptsLiveTVStatus(self):
service = self.session.nav.getCurrentService()
info = service and service.info()
sTSID = info and info.getInfo(iServiceInformation.sTSID) or -1
if sTSID is None or sTSID == -1:
return False
else:
return True
|
gpl-2.0
|
mhei/openwrt
|
scripts/json_overview_image_info.py
|
4
|
1044
|
#!/usr/bin/env python3
"""Merge per-image JSON info files from $WORK_DIR into one overview JSON file.

Each *.json in $WORK_DIR describes a single image and holds exactly one entry
under its "profiles" key.  Entries for the same device are merged by
concatenating their "images" lists.  The merged document is written to the
path given as the single command-line argument.
"""
import json
from pathlib import Path
from os import getenv
from sys import argv, exit  # use sys.exit: the `exit` builtin is only provided by the site module

if len(argv) != 2:
    print("JSON info files script requires output file as argument")
    exit(1)

output_path = Path(argv[1])

# NOTE(review): `assert` is stripped under `python -O`; acceptable for a
# build-time helper script.
assert getenv("WORK_DIR"), "$WORK_DIR required"
work_dir = Path(getenv("WORK_DIR"))

output = {}
for json_file in work_dir.glob("*.json"):
    image_info = json.loads(json_file.read_text())
    if not output:
        # First file seen: adopt it wholesale as the base document.
        output.update(image_info)
    else:
        # get first (and only) profile in json file
        device_id = next(iter(image_info["profiles"].keys()))
        if device_id not in output["profiles"]:
            output["profiles"].update(image_info["profiles"])
        else:
            # Device seen before: append this file's single image entry.
            output["profiles"][device_id]["images"].append(
                image_info["profiles"][device_id]["images"][0]
            )

if output:
    # Compact separators keep the overview file small.
    output_path.write_text(json.dumps(output, sort_keys=True, separators=(",", ":")))
else:
    print("JSON info file script could not find any JSON files for target")
|
gpl-2.0
|
proxysh/Safejumper-for-Mac
|
buildlinux/env32/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/langthaimodel.py
|
2930
|
11275
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# The following result for thai was collected from a limited sample (1M).
# Character Mapping Table:
TIS620CharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253,182,106,107,100,183,184,185,101, 94,186,187,108,109,110,111, # 40
188,189,190, 89, 95,112,113,191,192,193,194,253,253,253,253,253, # 50
253, 64, 72, 73,114, 74,115,116,102, 81,201,117, 90,103, 78, 82, # 60
96,202, 91, 79, 84,104,105, 97, 98, 92,203,253,253,253,253,253, # 70
209,210,211,212,213, 88,214,215,216,217,218,219,220,118,221,222,
223,224, 99, 85, 83,225,226,227,228,229,230,231,232,233,234,235,
236, 5, 30,237, 24,238, 75, 8, 26, 52, 34, 51,119, 47, 58, 57,
49, 53, 55, 43, 20, 19, 44, 14, 48, 3, 17, 25, 39, 62, 31, 54,
45, 9, 16, 2, 61, 15,239, 12, 42, 46, 18, 21, 76, 4, 66, 63,
22, 10, 1, 36, 23, 13, 40, 27, 32, 35, 86,240,241,242,243,244,
11, 28, 41, 29, 33,245, 50, 37, 6, 7, 67, 77, 38, 93,246,247,
68, 56, 59, 65, 69, 60, 70, 80, 71, 87,248,249,250,251,252,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 92.6386%
# first 1024 sequences:7.3177%
# rest sequences: 1.0230%
# negative sequences: 0.0436%
ThaiLangModel = (
0,1,3,3,3,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,0,0,3,3,3,0,3,3,3,3,
0,3,3,0,0,0,1,3,0,3,3,2,3,3,0,1,2,3,3,3,3,0,2,0,2,0,0,3,2,1,2,2,
3,0,3,3,2,3,0,0,3,3,0,3,3,0,3,3,3,3,3,3,3,3,3,0,3,2,3,0,2,2,2,3,
0,2,3,0,0,0,0,1,0,1,2,3,1,1,3,2,2,0,1,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,3,3,2,3,2,3,3,2,2,2,
3,1,2,3,0,3,3,2,2,1,2,3,3,1,2,0,1,3,0,1,0,0,1,0,0,0,0,0,0,0,1,1,
3,3,2,2,3,3,3,3,1,2,3,3,3,3,3,2,2,2,2,3,3,2,2,3,3,2,2,3,2,3,2,2,
3,3,1,2,3,1,2,2,3,3,1,0,2,1,0,0,3,1,2,1,0,0,1,0,0,0,0,0,0,1,0,1,
3,3,3,3,3,3,2,2,3,3,3,3,2,3,2,2,3,3,2,2,3,2,2,2,2,1,1,3,1,2,1,1,
3,2,1,0,2,1,0,1,0,1,1,0,1,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,2,2,3,2,3,3,2,3,1,1,2,3,2,2,2,3,2,2,2,2,2,1,2,1,
2,2,1,1,3,3,2,1,0,1,2,2,0,1,3,0,0,0,1,1,0,0,0,0,0,2,3,0,0,2,1,1,
3,3,2,3,3,2,0,0,3,3,0,3,3,0,2,2,3,1,2,2,1,1,1,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,0,0,1,0,0,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,0,0,3,3,0,2,3,0,2,1,2,2,2,2,1,2,0,0,2,2,2,0,2,2,1,1,
0,2,1,0,2,0,0,2,0,1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,3,2,3,2,0,2,2,1,3,2,1,3,2,1,2,3,2,2,3,0,2,3,2,2,1,2,2,2,2,
1,2,2,0,0,0,0,2,0,1,2,0,1,1,1,0,1,0,3,1,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,2,3,3,2,3,2,2,2,3,2,2,3,2,2,1,2,3,2,2,3,1,3,2,2,2,3,2,2,2,3,
3,2,1,3,0,1,1,1,0,2,1,1,1,1,1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,2,0,0,
1,0,0,3,0,3,3,3,3,3,0,0,3,0,2,2,3,3,3,3,3,0,0,0,1,1,3,0,0,0,0,2,
0,0,1,0,0,0,0,0,0,0,2,3,0,0,0,3,0,2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,3,3,3,3,0,0,2,3,0,0,3,0,3,3,2,3,3,3,3,3,0,0,3,3,3,0,0,0,3,3,
0,0,3,0,0,0,0,2,0,0,2,1,1,3,0,0,1,0,0,2,3,0,1,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,3,3,3,3,3,3,3,1,2,1,3,3,2,2,1,2,2,2,3,1,1,2,0,2,1,2,1,
2,2,1,0,0,0,1,1,0,1,0,1,1,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,
3,0,2,1,2,3,3,3,0,2,0,2,2,0,2,1,3,2,2,1,2,1,0,0,2,2,1,0,2,1,2,2,
0,1,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,3,1,1,3,0,2,3,1,1,3,2,1,1,2,0,2,2,3,2,1,1,1,1,1,2,
3,0,0,1,3,1,2,1,2,0,3,0,0,0,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,
3,3,1,1,3,2,3,3,3,1,3,2,1,3,2,1,3,2,2,2,2,1,3,3,1,2,1,3,1,2,3,0,
2,1,1,3,2,2,2,1,2,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,
3,3,2,3,2,3,3,2,3,2,3,2,3,3,2,1,0,3,2,2,2,1,2,2,2,1,2,2,1,2,1,1,
2,2,2,3,0,1,3,1,1,1,1,0,1,1,0,2,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,3,2,2,1,1,3,2,3,2,3,2,0,3,2,2,1,2,0,2,2,2,1,2,2,2,2,1,
3,2,1,2,2,1,0,2,0,1,0,0,1,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,3,1,2,3,3,2,2,3,0,1,1,2,0,3,3,2,2,3,0,1,1,3,0,0,0,0,
3,1,0,3,3,0,2,0,2,1,0,0,3,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,2,3,2,3,3,0,1,3,1,1,2,1,2,1,1,3,1,1,0,2,3,1,1,1,1,1,1,1,1,
3,1,1,2,2,2,2,1,1,1,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,1,1,2,1,3,3,2,3,2,2,3,2,2,3,1,2,2,1,2,0,3,2,1,2,2,2,2,2,1,
3,2,1,2,2,2,1,1,1,1,0,0,1,1,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,1,3,3,0,2,1,0,3,2,0,0,3,1,0,1,1,0,1,0,0,0,0,0,1,
1,0,0,1,0,3,2,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,2,2,3,0,0,1,3,0,3,2,0,3,2,2,3,3,3,3,3,1,0,2,2,2,0,2,2,1,2,
0,2,3,0,0,0,0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,0,2,3,1,3,3,2,3,3,0,3,3,0,3,2,2,3,2,3,3,3,0,0,2,2,3,0,1,1,1,3,
0,0,3,0,0,0,2,2,0,1,3,0,1,2,2,2,3,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,
3,2,3,3,2,0,3,3,2,2,3,1,3,2,1,3,2,0,1,2,2,0,2,3,2,1,0,3,0,0,0,0,
3,0,0,2,3,1,3,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,3,2,2,2,1,2,0,1,3,1,1,3,1,3,0,0,2,1,1,1,1,2,1,1,1,0,2,1,0,1,
1,2,0,0,0,3,1,1,0,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,3,1,0,0,0,1,0,
3,3,3,3,2,2,2,2,2,1,3,1,1,1,2,0,1,1,2,1,2,1,3,2,0,0,3,1,1,1,1,1,
3,1,0,2,3,0,0,0,3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,2,3,0,3,3,0,2,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,2,3,1,3,0,0,1,2,0,0,2,0,3,3,2,3,3,3,2,3,0,0,2,2,2,0,0,0,2,2,
0,0,1,0,0,0,0,3,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,3,0,2,0,0,0,0,0,0,0,0,0,0,1,2,3,1,3,3,0,0,1,0,3,0,0,0,0,0,
0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,2,3,1,2,3,1,0,3,0,2,2,1,0,2,1,1,2,0,1,0,0,1,1,1,1,0,1,0,0,
1,0,0,0,0,1,1,0,3,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,0,1,1,1,3,1,2,2,2,2,2,2,1,1,1,1,0,3,1,0,1,3,1,1,1,1,
1,1,0,2,0,1,3,1,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,
3,0,2,2,1,3,3,2,3,3,0,1,1,0,2,2,1,2,1,3,3,1,0,0,3,2,0,0,0,0,2,1,
0,1,0,0,0,0,1,2,0,1,1,3,1,1,2,2,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,3,0,0,1,0,0,0,3,0,0,3,0,3,1,0,1,1,1,3,2,0,0,0,3,0,0,0,0,2,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
3,3,1,3,2,1,3,3,1,2,2,0,1,2,1,0,1,2,0,0,0,0,0,3,0,0,0,3,0,0,0,0,
3,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,2,0,3,3,3,2,2,0,1,1,0,1,3,0,0,0,2,2,0,0,0,0,3,1,0,1,0,0,0,
0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,2,3,1,2,0,0,2,1,0,3,1,0,1,2,0,1,1,1,1,3,0,0,3,1,1,0,2,2,1,1,
0,2,0,0,0,0,0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,3,1,2,0,0,2,2,0,1,2,0,1,0,1,3,1,2,1,0,0,0,2,0,3,0,0,0,1,0,
0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,1,2,2,0,0,0,2,0,2,1,0,1,1,0,1,1,1,2,1,0,0,1,1,1,0,2,1,1,1,
0,1,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1,
0,0,0,2,0,1,3,1,1,1,1,0,0,0,0,3,2,0,1,0,0,0,1,2,0,0,0,1,0,0,0,0,
0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,2,3,2,2,0,0,0,1,0,0,0,0,2,3,2,1,2,2,3,0,0,0,2,3,1,0,0,0,1,1,
0,0,1,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,
3,3,2,2,0,1,0,0,0,0,2,0,2,0,1,0,0,0,1,1,0,0,0,2,1,0,1,0,1,1,0,0,
0,1,0,2,0,0,1,0,3,0,1,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,1,0,0,1,0,0,0,0,0,1,1,2,0,0,0,0,1,0,0,1,3,1,0,0,0,0,1,1,0,0,
0,1,0,0,0,0,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,
3,3,1,1,1,1,2,3,0,0,2,1,1,1,1,1,0,2,1,1,0,0,0,2,1,0,1,2,1,1,0,1,
2,1,0,3,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,3,1,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,
0,0,0,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,0,0,0,0,0,0,1,2,1,0,1,1,0,2,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,2,0,0,0,1,3,0,1,0,0,0,2,0,0,0,0,0,0,0,1,2,0,0,0,0,0,
3,3,0,0,1,1,2,0,0,1,2,1,0,1,1,1,0,1,1,0,0,2,1,1,0,1,0,0,1,1,1,0,
0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,1,0,0,0,0,1,0,0,0,0,3,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,0,0,1,1,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,2,0,1,2,0,0,1,1,0,2,0,1,0,0,1,0,0,0,0,1,0,0,0,2,0,0,0,0,
1,0,0,1,0,1,1,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,0,0,0,0,0,0,0,1,1,0,1,1,0,2,1,3,0,0,0,0,1,1,0,0,0,0,0,0,0,3,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,1,0,0,2,0,0,2,0,0,1,1,2,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0,
1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,2,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,3,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,0,0,
1,0,0,0,0,0,0,0,0,1,0,0,0,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,1,0,0,2,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
)
# Single-byte charset model for Thai text in the TIS-620 encoding, consumed
# by chardet's single-byte charset prober.
TIS620ThaiModel = {
  'charToOrderMap': TIS620CharToOrderMap,  # byte value -> frequency-order index
  'precedenceMatrix': ThaiLangModel,  # order-pair -> frequency class (values 0-3 above)
  'mTypicalPositiveRatio': 0.926386,  # matches "first 512 sequences: 92.6386%" above
  'keepEnglishLetter': False,  # ASCII letters are not significant for detection
  'charsetName': "TIS-620"
}
# flake8: noqa
|
gpl-2.0
|
andymckay/django
|
django/contrib/gis/tests/geoapp/tests.py
|
91
|
35402
|
from __future__ import absolute_import
import re
from django.db import connection
from django.db.utils import DatabaseError
from django.contrib.gis import gdal
from django.contrib.gis.geos import (fromstr, GEOSGeometry,
Point, LineString, LinearRing, Polygon, GeometryCollection)
from django.contrib.gis.tests.utils import (
no_mysql, no_oracle, no_spatialite,
mysql, oracle, postgis, spatialite)
from django.test import TestCase
from .models import Country, City, PennsylvaniaCity, State, Track
if not spatialite:
from .models import Feature, MinusOneSRID
class GeoModelTest(TestCase):
def test01_fixtures(self):
"Testing geographic model initialization from fixtures."
# Ensuring that data was loaded from initial data fixtures.
self.assertEqual(2, Country.objects.count())
self.assertEqual(8, City.objects.count())
self.assertEqual(2, State.objects.count())
    def test02_proxy(self):
        "Testing Lazy-Geometry support (using the GeometryProxy)."
        ## Testing on a Point
        pnt = Point(0, 0)
        nullcity = City(name='NullCity', point=pnt)
        nullcity.save()
        # Making sure TypeError is thrown when trying to set with an
        # incompatible type.
        for bad in [5, 2.0, LineString((0, 0), (1, 1))]:
            try:
                nullcity.point = bad
            except TypeError:
                pass
            else:
                self.fail('Should throw a TypeError')
        # Now setting with a compatible GEOS Geometry, saving, and ensuring
        # the save took, notice no SRID is explicitly set.
        new = Point(5, 23)
        nullcity.point = new
        # Ensuring that the SRID is automatically set to that of the
        # field after assignment, but before saving.
        self.assertEqual(4326, nullcity.point.srid)
        nullcity.save()
        # Ensuring the point was saved correctly after saving
        self.assertEqual(new, City.objects.get(name='NullCity').point)
        # Setting the X and Y of the Point
        nullcity.point.x = 23
        nullcity.point.y = 5
        # Checking assignments pre & post-save.
        self.assertNotEqual(Point(23, 5), City.objects.get(name='NullCity').point)
        nullcity.save()
        self.assertEqual(Point(23, 5), City.objects.get(name='NullCity').point)
        nullcity.delete()
        ## Testing on a Polygon
        shell = LinearRing((0, 0), (0, 100), (100, 100), (100, 0), (0, 0))
        inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40))
        # Creating a State object using a built Polygon
        ply = Polygon(shell, inner)
        nullstate = State(name='NullState', poly=ply)
        self.assertEqual(4326, nullstate.poly.srid) # SRID auto-set from None
        nullstate.save()
        ns = State.objects.get(name='NullState')
        self.assertEqual(ply, ns.poly)
        # Testing the `ogr` and `srs` lazy-geometry properties.
        if gdal.HAS_GDAL:
            self.assertEqual(True, isinstance(ns.poly.ogr, gdal.OGRGeometry))
            self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb)
            self.assertEqual(True, isinstance(ns.poly.srs, gdal.SpatialReference))
            self.assertEqual('WGS 84', ns.poly.srs.name)
        # Changing the interior ring on the poly attribute.
        # Both the DB-backed copy and the local reference get the new ring so
        # the equality assertion below compares like with like.
        new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30))
        ns.poly[1] = new_inner
        ply[1] = new_inner
        self.assertEqual(4326, ns.poly.srid)
        ns.save()
        self.assertEqual(ply, State.objects.get(name='NullState').poly)
        ns.delete()
    def test03a_kml(self):
        "Testing KML output from the database using GeoQuerySet.kml()."
        # Only PostGIS and Spatialite (>=2.4.0-RC4) support KML serialization
        if not (postgis or (spatialite and connection.ops.kml)):
            self.assertRaises(NotImplementedError, State.objects.all().kml, field_name='poly')
            return
        # Should throw a TypeError when trying to obtain KML from a
        # non-geometry field.
        qs = City.objects.all()
        self.assertRaises(TypeError, qs.kml, 'name')
        # The reference KML depends on the version of PostGIS used
        # (the output stopped including altitude in 1.3.3).
        if connection.ops.spatial_version >= (1, 3, 3):
            ref_kml = '<Point><coordinates>-104.609252,38.255001</coordinates></Point>'
        else:
            ref_kml = '<Point><coordinates>-104.609252,38.255001,0</coordinates></Point>'
        # Ensuring the KML is as expected.
        # kml() with an explicit field_name and with the default must agree.
        ptown1 = City.objects.kml(field_name='point', precision=9).get(name='Pueblo')
        ptown2 = City.objects.kml(precision=9).get(name='Pueblo')
        for ptown in [ptown1, ptown2]:
            self.assertEqual(ref_kml, ptown.kml)
    def test03b_gml(self):
        "Testing GML output from the database using GeoQuerySet.gml()."
        if mysql or (spatialite and not connection.ops.gml) :
            self.assertRaises(NotImplementedError, Country.objects.all().gml, field_name='mpoly')
            return
        # Should throw a TypeError when trying to obtain GML from a
        # non-geometry field.
        qs = City.objects.all()
        self.assertRaises(TypeError, qs.gml, field_name='name')
        ptown1 = City.objects.gml(field_name='point', precision=9).get(name='Pueblo')
        ptown2 = City.objects.gml(precision=9).get(name='Pueblo')
        # Each backend emits slightly different GML, so match with a regex.
        if oracle:
            # No precision parameter for Oracle :-/
            gml_regex = re.compile(r'^<gml:Point srsName="SDO:4326" xmlns:gml="http://www.opengis.net/gml"><gml:coordinates decimal="\." cs="," ts=" ">-104.60925\d+,38.25500\d+ </gml:coordinates></gml:Point>')
        elif spatialite:
            # Spatialite has extra colon in SrsName
            gml_regex = re.compile(r'^<gml:Point SrsName="EPSG::4326"><gml:coordinates decimal="\." cs="," ts=" ">-104.609251\d+,38.255001</gml:coordinates></gml:Point>')
        else:
            gml_regex = re.compile(r'^<gml:Point srsName="EPSG:4326"><gml:coordinates>-104\.60925\d+,38\.255001</gml:coordinates></gml:Point>')
        for ptown in [ptown1, ptown2]:
            self.assertTrue(gml_regex.match(ptown.gml))
    def test03c_geojson(self):
        "Testing GeoJSON output from the database using GeoQuerySet.geojson()."
        # Only PostGIS 1.3.4+ supports GeoJSON.
        if not connection.ops.geojson:
            self.assertRaises(NotImplementedError, Country.objects.all().geojson, field_name='mpoly')
            return
        # PostGIS 1.4 changed the default coordinate precision/format of
        # ST_AsGeoJson output, hence two sets of reference strings.
        if connection.ops.spatial_version >= (1, 4, 0):
            pueblo_json = '{"type":"Point","coordinates":[-104.609252,38.255001]}'
            houston_json = '{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},"coordinates":[-95.363151,29.763374]}'
            victoria_json = '{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],"coordinates":[-123.305196,48.462611]}'
            chicago_json = '{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},"bbox":[-87.65018,41.85039,-87.65018,41.85039],"coordinates":[-87.65018,41.85039]}'
        else:
            pueblo_json = '{"type":"Point","coordinates":[-104.60925200,38.25500100]}'
            houston_json = '{"type":"Point","crs":{"type":"EPSG","properties":{"EPSG":4326}},"coordinates":[-95.36315100,29.76337400]}'
            victoria_json = '{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],"coordinates":[-123.30519600,48.46261100]}'
            chicago_json = '{"type":"Point","crs":{"type":"EPSG","properties":{"EPSG":4326}},"bbox":[-87.65018,41.85039,-87.65018,41.85039],"coordinates":[-87.65018,41.85039]}'
        # Precision argument should only be an integer
        self.assertRaises(TypeError, City.objects.geojson, precision='foo')
        # Reference queries and values.
        # SELECT ST_AsGeoJson("geoapp_city"."point", 8, 0) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Pueblo';
        self.assertEqual(pueblo_json, City.objects.geojson().get(name='Pueblo').geojson)
        # 1.3.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Houston';
        # 1.4.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Houston';
        # This time we want to include the CRS by using the `crs` keyword.
        self.assertEqual(houston_json, City.objects.geojson(crs=True, model_att='json').get(name='Houston').json)
        # 1.3.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Victoria';
        # 1.4.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Victoria';
        # This time we include the bounding box by using the `bbox` keyword.
        self.assertEqual(victoria_json, City.objects.geojson(bbox=True).get(name='Victoria').geojson)
        # 1.(3|4).x: SELECT ST_AsGeoJson("geoapp_city"."point", 5, 3) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Chicago';
        # Finally, we set every available keyword.
        self.assertEqual(chicago_json, City.objects.geojson(bbox=True, crs=True, precision=5).get(name='Chicago').geojson)
    def test03d_svg(self):
        "Testing SVG output using GeoQuerySet.svg()."
        if mysql or oracle:
            self.assertRaises(NotImplementedError, City.objects.svg)
            return
        # The precision argument must be an integer.
        self.assertRaises(TypeError, City.objects.svg, precision='foo')
        # SELECT AsSVG(geoapp_city.point, 0, 8) FROM geoapp_city WHERE name = 'Pueblo';
        svg1 = 'cx="-104.609252" cy="-38.255001"'
        # Even though relative, only one point so it's practically the same except for
        # the 'c' letter prefix on the x,y values.
        svg2 = svg1.replace('c', '')
        self.assertEqual(svg1, City.objects.svg().get(name='Pueblo').svg)
        self.assertEqual(svg2, City.objects.svg(relative=5).get(name='Pueblo').svg)
    @no_mysql
    def test04_transform(self):
        "Testing the transform() GeoManager method."
        # Pre-transformed points for Houston and Pueblo.
        htown = fromstr('POINT(1947516.83115183 6322297.06040572)', srid=3084)
        ptown = fromstr('POINT(992363.390841912 481455.395105533)', srid=2774)
        prec = 3 # Precision is low due to version variations in PROJ and GDAL.
        # Asserting the result of the transform operation with the values in
        # the pre-transformed points. Oracle does not have the 3084 SRID.
        if not oracle:
            h = City.objects.transform(htown.srid).get(name='Houston')
            self.assertEqual(3084, h.point.srid)
            self.assertAlmostEqual(htown.x, h.point.x, prec)
            self.assertAlmostEqual(htown.y, h.point.y, prec)
        # transform() accepts the SRID either positionally or as a keyword.
        p1 = City.objects.transform(ptown.srid, field_name='point').get(name='Pueblo')
        p2 = City.objects.transform(srid=ptown.srid).get(name='Pueblo')
        for p in [p1, p2]:
            self.assertEqual(2774, p.point.srid)
            self.assertAlmostEqual(ptown.x, p.point.x, prec)
            self.assertAlmostEqual(ptown.y, p.point.y, prec)
    @no_mysql
    @no_spatialite # SpatiaLite does not have an Extent function
    def test05_extent(self):
        "Testing the `extent` GeoQuerySet method."
        # Reference query:
        # `SELECT ST_extent(point) FROM geoapp_city WHERE (name='Houston' or name='Dallas');`
        # => BOX(-96.8016128540039 29.7633724212646,-95.3631439208984 32.7820587158203)
        # NOTE(review): the last value below drops the reference's trailing
        # '3'; harmless since only 4 decimal places are compared.
        expected = (-96.8016128540039, 29.7633724212646, -95.3631439208984, 32.782058715820)
        qs = City.objects.filter(name__in=('Houston', 'Dallas'))
        extent = qs.extent()
        for val, exp in zip(extent, expected):
            self.assertAlmostEqual(exp, val, 4)
    # Only PostGIS has support for the MakeLine aggregate.
    @no_mysql
    @no_oracle
    @no_spatialite
    def test06_make_line(self):
        "Testing the `make_line` GeoQuerySet method."
        # Ensuring that a `TypeError` is raised on models without PointFields.
        self.assertRaises(TypeError, State.objects.make_line)
        self.assertRaises(TypeError, Country.objects.make_line)
        # Reference query:
        # SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city;
        ref_line = GEOSGeometry('LINESTRING(-95.363151 29.763374,-96.801611 32.782057,-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001,-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)', srid=4326)
        self.assertEqual(ref_line, City.objects.make_line())
    @no_mysql
    def test09_disjoint(self):
        "Testing the `disjoint` lookup type."
        ptown = City.objects.get(name='Pueblo')
        # 7 of the 8 fixture cities are disjoint from Pueblo's own point.
        qs1 = City.objects.filter(point__disjoint=ptown.point)
        self.assertEqual(7, qs1.count())
        qs2 = State.objects.filter(poly__disjoint=ptown.point)
        self.assertEqual(1, qs2.count())
        self.assertEqual('Kansas', qs2[0].name)
    def test10_contains_contained(self):
        "Testing the 'contained', 'contains', and 'bbcontains' lookup types."
        # Getting Texas, yes we were a country -- once ;)
        texas = Country.objects.get(name='Texas')
        # Seeing what cities are in Texas, should get Houston and Dallas,
        # and Oklahoma City because 'contained' only checks on the
        # _bounding box_ of the Geometries.
        if not oracle:
            qs = City.objects.filter(point__contained=texas.mpoly)
            self.assertEqual(3, qs.count())
            cities = ['Houston', 'Dallas', 'Oklahoma City']
            for c in qs: self.assertEqual(True, c.name in cities)
        # Pulling out some cities.
        houston = City.objects.get(name='Houston')
        wellington = City.objects.get(name='Wellington')
        pueblo = City.objects.get(name='Pueblo')
        okcity = City.objects.get(name='Oklahoma City')
        lawrence = City.objects.get(name='Lawrence')
        # Now testing contains on the countries using the points for
        # Houston and Wellington.
        tx = Country.objects.get(mpoly__contains=houston.point) # Query w/GEOSGeometry
        nz = Country.objects.get(mpoly__contains=wellington.point.hex) # Query w/EWKBHEX
        self.assertEqual('Texas', tx.name)
        self.assertEqual('New Zealand', nz.name)
        # Spatialite 2.3 thinks that Lawrence is in Puerto Rico (a NULL geometry).
        if not spatialite:
            ks = State.objects.get(poly__contains=lawrence.point)
            self.assertEqual('Kansas', ks.name)
        # Pueblo and Oklahoma City (even though OK City is within the bounding box of Texas)
        # are not contained in Texas or New Zealand.
        self.assertEqual(0, len(Country.objects.filter(mpoly__contains=pueblo.point))) # Query w/GEOSGeometry object
        self.assertEqual((mysql and 1) or 0,
                         len(Country.objects.filter(mpoly__contains=okcity.point.wkt))) # Query w/WKT
        # OK City is contained w/in bounding box of Texas.
        if not oracle:
            qs = Country.objects.filter(mpoly__bbcontains=okcity.point)
            self.assertEqual(1, len(qs))
            self.assertEqual('Texas', qs[0].name)
    @no_mysql
    def test11_lookup_insert_transform(self):
        "Testing automatic transform for lookups and inserts."
        # San Antonio in 'WGS84' (SRID 4326)
        sa_4326 = 'POINT (-98.493183 29.424170)'
        wgs_pnt = fromstr(sa_4326, srid=4326) # Our reference point in WGS84
        # Oracle doesn't have SRID 3084, using 41157.
        if oracle:
            # San Antonio in 'Texas 4205, Southern Zone (1983, meters)' (SRID 41157)
            # Used the following Oracle SQL to get this value:
            # SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_CS.TRANSFORM(SDO_GEOMETRY('POINT (-98.493183 29.424170)', 4326), 41157)) FROM DUAL;
            nad_wkt = 'POINT (300662.034646583 5416427.45974934)'
            nad_srid = 41157
        else:
            # San Antonio in 'NAD83(HARN) / Texas Centric Lambert Conformal' (SRID 3084)
            nad_wkt = 'POINT (1645978.362408288754523 6276356.025927528738976)' # Used ogr.py in gdal 1.4.1 for this transform
            nad_srid = 3084
        # Constructing & querying with a point from a different SRID. Oracle
        # `SDO_OVERLAPBDYINTERSECT` operates differently from
        # `ST_Intersects`, so contains is used instead.
        nad_pnt = fromstr(nad_wkt, srid=nad_srid)
        if oracle:
            tx = Country.objects.get(mpoly__contains=nad_pnt)
        else:
            tx = Country.objects.get(mpoly__intersects=nad_pnt)
        self.assertEqual('Texas', tx.name)
        # Creating San Antonio. Remember the Alamo.
        sa = City.objects.create(name='San Antonio', point=nad_pnt)
        # Now verifying that San Antonio was transformed correctly
        # (re-fetch so the stored, auto-transformed coordinates are compared).
        sa = City.objects.get(name='San Antonio')
        self.assertAlmostEqual(wgs_pnt.x, sa.point.x, 6)
        self.assertAlmostEqual(wgs_pnt.y, sa.point.y, 6)
        # If the GeometryField SRID is -1, then we shouldn't perform any
        # transformation if the SRID of the input geometry is different.
        # SpatiaLite does not support missing SRID values.
        if not spatialite:
            m1 = MinusOneSRID(geom=Point(17, 23, srid=4326))
            m1.save()
            self.assertEqual(-1, m1.geom.srid)
@no_mysql
def test12_null_geometries(self):
    "Testing NULL geometry support, and the `isnull` lookup type."
    # Creating a state with a NULL boundary.
    State.objects.create(name='Puerto Rico')
    # Querying for both NULL and Non-NULL values.
    nullqs = State.objects.filter(poly__isnull=True)
    validqs = State.objects.filter(poly__isnull=False)
    # Puerto Rico should be NULL (it's a commonwealth unincorporated territory)
    self.assertEqual(1, len(nullqs))
    self.assertEqual('Puerto Rico', nullqs[0].name)
    # The valid states should be Colorado & Kansas
    self.assertEqual(2, len(validqs))
    state_names = [s.name for s in validqs]
    self.assertTrue('Colorado' in state_names)
    self.assertTrue('Kansas' in state_names)
    # Saving another commonwealth w/a NULL geometry.
    nmi = State.objects.create(name='Northern Mariana Islands', poly=None)
    self.assertEqual(nmi.poly, None)
    # Assigning a geometry and saving -- then UPDATE back to NULL.
    nmi.poly = 'POLYGON((0 0,1 0,1 1,1 0,0 0))'
    nmi.save()
    State.objects.filter(name='Northern Mariana Islands').update(poly=None)
    self.assertEqual(None, State.objects.get(name='Northern Mariana Islands').poly)
# Only PostGIS has `left` and `right` lookup types.
@no_mysql
@no_oracle
@no_spatialite
def test13_left_right(self):
    "Testing the 'left' and 'right' lookup types."
    # Left:  A << B => true if xmax(A) < xmin(B)
    # Right: A >> B => true if xmin(A) > xmax(B)
    # See: BOX2D_left() and BOX2D_right() in lwgeom_box2dfloat4.c in PostGIS source.
    # Getting the borders for Colorado & Kansas
    co_border = State.objects.get(name='Colorado').poly
    ks_border = State.objects.get(name='Kansas').poly
    # These cities should be strictly to the right of the CO border.
    cities = ['Houston', 'Dallas', 'Oklahoma City',
              'Lawrence', 'Chicago', 'Wellington']
    qs = City.objects.filter(point__right=co_border)
    self.assertEqual(6, len(qs))
    for c in qs:
        self.assertTrue(c.name in cities)
    # These cities should be strictly to the right of the KS border.
    cities = ['Chicago', 'Wellington']
    qs = City.objects.filter(point__right=ks_border)
    self.assertEqual(2, len(qs))
    for c in qs:
        self.assertTrue(c.name in cities)
    # Note: Wellington has an 'X' value of 174, so it will not be considered
    # to the left of CO.
    vic = City.objects.get(point__left=co_border)
    self.assertEqual('Victoria', vic.name)
    cities = ['Pueblo', 'Victoria']
    qs = City.objects.filter(point__left=ks_border)
    self.assertEqual(2, len(qs))
    for c in qs:
        self.assertTrue(c.name in cities)
def test14_equals(self):
    "Testing the 'same_as' and 'equals' lookup types."
    # All three lookups should resolve the Houston reference point to
    # the same city.
    pnt = fromstr('POINT (-95.363151 29.763374)', srid=4326)
    matches = (City.objects.get(point=pnt),
               City.objects.get(point__same_as=pnt),
               City.objects.get(point__equals=pnt))
    for city in matches:
        self.assertEqual('Houston', city.name)
@no_mysql
def test15_relate(self):
    "Testing the 'relate' lookup type."
    # To make things more interesting, we will have our Texas reference point in
    # different SRIDs.
    pnt1 = fromstr('POINT (649287.0363174 4177429.4494686)', srid=2847)
    pnt2 = fromstr('POINT(-98.4919715741052 29.4333344025053)', srid=4326)
    # Not passing in a geometry as first param should
    # raise a type error when initializing the GeoQuerySet
    self.assertRaises(ValueError, Country.objects.filter, mpoly__relate=(23, 'foo'))
    # Making sure the right exception is raised for the given
    # bad arguments: a non-string pattern, and too many tuple items.
    for bad_args, e in [((pnt1, 0), ValueError), ((pnt2, 'T*T***FF*', 0), ValueError)]:
        qs = Country.objects.filter(mpoly__relate=bad_args)
        self.assertRaises(e, qs.count)
    # Relate works differently for the different backends: PostGIS and
    # SpatiaLite take DE-9IM intersection-matrix patterns, Oracle takes
    # named SDO_RELATE masks.
    if postgis or spatialite:
        contains_mask = 'T*T***FF*'
        within_mask = 'T*F**F***'
        intersects_mask = 'T********'
    elif oracle:
        contains_mask = 'contains'
        within_mask = 'inside'
        # TODO: This is not quite the same as the PostGIS mask above
        intersects_mask = 'overlapbdyintersect'
    # Testing contains relation mask.
    self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, contains_mask)).name)
    self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, contains_mask)).name)
    # Testing within relation mask.
    ks = State.objects.get(name='Kansas')
    self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, within_mask)).name)
    # Testing intersection relation mask.
    if not oracle:
        self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, intersects_mask)).name)
        self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, intersects_mask)).name)
        self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, intersects_mask)).name)
def test16_createnull(self):
    "Testing creating a model instance and the geometry being None"
    # A freshly constructed (unsaved) City has no geometry assigned.
    self.assertEqual(City().point, None)
@no_mysql
def test17_unionagg(self):
    "Testing the `unionagg` (aggregate union) GeoManager method."
    tx = Country.objects.get(name='Texas').mpoly
    # Houston, Dallas -- Oracle has different order.
    # NOTE(review): union1 and union2 are currently identical, which makes
    # the Oracle branch below a no-op -- confirm whether union2 was meant
    # to list the points in the opposite order.
    union1 = fromstr('MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)')
    union2 = fromstr('MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)')
    qs = City.objects.filter(point__within=tx)
    self.assertRaises(TypeError, qs.unionagg, 'name')
    # Using `field_name` keyword argument in one query and specifying an
    # order in the other (which should not be used because this is
    # an aggregate method on a spatial column).
    u1 = qs.unionagg(field_name='point')
    u2 = qs.order_by('name').unionagg()
    tol = 0.00001
    union = union2 if oracle else union1
    self.assertEqual(True, union.equals_exact(u1, tol))
    self.assertEqual(True, union.equals_exact(u2, tol))
    # Aggregating over an empty queryset yields None.
    qs = City.objects.filter(name='NotACity')
    self.assertEqual(None, qs.unionagg(field_name='point'))
@no_spatialite # SpatiaLite does not support abstract geometry columns
def test18_geometryfield(self):
    "Testing the general GeometryField."
    Feature(name='Point', geom=Point(1, 1)).save()
    Feature(name='LineString', geom=LineString((0, 0), (1, 1), (5, 5))).save()
    Feature(name='Polygon', geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0)))).save()
    Feature(name='GeometryCollection',
            geom=GeometryCollection(Point(2, 2), LineString((0, 0), (2, 2)),
                                    Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))))).save()
    # Each geometry comes back from the database as its concrete GEOS type.
    f_1 = Feature.objects.get(name='Point')
    self.assertTrue(isinstance(f_1.geom, Point))
    self.assertEqual((1.0, 1.0), f_1.geom.tuple)
    f_2 = Feature.objects.get(name='LineString')
    self.assertTrue(isinstance(f_2.geom, LineString))
    self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)), f_2.geom.tuple)
    f_3 = Feature.objects.get(name='Polygon')
    self.assertTrue(isinstance(f_3.geom, Polygon))
    f_4 = Feature.objects.get(name='GeometryCollection')
    self.assertTrue(isinstance(f_4.geom, GeometryCollection))
    # The collection's third element is the same polygon as f_3.
    self.assertEqual(f_3.geom, f_4.geom[2])
@no_mysql
def test19_centroid(self):
    "Testing the `centroid` GeoQuerySet method."
    qs = State.objects.exclude(poly__isnull=True).centroid()
    # Comparison tolerance is backend-dependent.
    if oracle:
        tol = 0.1
    elif spatialite:
        tol = 0.000001
    else:
        tol = 0.000000001
    for s in qs:
        self.assertTrue(s.poly.centroid.equals_exact(s.centroid, tol))
@no_mysql
def test20_pointonsurface(self):
    "Testing the `point_on_surface` GeoQuerySet method."
    # Reference values.
    if oracle:
        # SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_GEOM.SDO_POINTONSURFACE(GEOAPP_COUNTRY.MPOLY, 0.05)) FROM GEOAPP_COUNTRY;
        ref = {'New Zealand' : fromstr('POINT (174.616364 -36.100861)', srid=4326),
               'Texas' : fromstr('POINT (-103.002434 36.500397)', srid=4326),
               }
    elif postgis or spatialite:
        # Using GEOSGeometry to compute the reference point on surface values
        # -- since PostGIS also uses GEOS these should be the same.
        ref = {'New Zealand' : Country.objects.get(name='New Zealand').mpoly.point_on_surface,
               'Texas' : Country.objects.get(name='Texas').mpoly.point_on_surface
               }
    # The tolerance is backend-dependent but loop-invariant; hoisted out
    # of the loop below.
    if spatialite:
        # XXX This seems to be a WKT-translation-related precision issue?
        tol = 0.00001
    else:
        tol = 0.000000001
    for c in Country.objects.point_on_surface():
        self.assertTrue(ref[c.name].equals_exact(c.point_on_surface, tol))
@no_mysql
@no_oracle
def test21_scale(self):
    "Testing the `scale` GeoQuerySet method."
    xfac, yfac = 2, 3
    tol = 5  # XXX The low precision tolerance is for SpatiaLite
    qs = Country.objects.scale(xfac, yfac, model_att='scaled')
    # Every coordinate of the scaled geometry must equal the original
    # coordinate multiplied by its axis factor.
    for country in qs:
        for poly, scaled_poly in zip(country.mpoly, country.scaled):
            for ring, scaled_ring in zip(poly, scaled_poly):
                for c1, c2 in zip(ring.coords, scaled_ring.coords):
                    self.assertAlmostEqual(c1[0] * xfac, c2[0], tol)
                    self.assertAlmostEqual(c1[1] * yfac, c2[1], tol)
@no_mysql
@no_oracle
def test22_translate(self):
    "Testing the `translate` GeoQuerySet method."
    xfac, yfac = 5, -23
    qs = Country.objects.translate(xfac, yfac, model_att='translated')
    # Every coordinate of the translated geometry must equal the original
    # coordinate shifted by its axis offset.
    for country in qs:
        for poly, trans_poly in zip(country.mpoly, country.translated):
            for ring, trans_ring in zip(poly, trans_poly):
                for c1, c2 in zip(ring.coords, trans_ring.coords):
                    # XXX The low precision is for SpatiaLite
                    self.assertAlmostEqual(c1[0] + xfac, c2[0], 5)
                    self.assertAlmostEqual(c1[1] + yfac, c2[1], 5)
@no_mysql
def test23_numgeom(self):
    "Testing the `num_geom` GeoQuerySet method."
    # Both 'countries' only have two geometries.
    for country in Country.objects.num_geom():
        self.assertEqual(2, country.num_geom)
    for city in City.objects.filter(point__isnull=False).num_geom():
        # Oracle will return 1 for the number of geometries on
        # non-collections, whereas PostGIS will return None.
        if postgis:
            self.assertEqual(None, city.num_geom)
        else:
            self.assertEqual(1, city.num_geom)
@no_mysql
@no_spatialite # SpatiaLite can only count vertices in LineStrings
def test24_numpoints(self):
    "Testing the `num_points` GeoQuerySet method."
    for country in Country.objects.num_points():
        self.assertEqual(country.mpoly.num_points, country.num_points)
    if not oracle:
        # Oracle cannot count vertices in Point geometries.
        for city in City.objects.num_points():
            self.assertEqual(1, city.num_points)
@no_mysql
def test25_geoset(self):
    "Testing the `difference`, `intersection`, `sym_difference`, and `union` GeoQuerySet methods."
    geom = Point(5, 23)
    # NOTE(review): `tol` is assigned but never used in this test.
    tol = 1
    qs = Country.objects.all().difference(geom).sym_difference(geom).union(geom)
    # XXX For some reason SpatiaLite does something screwey with the Texas geometry here. Also,
    # XXX it doesn't like the null intersection.
    if spatialite:
        qs = qs.exclude(name='Texas')
    else:
        qs = qs.intersection(geom)
    for c in qs:
        if oracle:
            # Should be able to execute the queries; however, they won't be the same
            # as GEOS (because Oracle doesn't use GEOS internally like PostGIS or
            # SpatiaLite).
            pass
        else:
            # Each annotated attribute must match the GEOS-computed set
            # operation on the stored geometry.
            self.assertEqual(c.mpoly.difference(geom), c.difference)
            if not spatialite:
                self.assertEqual(c.mpoly.intersection(geom), c.intersection)
            self.assertEqual(c.mpoly.sym_difference(geom), c.sym_difference)
            self.assertEqual(c.mpoly.union(geom), c.union)
@no_mysql
def test26_inherited_geofields(self):
    "Test GeoQuerySet methods on inherited Geometry fields."
    # Creating a Pennsylvanian city.
    PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga',
                                    point='POINT(-77.071445 41.823881)')
    # All transformation SQL will need to be performed on the
    # _parent_ table.
    qs = PennsylvaniaCity.objects.transform(32128)
    self.assertEqual(1, qs.count())
    for pc in qs:
        self.assertEqual(32128, pc.point.srid)
@no_mysql
@no_oracle
@no_spatialite
def test27_snap_to_grid(self):
    "Testing GeoQuerySet.snap_to_grid()."
    # Let's try and break snap_to_grid() with bad combinations of arguments:
    # wrong argument counts raise ValueError, wrong types raise TypeError.
    for bad_args in ((), range(3), range(5)):
        self.assertRaises(ValueError, Country.objects.snap_to_grid, *bad_args)
    for bad_args in (('1.0',), (1.0, None), tuple(map(unicode, range(4)))):
        self.assertRaises(TypeError, Country.objects.snap_to_grid, *bad_args)
    # Boundary for San Marino, courtesy of Bjorn Sandvik of thematicmapping.org
    # from the world borders dataset he provides.
    wkt = ('MULTIPOLYGON(((12.41580 43.95795,12.45055 43.97972,12.45389 43.98167,'
           '12.46250 43.98472,12.47167 43.98694,12.49278 43.98917,'
           '12.50555 43.98861,12.51000 43.98694,12.51028 43.98277,'
           '12.51167 43.94333,12.51056 43.93916,12.49639 43.92333,'
           '12.49500 43.91472,12.48778 43.90583,12.47444 43.89722,'
           '12.46472 43.89555,12.45917 43.89611,12.41639 43.90472,'
           '12.41222 43.90610,12.40782 43.91366,12.40389 43.92667,'
           '12.40500 43.94833,12.40889 43.95499,12.41580 43.95795)))')
    # NOTE(review): `sm` is assigned but never used afterwards.
    sm = Country.objects.create(name='San Marino', mpoly=fromstr(wkt))
    # Because floating-point arithmetic isn't exact, we set a tolerance
    # to pass into GEOS `equals_exact`.
    tol = 0.000000001
    # SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.1)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
    ref = fromstr('MULTIPOLYGON(((12.4 44,12.5 44,12.5 43.9,12.4 43.9,12.4 44)))')
    self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.1).get(name='San Marino').snap_to_grid, tol))
    # SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.05, 0.23)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
    ref = fromstr('MULTIPOLYGON(((12.4 43.93,12.45 43.93,12.5 43.93,12.45 43.93,12.4 43.93)))')
    self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.05, 0.23).get(name='San Marino').snap_to_grid, tol))
    # SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.5, 0.17, 0.05, 0.23)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
    ref = fromstr('MULTIPOLYGON(((12.4 43.87,12.45 43.87,12.45 44.1,12.5 44.1,12.5 43.87,12.45 43.87,12.4 43.87)))')
    self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.05, 0.23, 0.5, 0.17).get(name='San Marino').snap_to_grid, tol))
@no_mysql
@no_spatialite
def test28_reverse(self):
    "Testing GeoQuerySet.reverse_geom()."
    coords = [(-95.363151, 29.763374), (-95.448601, 29.713803)]
    Track.objects.create(name='Foo', line=LineString(coords))
    track = Track.objects.reverse_geom().get(name='Foo')
    # The annotated geometry must contain the coordinates in reversed order.
    coords.reverse()
    self.assertEqual(tuple(coords), track.reverse_geom.coords)
    if oracle:
        # Reversing the State geometry column raises TypeError on Oracle.
        self.assertRaises(TypeError, State.objects.reverse_geom)
@no_mysql
@no_oracle
@no_spatialite
def test29_force_rhr(self):
    "Testing GeoQuerySet.force_rhr()."
    # Input rings and the ring orientations expected back from force_rhr().
    rings = (((0, 0), (5, 0), (0, 5), (0, 0)),
             ((1, 1), (1, 3), (3, 1), (1, 1)))
    rhr_rings = (((0, 0), (0, 5), (5, 0), (0, 0)),
                 ((1, 1), (3, 1), (1, 3), (1, 1)))
    State.objects.create(name='Foo', poly=Polygon(*rings))
    st = State.objects.force_rhr().get(name='Foo')
    self.assertEqual(rhr_rings, st.force_rhr.coords)
@no_mysql
@no_oracle
@no_spatialite
def test30_geohash(self):
    "Testing GeoQuerySet.geohash()."
    # Skip entirely when the backend lacks a geohash function.
    if not connection.ops.geohash:
        return
    # Reference query:
    # SELECT ST_GeoHash(point) FROM geoapp_city WHERE name='Houston';
    # SELECT ST_GeoHash(point, 5) FROM geoapp_city WHERE name='Houston';
    ref_hash = '9vk1mfq8jx0c8e0386z6'
    houston_full = City.objects.geohash().get(name='Houston')
    houston_prec5 = City.objects.geohash(precision=5).get(name='Houston')
    self.assertEqual(ref_hash, houston_full.geohash)
    self.assertEqual(ref_hash[:5], houston_prec5.geohash)
from .test_feeds import GeoFeedTest
from .test_regress import GeoRegressionTests
from .test_sitemaps import GeoSitemapTest
|
bsd-3-clause
|
brandonium21/snowflake
|
snowflakeEnv/lib/python2.7/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py
|
113
|
17292
|
# -*- coding: utf-8 -*-
#
# SelfTest/Cipher/test_pkcs1_oaep.py: Self-test for PKCS#1 OAEP encryption
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
from __future__ import nested_scopes
__revision__ = "$Id$"
import unittest
from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex
from Crypto.Util.py3compat import *
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP as PKCS
from Crypto.Hash import MD2,MD5,SHA as SHA1,SHA256,RIPEMD
from Crypto import Random
def rws(t):
    """Return *t* with all spaces, tabs, and newlines removed."""
    return t.replace('\n', '').replace('\t', '').replace(' ', '')
def t2b(t):
    """Convert a text string of hex bytes (whitespace ignored) to a byte string."""
    hexstr = rws(t)
    # a2b_hex needs a whole number of byte pairs.
    if len(hexstr) % 2 != 0:
        raise ValueError("Even number of characters expected")
    return a2b_hex(hexstr)
class PKCS1_OAEP_Tests(unittest.TestCase):
    """Self-tests for PKCS#1 OAEP encryption (RFC 3447, section 7.1).

    Uses the official RSA Security test vectors for encryption/decryption,
    plus round-trip checks on a freshly generated 1024-bit key.
    """

    def setUp(self):
        # Fresh RNG and a 1024-bit key shared by the round-trip tests.
        self.rng = Random.new().read
        self.key1024 = RSA.generate(1024, self.rng)

    # List of tuples with test data for PKCS#1 OAEP.
    # Each tuple is made up by:
    #   Item #0: dictionary with RSA key components
    #   Item #1: plaintext
    #   Item #2: ciphertext
    #   Item #3: random data (=seed)
    #   Item #4: hash object
    _testData = (

            #
            # From in oaep-int.txt to be found in
            # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip
            #
            (
            # Private key
            {
                'n':'''bb f8 2f 09 06 82 ce 9c 23 38 ac 2b 9d a8 71 f7
                       36 8d 07 ee d4 10 43 a4 40 d6 b6 f0 74 54 f5 1f
                       b8 df ba af 03 5c 02 ab 61 ea 48 ce eb 6f cd 48
                       76 ed 52 0d 60 e1 ec 46 19 71 9d 8a 5b 8b 80 7f
                       af b8 e0 a3 df c7 37 72 3e e6 b4 b7 d9 3a 25 84
                       ee 6a 64 9d 06 09 53 74 88 34 b2 45 45 98 39 4e
                       e0 aa b1 2d 7b 61 a5 1f 52 7a 9a 41 f6 c1 68 7f
                       e2 53 72 98 ca 2a 8f 59 46 f8 e5 fd 09 1d bd cb''',
                # Public key
                'e':'11',
                # In the test vector, only p and q were given...
                # d is computed offline as e^{-1} mod (p-1)(q-1)
                'd':'''a5dafc5341faf289c4b988db30c1cdf83f31251e0
                       668b42784813801579641b29410b3c7998d6bc465745e5c3
                       92669d6870da2c082a939e37fdcb82ec93edac97ff3ad595
                       0accfbc111c76f1a9529444e56aaf68c56c092cd38dc3bef
                       5d20a939926ed4f74a13eddfbe1a1cecc4894af9428c2b7b
                       8883fe4463a4bc85b1cb3c1'''
            },
            # Plaintext
            '''d4 36 e9 95 69 fd 32 a7 c8 a0 5b bc 90 d3 2c 49''',
            # Ciphertext
            '''12 53 e0 4d c0 a5 39 7b b4 4a 7a b8 7e 9b f2 a0
               39 a3 3d 1e 99 6f c8 2a 94 cc d3 00 74 c9 5d f7
               63 72 20 17 06 9e 52 68 da 5d 1c 0b 4f 87 2c f6
               53 c1 1d f8 23 14 a6 79 68 df ea e2 8d ef 04 bb
               6d 84 b1 c3 1d 65 4a 19 70 e5 78 3b d6 eb 96 a0
               24 c2 ca 2f 4a 90 fe 9f 2e f5 c9 c1 40 e5 bb 48
               da 95 36 ad 87 00 c8 4f c9 13 0a de a7 4e 55 8d
               51 a7 4d df 85 d8 b5 0d e9 68 38 d6 06 3e 09 55''',
            # Random
            '''aa fd 12 f6 59 ca e6 34 89 b4 79 e5 07 6d de c2
               f0 6c b5 8f''',
            # Hash
            SHA1,
            ),

            #
            # From in oaep-vect.txt to be found in Example 1.1
            # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip
            #
            (
            # Private key
            {
                'n':'''a8 b3 b2 84 af 8e b5 0b 38 70 34 a8 60 f1 46 c4
                       91 9f 31 87 63 cd 6c 55 98 c8 ae 48 11 a1 e0 ab
                       c4 c7 e0 b0 82 d6 93 a5 e7 fc ed 67 5c f4 66 85
                       12 77 2c 0c bc 64 a7 42 c6 c6 30 f5 33 c8 cc 72
                       f6 2a e8 33 c4 0b f2 58 42 e9 84 bb 78 bd bf 97
                       c0 10 7d 55 bd b6 62 f5 c4 e0 fa b9 84 5c b5 14
                       8e f7 39 2d d3 aa ff 93 ae 1e 6b 66 7b b3 d4 24
                       76 16 d4 f5 ba 10 d4 cf d2 26 de 88 d3 9f 16 fb''',
                'e':'''01 00 01''',
                'd':'''53 33 9c fd b7 9f c8 46 6a 65 5c 73 16 ac a8 5c
                       55 fd 8f 6d d8 98 fd af 11 95 17 ef 4f 52 e8 fd
                       8e 25 8d f9 3f ee 18 0f a0 e4 ab 29 69 3c d8 3b
                       15 2a 55 3d 4a c4 d1 81 2b 8b 9f a5 af 0e 7f 55
                       fe 73 04 df 41 57 09 26 f3 31 1f 15 c4 d6 5a 73
                       2c 48 31 16 ee 3d 3d 2d 0a f3 54 9a d9 bf 7c bf
                       b7 8a d8 84 f8 4d 5b eb 04 72 4d c7 36 9b 31 de
                       f3 7d 0c f5 39 e9 cf cd d3 de 65 37 29 ea d5 d1 '''
            },
            # Plaintext
            '''66 28 19 4e 12 07 3d b0 3b a9 4c da 9e f9 53 23
               97 d5 0d ba 79 b9 87 00 4a fe fe 34''',
            # Ciphertext
            '''35 4f e6 7b 4a 12 6d 5d 35 fe 36 c7 77 79 1a 3f
               7b a1 3d ef 48 4e 2d 39 08 af f7 22 fa d4 68 fb
               21 69 6d e9 5d 0b e9 11 c2 d3 17 4f 8a fc c2 01
               03 5f 7b 6d 8e 69 40 2d e5 45 16 18 c2 1a 53 5f
               a9 d7 bf c5 b8 dd 9f c2 43 f8 cf 92 7d b3 13 22
               d6 e8 81 ea a9 1a 99 61 70 e6 57 a0 5a 26 64 26
               d9 8c 88 00 3f 84 77 c1 22 70 94 a0 d9 fa 1e 8c
               40 24 30 9c e1 ec cc b5 21 00 35 d4 7a c7 2e 8a''',
            # Random
            '''18 b7 76 ea 21 06 9d 69 77 6a 33 e9 6b ad 48 e1
               dd a0 a5 ef''',
            SHA1
            ),

            #
            # From in oaep-vect.txt to be found in Example 2.1
            # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip
            #
            (
            # Private key
            {
                'n':'''01 94 7c 7f ce 90 42 5f 47 27 9e 70 85 1f 25 d5
                       e6 23 16 fe 8a 1d f1 93 71 e3 e6 28 e2 60 54 3e
                       49 01 ef 60 81 f6 8c 0b 81 41 19 0d 2a e8 da ba
                       7d 12 50 ec 6d b6 36 e9 44 ec 37 22 87 7c 7c 1d
                       0a 67 f1 4b 16 94 c5 f0 37 94 51 a4 3e 49 a3 2d
                       de 83 67 0b 73 da 91 a1 c9 9b c2 3b 43 6a 60 05
                       5c 61 0f 0b af 99 c1 a0 79 56 5b 95 a3 f1 52 66
                       32 d1 d4 da 60 f2 0e da 25 e6 53 c4 f0 02 76 6f
                       45''',
                'e':'''01 00 01''',
                'd':'''08 23 f2 0f ad b5 da 89 08 8a 9d 00 89 3e 21 fa
                       4a 1b 11 fb c9 3c 64 a3 be 0b aa ea 97 fb 3b 93
                       c3 ff 71 37 04 c1 9c 96 3c 1d 10 7a ae 99 05 47
                       39 f7 9e 02 e1 86 de 86 f8 7a 6d de fe a6 d8 cc
                       d1 d3 c8 1a 47 bf a7 25 5b e2 06 01 a4 a4 b2 f0
                       8a 16 7b 5e 27 9d 71 5b 1b 45 5b dd 7e ab 24 59
                       41 d9 76 8b 9a ce fb 3c cd a5 95 2d a3 ce e7 25
                       25 b4 50 16 63 a8 ee 15 c9 e9 92 d9 24 62 fe 39'''
            },
            # Plaintext
            '''8f f0 0c aa 60 5c 70 28 30 63 4d 9a 6c 3d 42 c6
               52 b5 8c f1 d9 2f ec 57 0b ee e7''',
            # Ciphertext
            '''01 81 af 89 22 b9 fc b4 d7 9d 92 eb e1 98 15 99
               2f c0 c1 43 9d 8b cd 49 13 98 a0 f4 ad 3a 32 9a
               5b d9 38 55 60 db 53 26 83 c8 b7 da 04 e4 b1 2a
               ed 6a ac df 47 1c 34 c9 cd a8 91 ad dc c2 df 34
               56 65 3a a6 38 2e 9a e5 9b 54 45 52 57 eb 09 9d
               56 2b be 10 45 3f 2b 6d 13 c5 9c 02 e1 0f 1f 8a
               bb 5d a0 d0 57 09 32 da cf 2d 09 01 db 72 9d 0f
               ef cc 05 4e 70 96 8e a5 40 c8 1b 04 bc ae fe 72
               0e''',
            # Random
            '''8c 40 7b 5e c2 89 9e 50 99 c5 3e 8c e7 93 bf 94
               e7 1b 17 82''',
            SHA1
            ),

            #
            # From in oaep-vect.txt to be found in Example 10.1
            # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip
            #
            (
            # Private key
            {
                'n':'''ae 45 ed 56 01 ce c6 b8 cc 05 f8 03 93 5c 67 4d
                       db e0 d7 5c 4c 09 fd 79 51 fc 6b 0c ae c3 13 a8
                       df 39 97 0c 51 8b ff ba 5e d6 8f 3f 0d 7f 22 a4
                       02 9d 41 3f 1a e0 7e 4e be 9e 41 77 ce 23 e7 f5
                       40 4b 56 9e 4e e1 bd cf 3c 1f b0 3e f1 13 80 2d
                       4f 85 5e b9 b5 13 4b 5a 7c 80 85 ad ca e6 fa 2f
                       a1 41 7e c3 76 3b e1 71 b0 c6 2b 76 0e de 23 c1
                       2a d9 2b 98 08 84 c6 41 f5 a8 fa c2 6b da d4 a0
                       33 81 a2 2f e1 b7 54 88 50 94 c8 25 06 d4 01 9a
                       53 5a 28 6a fe b2 71 bb 9b a5 92 de 18 dc f6 00
                       c2 ae ea e5 6e 02 f7 cf 79 fc 14 cf 3b dc 7c d8
                       4f eb bb f9 50 ca 90 30 4b 22 19 a7 aa 06 3a ef
                       a2 c3 c1 98 0e 56 0c d6 4a fe 77 95 85 b6 10 76
                       57 b9 57 85 7e fd e6 01 09 88 ab 7d e4 17 fc 88
                       d8 f3 84 c4 e6 e7 2c 3f 94 3e 0c 31 c0 c4 a5 cc
                       36 f8 79 d8 a3 ac 9d 7d 59 86 0e aa da 6b 83 bb''',
                'e':'''01 00 01''',
                'd':'''05 6b 04 21 6f e5 f3 54 ac 77 25 0a 4b 6b 0c 85
                       25 a8 5c 59 b0 bd 80 c5 64 50 a2 2d 5f 43 8e 59
                       6a 33 3a a8 75 e2 91 dd 43 f4 8c b8 8b 9d 5f c0
                       d4 99 f9 fc d1 c3 97 f9 af c0 70 cd 9e 39 8c 8d
                       19 e6 1d b7 c7 41 0a 6b 26 75 df bf 5d 34 5b 80
                       4d 20 1a dd 50 2d 5c e2 df cb 09 1c e9 99 7b be
                       be 57 30 6f 38 3e 4d 58 81 03 f0 36 f7 e8 5d 19
                       34 d1 52 a3 23 e4 a8 db 45 1d 6f 4a 5b 1b 0f 10
                       2c c1 50 e0 2f ee e2 b8 8d ea 4a d4 c1 ba cc b2
                       4d 84 07 2d 14 e1 d2 4a 67 71 f7 40 8e e3 05 64
                       fb 86 d4 39 3a 34 bc f0 b7 88 50 1d 19 33 03 f1
                       3a 22 84 b0 01 f0 f6 49 ea f7 93 28 d4 ac 5c 43
                       0a b4 41 49 20 a9 46 0e d1 b7 bc 40 ec 65 3e 87
                       6d 09 ab c5 09 ae 45 b5 25 19 01 16 a0 c2 61 01
                       84 82 98 50 9c 1c 3b f3 a4 83 e7 27 40 54 e1 5e
                       97 07 50 36 e9 89 f6 09 32 80 7b 52 57 75 1e 79'''
            },
            # Plaintext
            '''8b ba 6b f8 2a 6c 0f 86 d5 f1 75 6e 97 95 68 70
               b0 89 53 b0 6b 4e b2 05 bc 16 94 ee''',
            # Ciphertext
            '''53 ea 5d c0 8c d2 60 fb 3b 85 85 67 28 7f a9 15
               52 c3 0b 2f eb fb a2 13 f0 ae 87 70 2d 06 8d 19
               ba b0 7f e5 74 52 3d fb 42 13 9d 68 c3 c5 af ee
               e0 bf e4 cb 79 69 cb f3 82 b8 04 d6 e6 13 96 14
               4e 2d 0e 60 74 1f 89 93 c3 01 4b 58 b9 b1 95 7a
               8b ab cd 23 af 85 4f 4c 35 6f b1 66 2a a7 2b fc
               c7 e5 86 55 9d c4 28 0d 16 0c 12 67 85 a7 23 eb
               ee be ff 71 f1 15 94 44 0a ae f8 7d 10 79 3a 87
               74 a2 39 d4 a0 4c 87 fe 14 67 b9 da f8 52 08 ec
               6c 72 55 79 4a 96 cc 29 14 2f 9a 8b d4 18 e3 c1
               fd 67 34 4b 0c d0 82 9d f3 b2 be c6 02 53 19 62
               93 c6 b3 4d 3f 75 d3 2f 21 3d d4 5c 62 73 d5 05
               ad f4 cc ed 10 57 cb 75 8f c2 6a ee fa 44 12 55
               ed 4e 64 c1 99 ee 07 5e 7f 16 64 61 82 fd b4 64
               73 9b 68 ab 5d af f0 e6 3e 95 52 01 68 24 f0 54
               bf 4d 3c 8c 90 a9 7b b6 b6 55 32 84 eb 42 9f cc''',
            # Random
            '''47 e1 ab 71 19 fe e5 6c 95 ee 5e aa d8 6f 40 d0
               aa 63 bd 33''',
            SHA1
            ),
    )

    def testEncrypt1(self):
        # Verify encryption using all test vectors.
        for test in self._testData:
            # Build the public key (n, e).
            comps = [ long(rws(test[0][x]),16) for x in ('n','e') ]
            key = RSA.construct(comps)

            # Deterministic "RNG" that replays the seed bytes given at
            # initialization, so OAEP padding matches the test vector.
            class randGen:
                def __init__(self, data):
                    self.data = data
                    self.idx = 0
                def __call__(self, N):
                    # BUG FIX: the slice must advance through the pool.
                    # `self.data[self.idx:N]` returned a shrinking window
                    # (and eventually empty strings) once idx > 0.
                    r = self.data[self.idx:self.idx+N]
                    self.idx += N
                    return r

            # The real test
            key._randfunc = randGen(t2b(test[3]))
            cipher = PKCS.new(key, test[4])
            ct = cipher.encrypt(t2b(test[1]))
            self.assertEqual(ct, t2b(test[2]))

    def testEncrypt2(self):
        # Verify that encryption fails if plaintext is too long:
        # one byte over the 128-2*20-2 limit for a 1024-bit key w/SHA-1.
        pt = '\x00'*(128-2*20-2+1)
        cipher = PKCS.new(self.key1024)
        self.assertRaises(ValueError, cipher.encrypt, pt)

    def testDecrypt1(self):
        # Verify decryption using all test vectors.
        for test in self._testData:
            # Build the private key (n, e, d).
            comps = [ long(rws(test[0][x]),16) for x in ('n','e','d') ]
            key = RSA.construct(comps)
            # The real test
            cipher = PKCS.new(key, test[4])
            pt = cipher.decrypt(t2b(test[2]))
            self.assertEqual(pt, t2b(test[1]))

    def testDecrypt2(self):
        # Simplest possible negative tests: all-zero "ciphertexts" around
        # the modulus size must be rejected.
        for ct_size in (127,128,129):
            cipher = PKCS.new(self.key1024)
            self.assertRaises(ValueError, cipher.decrypt, bchr(0x00)*ct_size)

    def testEncryptDecrypt1(self):
        # Encrypt/Decrypt messages of length [0..128-2*20-2].
        # BUG FIX: use the PKCS.new() cipher-object API; the PKCS1_OAEP
        # module has no module-level encrypt()/decrypt() functions (this
        # test previously never ran because it was shadowed -- see below).
        cipher = PKCS.new(self.key1024)
        for pt_len in xrange(0,128-2*20-2):
            pt = self.rng(pt_len)
            ct = cipher.encrypt(pt)
            self.assertEqual(pt, cipher.decrypt(ct))

    def testEncryptDecrypt2(self):
        # BUG FIX: this test was also named testEncryptDecrypt1, silently
        # shadowing the round-trip test above so it never executed.
        # Renamed (and the label/MGF tests below shifted to 3 and 4).
        #
        # Helper function to monitor what's requested from RNG.
        global asked
        def localRng(N):
            global asked
            asked += N
            return self.rng(N)
        # Verify that OAEP is friendly to all hashes.
        for hashmod in (MD2,MD5,SHA1,SHA256,RIPEMD):
            # Verify that encrypt() asks for as many random bytes
            # as the hash output size.
            asked = 0
            pt = self.rng(40)
            self.key1024._randfunc = localRng
            cipher = PKCS.new(self.key1024, hashmod)
            ct = cipher.encrypt(pt)
            self.assertEqual(cipher.decrypt(ct), pt)
            self.assertTrue(asked > hashmod.digest_size)

    def testEncryptDecrypt3(self):
        # Verify that OAEP supports labels.
        pt = self.rng(35)
        xlabel = self.rng(22)
        cipher = PKCS.new(self.key1024, label=xlabel)
        ct = cipher.encrypt(pt)
        self.assertEqual(cipher.decrypt(ct), pt)

    def testEncryptDecrypt4(self):
        # Verify that encrypt() uses the custom MGF.
        global mgfcalls
        # Helper function to monitor what's requested from MGF.
        def newMGF(seed,maskLen):
            global mgfcalls
            mgfcalls += 1
            return bchr(0x00)*maskLen
        mgfcalls = 0
        pt = self.rng(32)
        cipher = PKCS.new(self.key1024, mgfunc=newMGF)
        ct = cipher.encrypt(pt)
        # The MGF is invoked once for the seed mask and once for the DB mask.
        self.assertEqual(mgfcalls, 2)
        self.assertEqual(cipher.decrypt(ct), pt)
def get_tests(config={}):
    """Return the test cases defined in this module.

    The *config* argument is accepted but unused.
    """
    return list_test_cases(PKCS1_OAEP_Tests)
if __name__ == '__main__':
    # unittest resolves `defaultTest` by attribute name; a named function
    # is the idiomatic form (a lambda bound to a name works but is opaque).
    def suite():
        return unittest.TestSuite(get_tests())
    unittest.main(defaultTest='suite')

# vim:set ts=4 sw=4 sts=4 expandtab:
|
bsd-2-clause
|
ftl-toolbox/lib_openshift
|
lib_openshift/models/v1_template.py
|
2
|
8698
|
# coding: utf-8
"""
OpenAPI spec version:
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from pprint import pformat
from six import iteritems
import re
class V1Template(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
# Mapping of this model onto the OpenShift OAPI v1 client operations that
# create, replace, delete, or read a Template; consumed by the dispatch
# layer of the generated client.
operations = [
    {
        'class': 'OapiV1',
        'type': 'create',
        'method': 'create_namespaced_template',
        'namespaced': True
    },
    {
        'class': 'OapiV1',
        'type': 'create',
        'method': 'create_template',
        'namespaced': False
    },
    {
        'class': 'OapiV1',
        'type': 'update',
        'method': 'replace_namespaced_template',
        'namespaced': True
    },
    {
        'class': 'OapiV1',
        'type': 'delete',
        'method': 'delete_namespaced_template',
        'namespaced': True
    },
    {
        'class': 'OapiV1',
        'type': 'read',
        'method': 'get_namespaced_template',
        'namespaced': True
    },
]

# The key is attribute name
# and the value is attribute type.
swagger_types = {
    'kind': 'str',
    'api_version': 'str',
    'metadata': 'V1ObjectMeta',
    'objects': 'list[RuntimeRawExtension]',
    'parameters': 'list[V1Parameter]',
    'labels': 'object'
}

# The key is attribute name
# and the value is json key in definition.
attribute_map = {
    'kind': 'kind',
    'api_version': 'apiVersion',
    'metadata': 'metadata',
    'objects': 'objects',
    'parameters': 'parameters',
    'labels': 'labels'
}
def __init__(self, kind=None, api_version=None, metadata=None, objects=None, parameters=None, labels=None):
    """
    V1Template - a model defined in Swagger.

    Every argument is optional; each is stored on its private backing
    attribute, and all later access goes through the matching property.
    """
    self._kind = kind
    self._api_version = api_version
    self._metadata = metadata
    self._objects = objects
    self._parameters = parameters
    self._labels = labels
@property
def kind(self):
    """
    Gets the kind of this V1Template.

    Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/release-1.2/docs/devel/api-conventions.md#types-kinds

    :return: The kind of this V1Template.
    :rtype: str
    """
    return self._kind

@kind.setter
def kind(self, kind):
    """
    Sets the kind of this V1Template (see the getter for semantics).

    :param kind: The kind of this V1Template.
    :type: str
    """
    self._kind = kind
@property
def api_version(self):
    """str: the versioned schema of this representation of an object.

    Servers should convert recognized schemas to the latest internal
    value, and may reject unrecognized values.  More info:
    http://releases.k8s.io/release-1.2/docs/devel/api-conventions.md#resources
    """
    return self._api_version

@api_version.setter
def api_version(self, api_version):
    """Set the api_version of this V1Template (str)."""
    self._api_version = api_version
@property
def metadata(self):
    """V1ObjectMeta: standard object's metadata for this V1Template."""
    return self._metadata

@metadata.setter
def metadata(self, metadata):
    """Set the standard object metadata of this V1Template (V1ObjectMeta)."""
    self._metadata = metadata
@property
def objects(self):
    """list[RuntimeRawExtension]: objects to include in this template.

    Required.
    """
    return self._objects

@objects.setter
def objects(self, objects):
    """Set the objects of this V1Template (list[RuntimeRawExtension])."""
    self._objects = objects
@property
def parameters(self):
    """list[V1Parameter]: parameters used during the Template to Config
    transformation.  Optional.
    """
    return self._parameters

@parameters.setter
def parameters(self, parameters):
    """Set the parameters of this V1Template (list[V1Parameter])."""
    self._parameters = parameters
@property
def labels(self):
    """object: labels applied to every object during the Template to
    Config transformation.  Optional.
    """
    return self._labels

@labels.setter
def labels(self, labels):
    """Set the labels of this V1Template (object)."""
    self._labels = labels
def to_dict(self):
    """
    Returns the model properties as a dict.

    Nested models (anything exposing to_dict) are serialized recursively,
    inside both lists and dicts.
    """
    result = {}
    # Use self.swagger_types rather than the hard-coded class attribute so
    # subclasses serialize their own declared attributes; dict.items()
    # works on both Python 2 and 3, unlike the removed iteritems().
    for attr in self.swagger_types:
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = [v.to_dict() if hasattr(v, "to_dict") else v
                            for v in value]
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = dict(
                (k, v.to_dict() if hasattr(v, "to_dict") else v)
                for k, v in value.items())
        else:
            result[attr] = value
    return result
def to_str(self):
    """Return a pretty-printed string of the model's properties."""
    properties = self.to_dict()
    return pformat(properties)
def __repr__(self):
    """
    For `print` and `pprint`

    Delegates to to_str(), which pretty-prints the to_dict() form.
    """
    return self.to_str()
def __eq__(self, other):
    """
    Returns true if both objects are equal.

    Guards against comparing with objects that lack __dict__ (e.g. None),
    which previously raised AttributeError instead of returning False.
    """
    if not isinstance(other, self.__class__):
        return NotImplemented
    return self.__dict__ == other.__dict__
def __ne__(self, other):
    """
    Returns true if both objects are not equal.

    Defined explicitly because Python 2 does not derive != from ==.
    """
    return not self == other
|
apache-2.0
|
beswarm/django-allauth
|
allauth/socialaccount/providers/xing/views.py
|
65
|
1258
|
import json
from allauth.socialaccount.providers.oauth.client import OAuth
from allauth.socialaccount.providers.oauth.views import (OAuthAdapter,
OAuthLoginView,
OAuthCallbackView)
from .provider import XingProvider
class XingAPI(OAuth):
    """Minimal OAuth client for the XING user-profile endpoint."""

    url = 'https://api.xing.com/v1/users/me.json'

    def get_user_info(self):
        # query() returns the raw JSON payload for the authenticated user.
        raw = self.query(self.url)
        return json.loads(raw)
class XingOAuthAdapter(OAuthAdapter):
    """OAuth adapter wiring the XING API endpoints into allauth."""

    provider_id = XingProvider.id
    request_token_url = 'https://api.xing.com/v1/request_token'
    access_token_url = 'https://api.xing.com/v1/access_token'
    authorize_url = 'https://www.xing.com/v1/authorize'

    def complete_login(self, request, app, token, response):
        client = XingAPI(
            request, app.client_id, app.secret, self.request_token_url)
        # The payload carries a 'users' list; the first entry is the
        # authenticated user's profile.
        extra_data = client.get_user_info()['users'][0]
        return self.get_provider().sociallogin_from_response(
            request, extra_data)
# Module-level view callables referenced from the provider's urls.py to
# start and complete the OAuth dance.
oauth_login = OAuthLoginView.adapter_view(XingOAuthAdapter)
oauth_callback = OAuthCallbackView.adapter_view(XingOAuthAdapter)
|
mit
|
caisq/tensorflow
|
tensorflow/contrib/kafka/python/ops/kafka_dataset_ops.py
|
19
|
2777
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Kafka Dataset."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.kafka.python.ops import kafka_op_loader # pylint: disable=unused-import
from tensorflow.contrib.kafka.python.ops import gen_dataset_ops
from tensorflow.python.data.ops.dataset_ops import Dataset
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
class KafkaDataset(Dataset):
  """A Dataset that emits messages consumed from Kafka topics."""

  def __init__(self,
               topics,
               servers="localhost",
               group="",
               eof=False,
               timeout=1000):
    """Create a KafkaReader.

    Args:
      topics: A `tf.string` tensor containing one or more subscriptions,
              in the format of [topic:partition:offset:length],
              by default length is -1 for unlimited.
      servers: A list of bootstrap servers.
      group: The consumer group id.
      eof: If True, the kafka reader will stop on EOF.
      timeout: The timeout value for the Kafka Consumer to wait
               (in millisecond).
    """
    super(KafkaDataset, self).__init__()

    def _as_string_tensor(value, name):
      # All string-typed constructor inputs are converted identically.
      return ops.convert_to_tensor(value, dtype=dtypes.string, name=name)

    self._topics = _as_string_tensor(topics, "topics")
    self._servers = _as_string_tensor(servers, "servers")
    self._group = _as_string_tensor(group, "group")
    self._eof = ops.convert_to_tensor(eof, dtype=dtypes.bool, name="eof")
    self._timeout = ops.convert_to_tensor(
        timeout, dtype=dtypes.int64, name="timeout")

  def _as_variant_tensor(self):
    # Hands the configuration tensors to the C++ kernel backing the op.
    return gen_dataset_ops.kafka_dataset(self._topics, self._servers,
                                         self._group, self._eof,
                                         self._timeout)

  @property
  def output_classes(self):
    return ops.Tensor

  @property
  def output_shapes(self):
    return tensor_shape.scalar()

  @property
  def output_types(self):
    return dtypes.string
|
apache-2.0
|
eramirem/astroML
|
book_figures/chapter7/fig_spec_reconstruction.py
|
3
|
3410
|
"""
PCA Reconstruction of a spectrum
--------------------------------
Figure 7.6
The reconstruction of a particular spectrum from its eigenvectors. The input
spectrum is shown in gray, and the partial reconstruction for progressively
more terms is shown in black. The top panel shows only the mean of the set of
spectra. By the time 20 PCA components are added, the reconstruction is very
close to the input, as indicated by the expected total variance of 94%.
"""
# Author: Jake VanderPlas
# License: BSD
# The figure produced by this code is published in the textbook
# "Statistics, Data Mining, and Machine Learning in Astronomy" (2013)
# For more information, see http://astroML.github.com
# To report a bug or issue, use the following forum:
# https://groups.google.com/forum/#!forum/astroml-general
import numpy as np
from matplotlib import pyplot as plt
from sklearn.decomposition import PCA
from astroML.datasets import sdss_corrected_spectra
from astroML.decorators import pickle_results
#----------------------------------------------------------------------
# This function adjusts matplotlib settings for a uniform feel in the textbook.
# Note that with usetex=True, fonts are rendered with LaTeX.  This may
# result in an error if LaTeX is not installed on your system.  In that case,
# you can set usetex to False.
from astroML.plotting import setup_text_plots
setup_text_plots(fontsize=8, usetex=True)

#------------------------------------------------------------
# Download data
data = sdss_corrected_spectra.fetch_sdss_corrected_spectra()
spectra = sdss_corrected_spectra.reconstruct_spectra(data)
wavelengths = sdss_corrected_spectra.compute_wavelengths(data)

#------------------------------------------------------------
# Compute PCA components

# Eigenvalues can be computed using PCA as in the commented code below:

#from sklearn.decomposition import PCA
#pca = PCA()
#pca.fit(spectra)
#evals = pca.explained_variance_ratio_
#evals_cs = evals.cumsum()

#  because the spectra have been reconstructed from masked values, this
#  is not exactly correct in this case: we'll use the values computed
#  in the file compute_sdss_pca.py
evals = data['evals'] ** 2
evals_cs = evals.cumsum()
evals_cs /= evals_cs[-1]          # normalize so the last entry is 1 (cumulative variance fraction)
evecs = data['evecs']
spec_mean = spectra.mean(0)

#------------------------------------------------------------
# Find the coefficients of a particular spectrum
spec = spectra[1]
coeff = np.dot(evecs, spec - spec_mean)

#------------------------------------------------------------
# Plot the sequence of reconstructions
fig = plt.figure(figsize=(5, 5))
fig.subplots_adjust(hspace=0, top=0.95, bottom=0.1, left=0.12, right=0.93)

for i, n in enumerate([0, 4, 8, 20]):
    ax = fig.add_subplot(411 + i)
    ax.plot(wavelengths, spec, '-', c='gray')
    # Partial reconstruction: mean plus the first n principal components.
    ax.plot(wavelengths, spec_mean + np.dot(coeff[:n], evecs[:n]), '-k')

    if i < 3:
        ax.xaxis.set_major_formatter(plt.NullFormatter())

    ax.set_ylim(-2, 21)
    ax.set_ylabel('flux')

    if n == 0:
        text = "mean"
    elif n == 1:
        # NOTE(review): unreachable with the current n list [0, 4, 8, 20];
        # kept for generality if the list is edited.
        text = "mean + 1 component\n"
        text += r"$(\sigma^2_{tot} = %.2f)$" % evals_cs[n - 1]
    else:
        text = "mean + %i components\n" % n
        text += r"$(\sigma^2_{tot} = %.2f)$" % evals_cs[n - 1]

    ax.text(0.02, 0.93, text, ha='left', va='top', transform=ax.transAxes)

fig.axes[-1].set_xlabel(r'${\rm wavelength\ (\AA)}$')
plt.show()
|
bsd-2-clause
|
mikemccann/stoqs
|
utils/utils.py
|
4
|
13897
|
# A collection of various utility functions
import logging
from datetime import datetime
logger = logging.getLogger(__name__)
# An epoch good for time axis labels - OceanSITES uses 1 Jan 1950
EPOCH_STRING = '1950-01-01'
EPOCH_DATETIME = datetime(1950, 1, 1)
def round_to_n(x, n):
    '''
    Round x to n significant digits.

    :param x: a number, or a list/tuple of numbers
    :param n: number of significant digits (must be >= 1)
    :return: float rounded to n significant digits, or a list of such
             floats when x is a list or tuple
    :raises ValueError: if n < 1
    '''
    if n < 1:
        raise ValueError("number of significant digits must be >= 1")

    def _round_scalar(value):
        # The %e format keeps exactly n significant digits as a string.
        return float(('%.' + str(n - 1) + 'e') % value)

    # isinstance (not type comparison) also accepts subclasses; the scalar
    # logic is shared instead of duplicated for the sequence case.
    if isinstance(x, (list, tuple)):
        return [_round_scalar(xi) for xi in x]
    return _round_scalar(x)
def addAttributeToListItems(list_to_modify, name, value):
    '''
    For each item in list_to_modify, add new attribute name with value value.
    Useful for modifying a django queryset before passing to a template.

    Note: the items themselves are mutated; the returned list holds the
    same objects, not copies.
    '''
    annotated = []
    for elem in list_to_modify:
        setattr(elem, name, value)
        annotated.append(elem)
    return annotated
#
# Methods that return checkbox selections made on the UI, called by STOQSQueryManager and MPQuery
#
def getGet_Actual_Count(kwargs):
    '''
    Return state (bool) of the Get Actual Count checkbox from the query UI.
    '''
    # dict.get() + truthiness replaces dict.has_key(), which was removed
    # in Python 3; behavior is identical (missing or falsy -> False).
    return bool(kwargs.get('get_actual_count'))
def getShow_Sigmat_Parameter_Values(kwargs):
    '''
    Return state (bool) of the showsigmatparametervalues checkbox from the query UI.
    '''
    # dict.get() + truthiness replaces dict.has_key() (removed in Python 3).
    return bool(kwargs.get('showsigmatparametervalues'))
def getShow_StandardName_Parameter_Values(kwargs):
    '''
    Return state (bool) of the showstandardnameparametervalues checkbox from the query UI.
    '''
    # dict.get() + truthiness replaces dict.has_key() (removed in Python 3).
    return bool(kwargs.get('showstandardnameparametervalues'))
def getShow_All_Parameter_Values(kwargs):
    '''
    Return state (bool) of the showallparametervalues checkbox from the query UI.
    '''
    # dict.get() + truthiness replaces dict.has_key() (removed in Python 3).
    return bool(kwargs.get('showallparametervalues'))
def getShow_Parameter_Platform_Data(kwargs):
    '''
    Return state (bool) of the Show data checkbox from the query UI.
    '''
    # dict.get() + truthiness replaces dict.has_key() (removed in Python 3).
    return bool(kwargs.get('showparameterplatformdata'))
def getShow_Geo_X3D_Data(kwargs):
    '''
    Return state (bool) of the Show data checkbox from the query UI.
    '''
    # Same logger name as the module-level logger (logging.getLogger(__name__)),
    # looked up inline so the function is self-contained.
    logging.getLogger(__name__).debug('kwargs = %s', kwargs)
    # dict.get() + truthiness replaces dict.has_key() (removed in Python 3).
    return bool(kwargs.get('showgeox3ddata'))
#
# General utility methods called by STOQSQueryManager, MPQuery, etc.
#
def getParameterGroups(dbAlias, parameter):
    '''
    Return list of ParameterGroups that parameter belongs to.
    '''
    # Imported locally to avoid a circular import at module load time.
    from stoqs.models import ParameterGroupParameter
    memberships = ParameterGroupParameter.objects.using(dbAlias).filter(
        parameter=parameter)
    return memberships.values_list('parametergroup__name')[0]
## {{{ http://code.activestate.com/recipes/511478/ (r1)
import math
import numpy
import functools
def percentile(N, percent, key=lambda x: x):
    """
    Find the percentile of a list of values.

    @parameter N - is a list of values.  Note N MUST BE already sorted.
    @parameter percent - a float value from 0.0 to 1.0.
    @parameter key - optional key function to compute value from each element of N.
    @return - the percentile of the values (None for an empty list)
    """
    if not N:
        return None
    rank = (len(N) - 1) * percent
    lower = math.floor(rank)
    upper = math.ceil(rank)
    if lower == upper:
        # rank is a whole number: no interpolation needed
        return key(N[int(rank)])
    # linear interpolation between the two bracketing values
    return key(N[int(lower)]) * (upper - rank) + key(N[int(upper)]) * (rank - lower)

# median is 50th percentile.
median = functools.partial(percentile, percent=0.5)
## end of http://code.activestate.com/recipes/511478/ }}}
def mode(N):
    '''
    Estimate the mode of the values in N by histogramming them into 100
    equally spaced bins between min and max, then returning the center of
    the fullest bin (or its left edge when the fullest bin is the first).
    '''
    samples = numpy.array(N)
    edges = numpy.linspace(samples.min(), samples.max(), 100)
    counts, bin_edges = numpy.histogram(samples, edges)
    peak = numpy.argmax(counts)
    if peak == 0:
        return bin_edges[0]
    return (bin_edges[peak] + bin_edges[peak - 1]) / 2.0
# pure-Python Douglas-Peucker line simplification/generalization
#
# this code was written by Schuyler Erle <schuyler@nocat.net> and is
# made available in the public domain.
#
# the code was ported from a freely-licensed example at
# http://www.3dsoftware.com/Cartography/Programming/PolyLineReduction/
#
# the original page is no longer available, but is mirrored at
# http://www.mappinghacks.com/code/PolyLineReduction/
"""
>>> line = [(0,0),(1,0),(2,0),(2,1),(2,2),(1,2),(0,2),(0,1),(0,0)]
>>> simplify_points(line, 1.0)
[(0, 0), (2, 0), (2, 2), (0, 2), (0, 0)]
>>> line = [(0,0),(0.5,0.5),(1,0),(1.25,-0.25),(1.5,.5)]
>>> simplify_points(line, 0.25)
[(0, 0), (0.5, 0.5), (1.25, -0.25), (1.5, 0.5)]
"""
def simplify_points (pts, tolerance):
    """Douglas-Peucker simplification of the polyline *pts*.

    :param pts: sequence of (x, y) tuples
    :param tolerance: maximum allowed deviation of dropped points from the
                      simplified line
    :return: list of kept points with their original index appended, i.e.
             (x, y, i) tuples.  NOTE(review): the module docstring examples
             above predate the index-appending change and show plain pairs.
    """
    anchor = 0
    floater = len(pts) - 1
    stack = []
    keep = set()

    # Iterative (stack-based) divide and conquer over (anchor, floater) spans.
    stack.append((anchor, floater))
    while stack:
        anchor, floater = stack.pop()

        # initialize line segment
        if pts[floater] != pts[anchor]:
            anchorX = float(pts[floater][0] - pts[anchor][0])
            anchorY = float(pts[floater][1] - pts[anchor][1])
            seg_len = math.sqrt(anchorX ** 2 + anchorY ** 2)
            # get the unit vector
            anchorX /= seg_len
            anchorY /= seg_len
        else:
            # degenerate segment: both endpoints coincide
            anchorX = anchorY = seg_len = 0.0

        # inner loop: find the point farthest from the anchor-floater segment
        max_dist = 0.0
        farthest = anchor + 1
        for i in range(anchor + 1, floater):
            dist_to_seg = 0.0
            # compare to anchor
            vecX = float(pts[i][0] - pts[anchor][0])
            vecY = float(pts[i][1] - pts[anchor][1])
            seg_len = math.sqrt( vecX ** 2 + vecY ** 2 )
            # dot product:
            proj = vecX * anchorX + vecY * anchorY
            if proj < 0.0:
                # point projects before the anchor: use distance to anchor
                dist_to_seg = seg_len
            else:
                # compare to floater
                vecX = float(pts[i][0] - pts[floater][0])
                vecY = float(pts[i][1] - pts[floater][1])
                seg_len = math.sqrt( vecX ** 2 + vecY ** 2 )
                # dot product:
                proj = vecX * (-anchorX) + vecY * (-anchorY)
                if proj < 0.0:
                    # point projects past the floater: use distance to floater
                    dist_to_seg = seg_len
                else:  # calculate perpendicular distance to line (pythagorean theorem):
                    dist_to_seg = math.sqrt(abs(seg_len ** 2 - proj ** 2))
            if max_dist < dist_to_seg:
                max_dist = dist_to_seg
                farthest = i

        if max_dist <= tolerance:  # use line segment
            keep.add(anchor)
            keep.add(floater)
        else:
            # farthest point deviates too much: recurse into both halves
            stack.append((anchor, farthest))
            stack.append((farthest, floater))

    keep = list(keep)
    keep.sort()
    # Change from original code: add the index from the original line in the return
    return [(pts[i] + (i,)) for i in keep]
def pearsonr(x, y):
    '''
    Pearson correlation coefficient of the equal-length sequences x and y.
    Returns 0 when either sequence has zero variance.

    See http://stackoverflow.com/questions/3949226/calculating-pearson-correlation-and-significance-in-python and
    http://shop.oreilly.com/product/9780596529321.do
    '''
    # Assume len(x) == len(y)
    # Generator expressions with builtin zip() replace itertools.imap and
    # the shadowing lambdas; works on both Python 2 and 3.
    n = len(x)
    sum_x = float(sum(x))
    sum_y = float(sum(y))
    sum_x_sq = sum(xi ** 2 for xi in x)
    sum_y_sq = sum(yi ** 2 for yi in y)
    psum = sum(xi * yi for xi, yi in zip(x, y))
    num = psum - (sum_x * sum_y / n)
    den = ((sum_x_sq - sum_x ** 2 / n) * (sum_y_sq - sum_y ** 2 / n)) ** 0.5
    if den == 0:
        return 0
    return num / den
def postgresifySQL(query, pointFlag=False, translateGeom=False, sampleFlag=False):
    '''
    Given a generic database agnostic Django query string modify it using regular expressions to work
    on a PostgreSQL server.  If pointFlag is True then use the mappoint field for geom.  If translateGeom
    is True then translate .geom to latitude and longitude columns.  If sampleFlag is True alias the
    Sample (rather than Activity) id/geom columns.
    '''
    import re

    # Get text of query to quotify for Postgresql
    q = str(query)

    # Remove double quotes from around all table and column names
    q = q.replace('"', '')

    if not sampleFlag:
        # Add aliases for geom and gid - Activity
        q = q.replace('stoqs_activity.id', 'stoqs_activity.id as gid', 1)
        # Fixes problem with above being applied to Sample query join
        q = q.replace('= stoqs_activity.id as gid', '= stoqs_activity.id', 1)
        if pointFlag:
            q = q.replace('stoqs_activity.mappoint', 'stoqs_activity.mappoint as geom')
        else:
            q = q.replace('stoqs_activity.maptrack', 'stoqs_activity.maptrack as geom')
    else:
        # Add aliases for geom and gid - Sample
        q = q.replace('stoqs_sample.id', 'stoqs_sample.id as gid', 1)
        q = q.replace('stoqs_sample.geom', 'stoqs_sample.geom as geom')

    if translateGeom:
        q = q.replace('stoqs_measurement.geom',
                      'ST_X(stoqs_measurement.geom) as longitude, ST_Y(stoqs_measurement.geom) as latitude')

    # Quotify simple things that need quotes (raw strings so backslashes
    # reach the regex engine untouched)
    QUOTE_NAMEEQUALS = re.compile(r'name\s+=\s+(?P<argument>\S+)')
    QUOTE_DATES = re.compile(r'(?P<argument>\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d)')
    q = QUOTE_NAMEEQUALS.sub(r"name = '\1'", q)
    q = QUOTE_DATES.sub(r"'\1'", q)

    # The IN ( ... ) clauses require special treatment: an IN SELECT subquery needs no quoting,
    # only string values need quotes, and numbers need no quotes
    FIND_INS = re.compile(r'\sIN\s[^\)]+\)')
    items = ''
    for m in FIND_INS.findall(q):
        if m.find('SELECT') == -1:
            FIND_ITEMS = re.compile(r'\((?P<argument>[^\'\)]+)\)')
            new_items = ''
            try:
                items = FIND_ITEMS.search(m).groups()[0]
            # 'as' syntax works on Python 2.6+ and 3.x (was: "except Exception, e")
            except Exception as e:
                logger.warn(e)
                continue
            else:
                for item in items.split(','):
                    if not item.isdigit():
                        new_items = new_items + "'" + item.strip() + "', "
                    else:
                        new_items = new_items + item.strip() + ", "
            new_items = new_items[:-2]
            if new_items:
                q = q.replace(r' IN (' + items, r' IN (' + new_items)

    return q
def spiciness(t,s):
    """
    Return spiciness as defined by Flament (2002).

    see : http://www.satlab.hawaii.edu/spice/spice.html
    ref : A state variable for characterizing water masses and their
          diffusive stability: spiciness.  Progress in Oceanography
          Volume 54, 2002, Pages 493-501.
    test : spice(p=0,T=15,S=33)=0.54458641375
    NB : only for valid p = 0

    :param t: temperature (scalar or array-like)
    :param s: salinity (scalar or array-like, same shape as t)
    :return: numpy array of spiciness values, same shape as t
    """
    # Polynomial coefficients from Flament (2002), Table 1.  Row/column 0
    # of B are unused padding so the indices match the paper's 1-based
    # B[i,j] notation; coefs below slices the padding away.
    B = numpy.zeros((7,6))
    B[1,1] = 0
    B[1,2] = 7.7442e-001
    B[1,3] = -5.85e-003
    B[1,4] = -9.84e-004
    B[1,5] = -2.06e-004
    B[2,1] = 5.1655e-002
    B[2,2] = 2.034e-003
    B[2,3] = -2.742e-004
    B[2,4] = -8.5e-006
    B[2,5] = 1.36e-005
    B[3,1] = 6.64783e-003
    B[3,2] = -2.4681e-004
    B[3,3] = -1.428e-005
    B[3,4] = 3.337e-005
    B[3,5] = 7.894e-006
    B[4,1] = -5.4023e-005
    B[4,2] = 7.326e-006
    B[4,3] = 7.0036e-006
    B[4,4] = -3.0412e-006
    B[4,5] = -1.0853e-006
    B[5,1] = 3.949e-007
    B[5,2] = -3.029e-008
    B[5,3] = -3.8209e-007
    B[5,4] = 1.0012e-007
    B[5,5] = 4.7133e-008
    B[6,1] = -6.36e-010
    B[6,2] = -1.309e-009
    B[6,3] = 6.048e-009
    B[6,4] = -1.1409e-009
    B[6,5] = -6.676e-010
    #
    t = numpy.array(t)
    s = numpy.array(s)
    #
    coefs = B[1:7,1:6]
    sp = numpy.zeros(t.shape)
    # Salinity anomaly relative to 35; the double loop evaluates the
    # bivariate polynomial sum_ij coefs[i,j] * t**i * ss**j term by term.
    ss = s - 35.
    bigT = numpy.ones(t.shape)
    for i in range(6):
        bigS = numpy.ones(t.shape)
        for j in range(5):
            sp+= coefs[i,j]*bigT*bigS
            bigS*= ss
        bigT*=t
    return sp
def haversine(lon1, lat1, lon2, lat2):
    """
    Great-circle distance in kilometers between two points given in
    decimal degrees (haversine formula).
    """
    # convert decimal degrees to radians
    lon1, lat1, lon2, lat2 = [math.radians(v) for v in (lon1, lat1, lon2, lat2)]

    # haversine formula
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    h = math.sin(dlat / 2) ** 2 \
        + math.cos(lat1) * math.cos(lat2) * math.sin(dlon / 2) ** 2
    central_angle = 2 * math.asin(math.sqrt(h))

    # 6367 km is the radius of the Earth
    return 6367 * central_angle
# Run the embedded doctests (e.g. the examples in the module docstring
# above simplify_points) when this module is executed directly.
if __name__ == "__main__":
    import doctest
    doctest.testmod()
|
gpl-3.0
|
rbtcollins/pip
|
tests/unit/test_options.py
|
22
|
9922
|
import os
import pytest
import pip.baseparser
from pip import main
from pip import cmdoptions
from pip.basecommand import Command
from pip.commands import commands_dict as commands
class FakeCommand(Command):
    """Minimal pip command registered as a fixture by the option tests."""

    name = 'fake'
    summary = name

    def main(self, args):
        # Attach the standard index options so parsing mirrors a real
        # command, then parse without executing anything.
        group = cmdoptions.make_option_group(
            cmdoptions.index_group, self.parser)
        self.parser.add_option_group(group)
        return self.parse_args(args)
class TestOptionPrecedence(object):
    """
    Tests for confirming our option precedence:
    cli -> environment -> subcommand config -> global config -> option
    defaults
    """

    def setup(self):
        # Snapshot the environment (restored in teardown) and register the
        # FakeCommand fixture in pip's command registry.
        self.environ_before = os.environ.copy()
        commands[FakeCommand.name] = FakeCommand

    def teardown(self):
        os.environ = self.environ_before
        commands.pop(FakeCommand.name)

    def get_config_section(self, section):
        # Fake config data: both a global and a 'fake'-command section.
        config = {
            'global': [('timeout', '-3')],
            'fake': [('timeout', '-2')],
        }
        return config[section]

    def get_config_section_global(self, section):
        # Fake config data: only the global section sets a value.
        config = {
            'global': [('timeout', '-3')],
            'fake': [],
        }
        return config[section]

    def test_env_override_default_int(self):
        """
        Test that environment variable overrides an int option default.
        """
        os.environ['PIP_TIMEOUT'] = '-1'
        options, args = main(['fake'])
        assert options.timeout == -1

    def test_env_override_default_append(self):
        """
        Test that environment variable overrides an append option default.
        """
        os.environ['PIP_FIND_LINKS'] = 'F1'
        options, args = main(['fake'])
        assert options.find_links == ['F1']

        # Space-separated values accumulate into the append option.
        os.environ['PIP_FIND_LINKS'] = 'F1 F2'
        options, args = main(['fake'])
        assert options.find_links == ['F1', 'F2']

    def test_env_override_default_choice(self):
        """
        Test that environment variable overrides a choice option default.
        """
        os.environ['PIP_EXISTS_ACTION'] = 'w'
        options, args = main(['fake'])
        assert options.exists_action == ['w']

        os.environ['PIP_EXISTS_ACTION'] = 's w'
        options, args = main(['fake'])
        assert options.exists_action == ['s', 'w']

    def test_env_alias_override_default(self):
        """
        When an option has multiple long forms, test that the technique of
        using the env variable, "PIP_<long form>" works for all cases.
        (e.g. PIP_LOG_FILE and PIP_LOCAL_LOG should all work)
        """
        os.environ['PIP_LOG_FILE'] = 'override.log'
        options, args = main(['fake'])
        assert options.log == 'override.log'
        os.environ['PIP_LOCAL_LOG'] = 'override.log'
        options, args = main(['fake'])
        assert options.log == 'override.log'

    def test_cli_override_environment(self):
        """
        Test the cli overrides and environment variable
        """
        os.environ['PIP_TIMEOUT'] = '-1'
        options, args = main(['fake', '--timeout', '-2'])
        assert options.timeout == -2

    def test_environment_override_config(self, monkeypatch):
        """
        Test an environment variable overrides the config file
        """
        monkeypatch.setattr(
            pip.baseparser.ConfigOptionParser,
            "get_config_section",
            self.get_config_section,
        )
        os.environ['PIP_TIMEOUT'] = '-1'
        options, args = main(['fake'])
        assert options.timeout == -1

    def test_commmand_config_override_global_config(self, monkeypatch):
        """
        Test that command config overrides global config
        """
        # NOTE(review): method name has a typo ("commmand"); kept as-is
        # because renaming would change the collected test id.
        monkeypatch.setattr(
            pip.baseparser.ConfigOptionParser,
            "get_config_section",
            self.get_config_section,
        )
        options, args = main(['fake'])
        assert options.timeout == -2

    def test_global_config_is_used(self, monkeypatch):
        """
        Test that global config is used
        """
        monkeypatch.setattr(
            pip.baseparser.ConfigOptionParser,
            "get_config_section",
            self.get_config_section_global,
        )
        options, args = main(['fake'])
        assert options.timeout == -3
class TestOptionsInterspersed(object):
    # General options may appear before or after the subcommand;
    # subcommand-specific options must come after it.

    def setup(self):
        self.environ_before = os.environ.copy()
        commands[FakeCommand.name] = FakeCommand

    def teardown(self):
        os.environ = self.environ_before
        commands.pop(FakeCommand.name)

    def test_general_option_after_subcommand(self):
        options, args = main(['fake', '--timeout', '-1'])
        assert options.timeout == -1

    def test_option_after_subcommand_arg(self):
        options, args = main(['fake', 'arg', '--timeout', '-1'])
        assert options.timeout == -1

    def test_additive_before_after_subcommand(self):
        # -v given once before and once after the subcommand accumulates.
        options, args = main(['-v', 'fake', '-v'])
        assert options.verbose == 2

    def test_subcommand_option_before_subcommand_fails(self):
        with pytest.raises(SystemExit):
            main(['--find-links', 'F1', 'fake'])
class TestGeneralOptions(object):

    # the reason to specifically test general options is due to the
    # extra processing they receive, and the number of bugs we've had
    #
    # Each test passes the option once before and once after the
    # subcommand and asserts both placements parse identically.

    def setup(self):
        self.environ_before = os.environ.copy()
        commands[FakeCommand.name] = FakeCommand

    def teardown(self):
        os.environ = self.environ_before
        commands.pop(FakeCommand.name)

    def test_require_virtualenv(self):
        options1, args1 = main(['--require-virtualenv', 'fake'])
        options2, args2 = main(['fake', '--require-virtualenv'])
        assert options1.require_venv
        assert options2.require_venv

    def test_verbose(self):
        options1, args1 = main(['--verbose', 'fake'])
        options2, args2 = main(['fake', '--verbose'])
        assert options1.verbose == options2.verbose == 1

    def test_quiet(self):
        # --quiet is additive: each repetition bumps the quiet level.
        options1, args1 = main(['--quiet', 'fake'])
        options2, args2 = main(['fake', '--quiet'])
        assert options1.quiet == options2.quiet == 1

        options3, args3 = main(['--quiet', '--quiet', 'fake'])
        options4, args4 = main(['fake', '--quiet', '--quiet'])
        assert options3.quiet == options4.quiet == 2

        options5, args5 = main(['--quiet', '--quiet', '--quiet', 'fake'])
        options6, args6 = main(['fake', '--quiet', '--quiet', '--quiet'])
        assert options5.quiet == options6.quiet == 3

    def test_log(self):
        options1, args1 = main(['--log', 'path', 'fake'])
        options2, args2 = main(['fake', '--log', 'path'])
        assert options1.log == options2.log == 'path'

    def test_local_log(self):
        # --local-log is an alias that feeds the same 'log' destination.
        options1, args1 = main(['--local-log', 'path', 'fake'])
        options2, args2 = main(['fake', '--local-log', 'path'])
        assert options1.log == options2.log == 'path'

    def test_no_input(self):
        options1, args1 = main(['--no-input', 'fake'])
        options2, args2 = main(['fake', '--no-input'])
        assert options1.no_input
        assert options2.no_input

    def test_proxy(self):
        options1, args1 = main(['--proxy', 'path', 'fake'])
        options2, args2 = main(['fake', '--proxy', 'path'])
        assert options1.proxy == options2.proxy == 'path'

    def test_retries(self):
        options1, args1 = main(['--retries', '-1', 'fake'])
        options2, args2 = main(['fake', '--retries', '-1'])
        assert options1.retries == options2.retries == -1

    def test_timeout(self):
        options1, args1 = main(['--timeout', '-1', 'fake'])
        options2, args2 = main(['fake', '--timeout', '-1'])
        assert options1.timeout == options2.timeout == -1

    def test_default_vcs(self):
        options1, args1 = main(['--default-vcs', 'path', 'fake'])
        options2, args2 = main(['fake', '--default-vcs', 'path'])
        assert options1.default_vcs == options2.default_vcs == 'path'

    def test_skip_requirements_regex(self):
        options1, args1 = main(['--skip-requirements-regex', 'path', 'fake'])
        options2, args2 = main(['fake', '--skip-requirements-regex', 'path'])
        assert options1.skip_requirements_regex == 'path'
        assert options2.skip_requirements_regex == 'path'

    def test_exists_action(self):
        options1, args1 = main(['--exists-action', 'w', 'fake'])
        options2, args2 = main(['fake', '--exists-action', 'w'])
        assert options1.exists_action == options2.exists_action == ['w']

    def test_cert(self):
        options1, args1 = main(['--cert', 'path', 'fake'])
        options2, args2 = main(['fake', '--cert', 'path'])
        assert options1.cert == options2.cert == 'path'

    def test_client_cert(self):
        options1, args1 = main(['--client-cert', 'path', 'fake'])
        options2, args2 = main(['fake', '--client-cert', 'path'])
        assert options1.client_cert == options2.client_cert == 'path'
class TestOptionsConfigFiles(object):

    def test_venv_config_file_found(self, monkeypatch):
        # We only want a dummy object to call the get_config_files method
        monkeypatch.setattr(
            pip.baseparser.ConfigOptionParser,
            '__init__',
            lambda self: None,
        )

        # strict limit on the site_config_files list
        monkeypatch.setattr(pip.baseparser, 'site_config_files', ['/a/place'])

        # NOTE(review): the original comment said "two config files" but the
        # assertion expects 4 — presumably site + global + user + venv;
        # confirm against get_config_files().
        monkeypatch.setattr(
            pip.baseparser,
            'running_under_virtualenv',
            lambda: True,
        )
        monkeypatch.setattr(os.path, 'exists', lambda filename: True)
        cp = pip.baseparser.ConfigOptionParser()
        assert len(cp.get_config_files()) == 4
|
mit
|
kxliugang/edx-platform
|
openedx/core/djangoapps/credit/migrations/0005_auto__add_field_creditprovider_provider_url__add_field_creditprovider_.py
|
84
|
5535
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Apply the migration: add three columns to credit_creditprovider."""
    # Adding field 'CreditProvider.provider_url'
    db.add_column('credit_creditprovider', 'provider_url',
                  self.gf('django.db.models.fields.URLField')(default='', unique=True, max_length=255),
                  keep_default=False)

    # Adding field 'CreditProvider.eligibility_duration'
    db.add_column('credit_creditprovider', 'eligibility_duration',
                  self.gf('django.db.models.fields.PositiveIntegerField')(default=0),
                  keep_default=False)

    # Adding field 'CreditProvider.active'
    db.add_column('credit_creditprovider', 'active',
                  self.gf('django.db.models.fields.BooleanField')(default=True),
                  keep_default=False)
def backwards(self, orm):
# Deleting field 'CreditProvider.provider_url'
db.delete_column('credit_creditprovider', 'provider_url')
# Deleting field 'CreditProvider.eligibility_duration'
db.delete_column('credit_creditprovider', 'eligibility_duration')
# Deleting field 'CreditProvider.active'
db.delete_column('credit_creditprovider', 'active')
models = {
'credit.creditcourse': {
'Meta': {'object_name': 'CreditCourse'},
'course_key': ('xmodule_django.models.CourseKeyField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'credit.crediteligibility': {
'Meta': {'unique_together': "(('username', 'course'),)", 'object_name': 'CreditEligibility'},
'course': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'eligibilities'", 'to': "orm['credit.CreditCourse']"}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'provider': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'eligibilities'", 'to': "orm['credit.CreditProvider']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'credit.creditprovider': {
'Meta': {'object_name': 'CreditProvider'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'eligibility_duration': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'provider_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
'provider_url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '255'})
},
'credit.creditrequirement': {
'Meta': {'unique_together': "(('namespace', 'name', 'course'),)", 'object_name': 'CreditRequirement'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'course': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'credit_requirements'", 'to': "orm['credit.CreditCourse']"}),
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'criteria': ('jsonfield.fields.JSONField', [], {}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'namespace': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'credit.creditrequirementstatus': {
'Meta': {'object_name': 'CreditRequirementStatus'},
'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
'reason': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'requirement': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'statuses'", 'to': "orm['credit.CreditRequirement']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
}
}
complete_apps = ['credit']
|
agpl-3.0
|
beck/django
|
tests/datetimes/tests.py
|
345
|
5922
|
from __future__ import unicode_literals
import datetime
from unittest import skipIf
from django.test import TestCase, override_settings
from django.utils import timezone
from .models import Article, Category, Comment
try:
import pytz
except ImportError:
pytz = None
class DateTimesTests(TestCase):
    """Tests for ``QuerySet.datetimes()``: truncation kinds, ordering,
    relation traversal and lazy iteration."""

    def test_related_model_traverse(self):
        # datetimes() can follow forward FK, reverse FK and M2M relations.
        a1 = Article.objects.create(
            title="First one",
            pub_date=datetime.datetime(2005, 7, 28, 9, 0, 0),
        )
        a2 = Article.objects.create(
            title="Another one",
            pub_date=datetime.datetime(2010, 7, 28, 10, 0, 0),
        )
        a3 = Article.objects.create(
            title="Third one, in the first day",
            pub_date=datetime.datetime(2005, 7, 28, 17, 0, 0),
        )
        a1.comments.create(
            text="Im the HULK!",
            pub_date=datetime.datetime(2005, 7, 28, 9, 30, 0),
        )
        a1.comments.create(
            text="HULK SMASH!",
            pub_date=datetime.datetime(2005, 7, 29, 1, 30, 0),
        )
        a2.comments.create(
            text="LMAO",
            pub_date=datetime.datetime(2010, 7, 28, 10, 10, 10),
        )
        a3.comments.create(
            text="+1",
            pub_date=datetime.datetime(2005, 8, 29, 10, 10, 10),
        )
        c = Category.objects.create(name="serious-news")
        c.articles.add(a1, a3)
        # Distinct years/months/days of the *related* articles' pub_date.
        self.assertQuerysetEqual(
            Comment.objects.datetimes("article__pub_date", "year"), [
                datetime.datetime(2005, 1, 1),
                datetime.datetime(2010, 1, 1),
            ],
            lambda d: d,
        )
        self.assertQuerysetEqual(
            Comment.objects.datetimes("article__pub_date", "month"), [
                datetime.datetime(2005, 7, 1),
                datetime.datetime(2010, 7, 1),
            ],
            lambda d: d
        )
        self.assertQuerysetEqual(
            Comment.objects.datetimes("article__pub_date", "day"), [
                datetime.datetime(2005, 7, 28),
                datetime.datetime(2010, 7, 28),
            ],
            lambda d: d
        )
        self.assertQuerysetEqual(
            Article.objects.datetimes("comments__pub_date", "day"), [
                datetime.datetime(2005, 7, 28),
                datetime.datetime(2005, 7, 29),
                datetime.datetime(2005, 8, 29),
                datetime.datetime(2010, 7, 28),
            ],
            lambda d: d
        )
        # No comment has an approval_date, so the result is empty.
        self.assertQuerysetEqual(
            Article.objects.datetimes("comments__approval_date", "day"), []
        )
        self.assertQuerysetEqual(
            Category.objects.datetimes("articles__pub_date", "day"), [
                datetime.datetime(2005, 7, 28),
            ],
            lambda d: d,
        )

    @skipIf(pytz is None, "this test requires pytz")
    @override_settings(USE_TZ=True)
    def test_21432(self):
        # Regression test (ticket #21432): 'second' truncation must round-trip
        # an aware datetime under USE_TZ.
        now = timezone.localtime(timezone.now().replace(microsecond=0))
        Article.objects.create(title="First one", pub_date=now)
        qs = Article.objects.datetimes('pub_date', 'second')
        self.assertEqual(qs[0], now)

    def test_datetimes_returns_available_dates_for_given_scope_and_given_field(self):
        pub_dates = [
            datetime.datetime(2005, 7, 28, 12, 15),
            datetime.datetime(2005, 7, 29, 2, 15),
            datetime.datetime(2005, 7, 30, 5, 15),
            datetime.datetime(2005, 7, 31, 19, 15)]
        for i, pub_date in enumerate(pub_dates):
            Article(pub_date=pub_date, title='title #{}'.format(i)).save()
        # Each scope truncates to the start of the year/month/day.
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'year'),
            ["datetime.datetime(2005, 1, 1, 0, 0)"])
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'month'),
            ["datetime.datetime(2005, 7, 1, 0, 0)"])
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'day'),
            ["datetime.datetime(2005, 7, 28, 0, 0)",
             "datetime.datetime(2005, 7, 29, 0, 0)",
             "datetime.datetime(2005, 7, 30, 0, 0)",
             "datetime.datetime(2005, 7, 31, 0, 0)"])
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'day', order='ASC'),
            ["datetime.datetime(2005, 7, 28, 0, 0)",
             "datetime.datetime(2005, 7, 29, 0, 0)",
             "datetime.datetime(2005, 7, 30, 0, 0)",
             "datetime.datetime(2005, 7, 31, 0, 0)"])
        self.assertQuerysetEqual(
            Article.objects.datetimes('pub_date', 'day', order='DESC'),
            ["datetime.datetime(2005, 7, 31, 0, 0)",
             "datetime.datetime(2005, 7, 30, 0, 0)",
             "datetime.datetime(2005, 7, 29, 0, 0)",
             "datetime.datetime(2005, 7, 28, 0, 0)"])

    def test_datetimes_has_lazy_iterator(self):
        pub_dates = [
            datetime.datetime(2005, 7, 28, 12, 15),
            datetime.datetime(2005, 7, 29, 2, 15),
            datetime.datetime(2005, 7, 30, 5, 15),
            datetime.datetime(2005, 7, 31, 19, 15)]
        for i, pub_date in enumerate(pub_dates):
            Article(pub_date=pub_date, title='title #{}'.format(i)).save()
        # Use iterator() with datetimes() to return a generator that lazily
        # requests each result one at a time, to save memory.
        dates = []
        with self.assertNumQueries(0):
            article_datetimes_iterator = Article.objects.datetimes('pub_date', 'day', order='DESC').iterator()
        with self.assertNumQueries(1):
            for article in article_datetimes_iterator:
                dates.append(article)
        self.assertEqual(dates, [
            datetime.datetime(2005, 7, 31, 0, 0),
            datetime.datetime(2005, 7, 30, 0, 0),
            datetime.datetime(2005, 7, 29, 0, 0),
            datetime.datetime(2005, 7, 28, 0, 0)])
|
bsd-3-clause
|
svanschalkwyk/datafari
|
windows/python/Lib/difflib.py
|
42
|
82320
|
"""
Module difflib -- helpers for computing deltas between objects.
Function get_close_matches(word, possibilities, n=3, cutoff=0.6):
Use SequenceMatcher to return list of the best "good enough" matches.
Function context_diff(a, b):
For two lists of strings, return a delta in context diff format.
Function ndiff(a, b):
Return a delta: the difference between `a` and `b` (lists of strings).
Function restore(delta, which):
Return one of the two sequences that generated an ndiff delta.
Function unified_diff(a, b):
For two lists of strings, return a delta in unified diff format.
Class SequenceMatcher:
A flexible class for comparing pairs of sequences of any type.
Class Differ:
For producing human-readable deltas from sequences of lines of text.
Class HtmlDiff:
For producing HTML side by side comparison with change highlights.
"""
__all__ = ['get_close_matches', 'ndiff', 'restore', 'SequenceMatcher',
'Differ','IS_CHARACTER_JUNK', 'IS_LINE_JUNK', 'context_diff',
'unified_diff', 'HtmlDiff', 'Match']
import heapq
from collections import namedtuple as _namedtuple
from functools import reduce
Match = _namedtuple('Match', 'a b size')
def _calculate_ratio(matches, length):
if length:
return 2.0 * matches / length
return 1.0
class SequenceMatcher:
"""
SequenceMatcher is a flexible class for comparing pairs of sequences of
any type, so long as the sequence elements are hashable. The basic
algorithm predates, and is a little fancier than, an algorithm
published in the late 1980's by Ratcliff and Obershelp under the
hyperbolic name "gestalt pattern matching". The basic idea is to find
the longest contiguous matching subsequence that contains no "junk"
elements (R-O doesn't address junk). The same idea is then applied
recursively to the pieces of the sequences to the left and to the right
of the matching subsequence. This does not yield minimal edit
sequences, but does tend to yield matches that "look right" to people.
SequenceMatcher tries to compute a "human-friendly diff" between two
sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the
longest *contiguous* & junk-free matching subsequence. That's what
catches peoples' eyes. The Windows(tm) windiff has another interesting
notion, pairing up elements that appear uniquely in each sequence.
That, and the method here, appear to yield more intuitive difference
reports than does diff. This method appears to be the least vulnerable
to synching up on blocks of "junk lines", though (like blank lines in
ordinary text files, or maybe "<P>" lines in HTML files). That may be
because this is the only method of the 3 that has a *concept* of
"junk" <wink>.
Example, comparing two strings, and considering blanks to be "junk":
>>> s = SequenceMatcher(lambda x: x == " ",
... "private Thread currentThread;",
... "private volatile Thread currentThread;")
>>>
.ratio() returns a float in [0, 1], measuring the "similarity" of the
sequences. As a rule of thumb, a .ratio() value over 0.6 means the
sequences are close matches:
>>> print round(s.ratio(), 3)
0.866
>>>
If you're only interested in where the sequences match,
.get_matching_blocks() is handy:
>>> for block in s.get_matching_blocks():
... print "a[%d] and b[%d] match for %d elements" % block
a[0] and b[0] match for 8 elements
a[8] and b[17] match for 21 elements
a[29] and b[38] match for 0 elements
Note that the last tuple returned by .get_matching_blocks() is always a
dummy, (len(a), len(b), 0), and this is the only case in which the last
tuple element (number of elements matched) is 0.
If you want to know how to change the first sequence into the second,
use .get_opcodes():
>>> for opcode in s.get_opcodes():
... print "%6s a[%d:%d] b[%d:%d]" % opcode
equal a[0:8] b[0:8]
insert a[8:8] b[8:17]
equal a[8:29] b[17:38]
See the Differ class for a fancy human-friendly file differencer, which
uses SequenceMatcher both to compare sequences of lines, and to compare
sequences of characters within similar (near-matching) lines.
See also function get_close_matches() in this module, which shows how
simple code building on SequenceMatcher can be used to do useful work.
Timing: Basic R-O is cubic time worst case and quadratic time expected
case. SequenceMatcher is quadratic time for the worst case and has
expected-case behavior dependent in a complicated way on how many
elements the sequences have in common; best case time is linear.
Methods:
__init__(isjunk=None, a='', b='')
Construct a SequenceMatcher.
set_seqs(a, b)
Set the two sequences to be compared.
set_seq1(a)
Set the first sequence to be compared.
set_seq2(b)
Set the second sequence to be compared.
find_longest_match(alo, ahi, blo, bhi)
Find longest matching block in a[alo:ahi] and b[blo:bhi].
get_matching_blocks()
Return list of triples describing matching subsequences.
get_opcodes()
Return list of 5-tuples describing how to turn a into b.
ratio()
Return a measure of the sequences' similarity (float in [0,1]).
quick_ratio()
Return an upper bound on .ratio() relatively quickly.
real_quick_ratio()
Return an upper bound on ratio() very quickly.
"""
def __init__(self, isjunk=None, a='', b='', autojunk=True):
"""Construct a SequenceMatcher.
Optional arg isjunk is None (the default), or a one-argument
function that takes a sequence element and returns true iff the
element is junk. None is equivalent to passing "lambda x: 0", i.e.
no elements are considered to be junk. For example, pass
lambda x: x in " \\t"
if you're comparing lines as sequences of characters, and don't
want to synch up on blanks or hard tabs.
Optional arg a is the first of two sequences to be compared. By
default, an empty string. The elements of a must be hashable. See
also .set_seqs() and .set_seq1().
Optional arg b is the second of two sequences to be compared. By
default, an empty string. The elements of b must be hashable. See
also .set_seqs() and .set_seq2().
Optional arg autojunk should be set to False to disable the
"automatic junk heuristic" that treats popular elements as junk
(see module documentation for more information).
"""
# Members:
# a
# first sequence
# b
# second sequence; differences are computed as "what do
# we need to do to 'a' to change it into 'b'?"
# b2j
# for x in b, b2j[x] is a list of the indices (into b)
# at which x appears; junk elements do not appear
# fullbcount
# for x in b, fullbcount[x] == the number of times x
# appears in b; only materialized if really needed (used
# only for computing quick_ratio())
# matching_blocks
# a list of (i, j, k) triples, where a[i:i+k] == b[j:j+k];
# ascending & non-overlapping in i and in j; terminated by
# a dummy (len(a), len(b), 0) sentinel
# opcodes
# a list of (tag, i1, i2, j1, j2) tuples, where tag is
# one of
# 'replace' a[i1:i2] should be replaced by b[j1:j2]
# 'delete' a[i1:i2] should be deleted
# 'insert' b[j1:j2] should be inserted
# 'equal' a[i1:i2] == b[j1:j2]
# isjunk
# a user-supplied function taking a sequence element and
# returning true iff the element is "junk" -- this has
# subtle but helpful effects on the algorithm, which I'll
# get around to writing up someday <0.9 wink>.
# DON'T USE! Only __chain_b uses this. Use isbjunk.
# isbjunk
# for x in b, isbjunk(x) == isjunk(x) but much faster;
# it's really the __contains__ method of a hidden dict.
# DOES NOT WORK for x in a!
# isbpopular
# for x in b, isbpopular(x) is true iff b is reasonably long
# (at least 200 elements) and x accounts for more than 1 + 1% of
# its elements (when autojunk is enabled).
# DOES NOT WORK for x in a!
self.isjunk = isjunk
self.a = self.b = None
self.autojunk = autojunk
self.set_seqs(a, b)
def set_seqs(self, a, b):
"""Set the two sequences to be compared.
>>> s = SequenceMatcher()
>>> s.set_seqs("abcd", "bcde")
>>> s.ratio()
0.75
"""
self.set_seq1(a)
self.set_seq2(b)
def set_seq1(self, a):
"""Set the first sequence to be compared.
The second sequence to be compared is not changed.
>>> s = SequenceMatcher(None, "abcd", "bcde")
>>> s.ratio()
0.75
>>> s.set_seq1("bcde")
>>> s.ratio()
1.0
>>>
SequenceMatcher computes and caches detailed information about the
second sequence, so if you want to compare one sequence S against
many sequences, use .set_seq2(S) once and call .set_seq1(x)
repeatedly for each of the other sequences.
See also set_seqs() and set_seq2().
"""
if a is self.a:
return
self.a = a
self.matching_blocks = self.opcodes = None
def set_seq2(self, b):
"""Set the second sequence to be compared.
The first sequence to be compared is not changed.
>>> s = SequenceMatcher(None, "abcd", "bcde")
>>> s.ratio()
0.75
>>> s.set_seq2("abcd")
>>> s.ratio()
1.0
>>>
SequenceMatcher computes and caches detailed information about the
second sequence, so if you want to compare one sequence S against
many sequences, use .set_seq2(S) once and call .set_seq1(x)
repeatedly for each of the other sequences.
See also set_seqs() and set_seq1().
"""
if b is self.b:
return
self.b = b
self.matching_blocks = self.opcodes = None
self.fullbcount = None
self.__chain_b()
    # For each element x in b, set b2j[x] to a list of the indices in
    # b where x appears; the indices are in increasing order; note that
    # the number of times x appears in b is len(b2j[x]) ...
    # when self.isjunk is defined, junk elements don't show up in this
    # map at all, which stops the central find_longest_match method
    # from starting any matching block at a junk element ...
    # also creates the fast isbjunk function ...
    # b2j also does not contain entries for "popular" elements, meaning
    # elements that account for more than 1 + 1% of the total elements, and
    # when the sequence is reasonably large (>= 200 elements); this can
    # be viewed as an adaptive notion of semi-junk, and yields an enormous
    # speedup when, e.g., comparing program files with hundreds of
    # instances of "return NULL;" ...
    # note that this is only called when b changes; so for cross-product
    # kinds of matches, it's best to call set_seq2 once, then set_seq1
    # repeatedly
    def __chain_b(self):
        # (Re)build the index structures derived from self.b: populates
        # self.b2j (element -> ascending index list, junk/popular purged),
        # self.isbjunk and self.isbpopular.
        #
        # Because isjunk is a user-defined (not C) function, and we test
        # for junk a LOT, it's important to minimize the number of calls.
        # Before the tricks described here, __chain_b was by far the most
        # time-consuming routine in the whole module! If anyone sees
        # Jim Roskind, thank him again for profile.py -- I never would
        # have guessed that.
        # The first trick is to build b2j ignoring the possibility
        # of junk. I.e., we don't call isjunk at all yet. Throwing
        # out the junk later is much cheaper than building b2j "right"
        # from the start.
        b = self.b
        self.b2j = b2j = {}
        for i, elt in enumerate(b):
            indices = b2j.setdefault(elt, [])
            indices.append(i)

        # Purge junk elements
        junk = set()
        isjunk = self.isjunk
        if isjunk:
            for elt in list(b2j.keys()):  # using list() since b2j is modified
                if isjunk(elt):
                    junk.add(elt)
                    del b2j[elt]

        # Purge popular elements that are not junk
        popular = set()
        n = len(b)
        if self.autojunk and n >= 200:
            ntest = n // 100 + 1
            for elt, idxs in list(b2j.items()):
                if len(idxs) > ntest:
                    popular.add(elt)
                    del b2j[elt]

        # Now for x in b, isjunk(x) == x in junk, but the latter is much faster.
        # Since the number of *unique* junk elements is probably small, the
        # memory burden of keeping this set alive is likely trivial compared to
        # the size of b2j.
        self.isbjunk = junk.__contains__
        self.isbpopular = popular.__contains__
    def find_longest_match(self, alo, ahi, blo, bhi):
        """Find longest matching block in a[alo:ahi] and b[blo:bhi].

        If isjunk is not defined:

        Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where
            alo <= i <= i+k <= ahi
            blo <= j <= j+k <= bhi
        and for all (i',j',k') meeting those conditions,
            k >= k'
            i <= i'
            and if i == i', j <= j'

        In other words, of all maximal matching blocks, return one that
        starts earliest in a, and of all those maximal matching blocks that
        start earliest in a, return the one that starts earliest in b.

        >>> s = SequenceMatcher(None, " abcd", "abcd abcd")
        >>> s.find_longest_match(0, 5, 0, 9)
        Match(a=0, b=4, size=5)

        If isjunk is defined, first the longest matching block is
        determined as above, but with the additional restriction that no
        junk element appears in the block.  Then that block is extended as
        far as possible by matching (only) junk elements on both sides.  So
        the resulting block never matches on junk except as identical junk
        happens to be adjacent to an "interesting" match.

        Here's the same example as before, but considering blanks to be
        junk. That prevents " abcd" from matching the " abcd" at the tail
        end of the second sequence directly.  Instead only the "abcd" can
        match, and matches the leftmost "abcd" in the second sequence:

        >>> s = SequenceMatcher(lambda x: x==" ", " abcd", "abcd abcd")
        >>> s.find_longest_match(0, 5, 0, 9)
        Match(a=1, b=0, size=4)

        If no blocks match, return (alo, blo, 0).

        >>> s = SequenceMatcher(None, "ab", "c")
        >>> s.find_longest_match(0, 2, 0, 1)
        Match(a=0, b=0, size=0)
        """
        # CAUTION: stripping common prefix or suffix would be incorrect.
        # E.g.,
        #    ab
        #    acab
        # Longest matching block is "ab", but if common prefix is
        # stripped, it's "a" (tied with "b").  UNIX(tm) diff does so
        # strip, so ends up claiming that ab is changed to acab by
        # inserting "ca" in the middle.  That's minimal but unintuitive:
        # "it's obvious" that someone inserted "ac" at the front.
        # Windiff ends up at the same place as diff, but by pairing up
        # the unique 'b's and then matching the first two 'a's.

        a, b, b2j, isbjunk = self.a, self.b, self.b2j, self.isbjunk
        besti, bestj, bestsize = alo, blo, 0
        # find longest junk-free match
        # during an iteration of the loop, j2len[j] = length of longest
        # junk-free match ending with a[i-1] and b[j]
        j2len = {}
        nothing = []
        # NOTE: xrange -- this is Python 2 source.
        for i in xrange(alo, ahi):
            # look at all instances of a[i] in b; note that because
            # b2j has no junk keys, the loop is skipped if a[i] is junk
            j2lenget = j2len.get
            newj2len = {}
            for j in b2j.get(a[i], nothing):
                # a[i] matches b[j]
                if j < blo:
                    continue
                if j >= bhi:
                    break
                k = newj2len[j] = j2lenget(j-1, 0) + 1
                if k > bestsize:
                    besti, bestj, bestsize = i-k+1, j-k+1, k
            j2len = newj2len

        # Extend the best by non-junk elements on each end.  In particular,
        # "popular" non-junk elements aren't in b2j, which greatly speeds
        # the inner loop above, but also means "the best" match so far
        # doesn't contain any junk *or* popular non-junk elements.
        while besti > alo and bestj > blo and \
              not isbjunk(b[bestj-1]) and \
              a[besti-1] == b[bestj-1]:
            besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
        while besti+bestsize < ahi and bestj+bestsize < bhi and \
              not isbjunk(b[bestj+bestsize]) and \
              a[besti+bestsize] == b[bestj+bestsize]:
            bestsize += 1

        # Now that we have a wholly interesting match (albeit possibly
        # empty!), we may as well suck up the matching junk on each
        # side of it too.  Can't think of a good reason not to, and it
        # saves post-processing the (possibly considerable) expense of
        # figuring out what to do with it.  In the case of an empty
        # interesting match, this is clearly the right thing to do,
        # because no other kind of match is possible in the regions.
        while besti > alo and bestj > blo and \
              isbjunk(b[bestj-1]) and \
              a[besti-1] == b[bestj-1]:
            besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
        while besti+bestsize < ahi and bestj+bestsize < bhi and \
              isbjunk(b[bestj+bestsize]) and \
              a[besti+bestsize] == b[bestj+bestsize]:
            bestsize = bestsize + 1

        return Match(besti, bestj, bestsize)
    def get_matching_blocks(self):
        """Return list of triples describing matching subsequences.

        Each triple is of the form (i, j, n), and means that
        a[i:i+n] == b[j:j+n].  The triples are monotonically increasing in
        i and in j.  New in Python 2.5, it's also guaranteed that if
        (i, j, n) and (i', j', n') are adjacent triples in the list, and
        the second is not the last triple in the list, then i+n != i' or
        j+n != j'.  IOW, adjacent triples never describe adjacent equal
        blocks.

        The last triple is a dummy, (len(a), len(b), 0), and is the only
        triple with n==0.

        >>> s = SequenceMatcher(None, "abxcd", "abcd")
        >>> s.get_matching_blocks()
        [Match(a=0, b=0, size=2), Match(a=3, b=2, size=2), Match(a=5, b=4, size=0)]
        """
        # Result is cached; invalidated by set_seq1/set_seq2.
        if self.matching_blocks is not None:
            return self.matching_blocks
        la, lb = len(self.a), len(self.b)

        # This is most naturally expressed as a recursive algorithm, but
        # at least one user bumped into extreme use cases that exceeded
        # the recursion limit on their box.  So, now we maintain a list
        # (`queue`) of blocks we still need to look at, and append partial
        # results to `matching_blocks` in a loop; the matches are sorted
        # at the end.
        queue = [(0, la, 0, lb)]
        matching_blocks = []
        while queue:
            alo, ahi, blo, bhi = queue.pop()
            i, j, k = x = self.find_longest_match(alo, ahi, blo, bhi)
            # a[alo:i] vs b[blo:j] unknown
            # a[i:i+k] same as b[j:j+k]
            # a[i+k:ahi] vs b[j+k:bhi] unknown
            if k:   # if k is 0, there was no matching block
                matching_blocks.append(x)
                if alo < i and blo < j:
                    queue.append((alo, i, blo, j))
                if i+k < ahi and j+k < bhi:
                    queue.append((i+k, ahi, j+k, bhi))
        matching_blocks.sort()

        # It's possible that we have adjacent equal blocks in the
        # matching_blocks list now.  Starting with 2.5, this code was added
        # to collapse them.
        i1 = j1 = k1 = 0
        non_adjacent = []
        for i2, j2, k2 in matching_blocks:
            # Is this block adjacent to i1, j1, k1?
            if i1 + k1 == i2 and j1 + k1 == j2:
                # Yes, so collapse them -- this just increases the length of
                # the first block by the length of the second, and the first
                # block so lengthened remains the block to compare against.
                k1 += k2
            else:
                # Not adjacent.  Remember the first block (k1==0 means it's
                # the dummy we started with), and make the second block the
                # new block to compare against.
                if k1:
                    non_adjacent.append((i1, j1, k1))
                i1, j1, k1 = i2, j2, k2
        if k1:
            non_adjacent.append((i1, j1, k1))

        non_adjacent.append( (la, lb, 0) )

        # Python 2: map() returns a list here, so caching it is safe.
        self.matching_blocks = map(Match._make, non_adjacent)
        return self.matching_blocks
    def get_opcodes(self):
        """Return list of 5-tuples describing how to turn a into b.

        Each tuple is of the form (tag, i1, i2, j1, j2).  The first tuple
        has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the
        tuple preceding it, and likewise for j1 == the previous j2.

        The tags are strings, with these meanings:

        'replace':  a[i1:i2] should be replaced by b[j1:j2]
        'delete':   a[i1:i2] should be deleted.
                    Note that j1==j2 in this case.
        'insert':   b[j1:j2] should be inserted at a[i1:i1].
                    Note that i1==i2 in this case.
        'equal':    a[i1:i2] == b[j1:j2]

        >>> a = "qabxcd"
        >>> b = "abycdf"
        >>> s = SequenceMatcher(None, a, b)
        >>> for tag, i1, i2, j1, j2 in s.get_opcodes():
        ...    print ("%7s a[%d:%d] (%s) b[%d:%d] (%s)" %
        ...           (tag, i1, i2, a[i1:i2], j1, j2, b[j1:j2]))
         delete a[0:1] (q) b[0:0] ()
          equal a[1:3] (ab) b[0:2] (ab)
        replace a[3:4] (x) b[2:3] (y)
          equal a[4:6] (cd) b[3:5] (cd)
         insert a[6:6] () b[5:6] (f)
        """
        # Result is cached; invalidated by set_seq1/set_seq2.
        if self.opcodes is not None:
            return self.opcodes
        i = j = 0
        self.opcodes = answer = []
        for ai, bj, size in self.get_matching_blocks():
            # invariant:  we've pumped out correct diffs to change
            # a[:i] into b[:j], and the next matching block is
            # a[ai:ai+size] == b[bj:bj+size].  So we need to pump
            # out a diff to change a[i:ai] into b[j:bj], pump out
            # the matching block, and move (i,j) beyond the match
            tag = ''
            if i < ai and j < bj:
                tag = 'replace'
            elif i < ai:
                tag = 'delete'
            elif j < bj:
                tag = 'insert'
            if tag:
                answer.append( (tag, i, ai, j, bj) )
            i, j = ai+size, bj+size
            # the list of matching blocks is terminated by a
            # sentinel with size 0
            if size:
                answer.append( ('equal', ai, i, bj, j) )
        return answer
    def get_grouped_opcodes(self, n=3):
        """ Isolate change clusters by eliminating ranges with no changes.

        Return a generator of groups with up to n lines of context.
        Each group is in the same format as returned by get_opcodes().

        >>> from pprint import pprint
        >>> a = map(str, range(1,40))
        >>> b = a[:]
        >>> b[8:8] = ['i']     # Make an insertion
        >>> b[20] += 'x'       # Make a replacement
        >>> b[23:28] = []      # Make a deletion
        >>> b[30] += 'y'       # Make another replacement
        >>> pprint(list(SequenceMatcher(None,a,b).get_grouped_opcodes()))
        [[('equal', 5, 8, 5, 8), ('insert', 8, 8, 8, 9), ('equal', 8, 11, 9, 12)],
         [('equal', 16, 19, 17, 20),
          ('replace', 19, 20, 20, 21),
          ('equal', 20, 22, 21, 23),
          ('delete', 22, 27, 23, 23),
          ('equal', 27, 30, 23, 26)],
         [('equal', 31, 34, 27, 30),
          ('replace', 34, 35, 30, 31),
          ('equal', 35, 38, 31, 34)]]
        """
        codes = self.get_opcodes()
        if not codes:
            # Degenerate case: emit one trivial 'equal' so callers always
            # see at least one group-shaped opcode.
            codes = [("equal", 0, 1, 0, 1)]
        # Fixup leading and trailing groups if they show no changes.
        if codes[0][0] == 'equal':
            tag, i1, i2, j1, j2 = codes[0]
            codes[0] = tag, max(i1, i2-n), i2, max(j1, j2-n), j2
        if codes[-1][0] == 'equal':
            tag, i1, i2, j1, j2 = codes[-1]
            codes[-1] = tag, i1, min(i2, i1+n), j1, min(j2, j1+n)

        nn = n + n
        group = []
        for tag, i1, i2, j1, j2 in codes:
            # End the current group and start a new one whenever
            # there is a large range with no changes.
            if tag == 'equal' and i2-i1 > nn:
                group.append((tag, i1, min(i2, i1+n), j1, min(j2, j1+n)))
                yield group
                group = []
                i1, j1 = max(i1, i2-n), max(j1, j2-n)
            group.append((tag, i1, i2, j1 ,j2))
        if group and not (len(group)==1 and group[0][0] == 'equal'):
            yield group
def ratio(self):
"""Return a measure of the sequences' similarity (float in [0,1]).
Where T is the total number of elements in both sequences, and
M is the number of matches, this is 2.0*M / T.
Note that this is 1 if the sequences are identical, and 0 if
they have nothing in common.
.ratio() is expensive to compute if you haven't already computed
.get_matching_blocks() or .get_opcodes(), in which case you may
want to try .quick_ratio() or .real_quick_ratio() first to get an
upper bound.
>>> s = SequenceMatcher(None, "abcd", "bcde")
>>> s.ratio()
0.75
>>> s.quick_ratio()
0.75
>>> s.real_quick_ratio()
1.0
"""
matches = reduce(lambda sum, triple: sum + triple[-1],
self.get_matching_blocks(), 0)
return _calculate_ratio(matches, len(self.a) + len(self.b))
def quick_ratio(self):
"""Return an upper bound on ratio() relatively quickly.
This isn't defined beyond that it is an upper bound on .ratio(), and
is faster to compute.
"""
# viewing a and b as multisets, set matches to the cardinality
# of their intersection; this counts the number of matches
# without regard to order, so is clearly an upper bound
if self.fullbcount is None:
self.fullbcount = fullbcount = {}
for elt in self.b:
fullbcount[elt] = fullbcount.get(elt, 0) + 1
fullbcount = self.fullbcount
# avail[x] is the number of times x appears in 'b' less the
# number of times we've seen it in 'a' so far ... kinda
avail = {}
availhas, matches = avail.__contains__, 0
for elt in self.a:
if availhas(elt):
numb = avail[elt]
else:
numb = fullbcount.get(elt, 0)
avail[elt] = numb - 1
if numb > 0:
matches = matches + 1
return _calculate_ratio(matches, len(self.a) + len(self.b))
def real_quick_ratio(self):
"""Return an upper bound on ratio() very quickly.
This isn't defined beyond that it is an upper bound on .ratio(), and
is faster to compute than either .ratio() or .quick_ratio().
"""
la, lb = len(self.a), len(self.b)
# can't have more matches than the number of elements in the
# shorter sequence
return _calculate_ratio(min(la, lb), la + lb)
def get_close_matches(word, possibilities, n=3, cutoff=0.6):
    """Use SequenceMatcher to return a list of the best "good enough" matches.

    word -- a sequence for which close matches are desired (typically a
        string).
    possibilities -- a list of sequences against which to match word
        (typically a list of strings).
    n -- maximum number of close matches to return; must be > 0 (default 3).
    cutoff -- float in [0, 1] (default 0.6); possibilities scoring below
        this similarity to word are ignored.

    The best (no more than n) matches among the possibilities are returned
    in a list, sorted by similarity score, most similar first.

    >>> get_close_matches("appel", ["ape", "apple", "peach", "puppy"])
    ['apple', 'ape']
    """
    # NOTE: the `not` forms reject non-orderable/odd inputs exactly the way
    # the stdlib does; don't "simplify" to n <= 0.
    if not n > 0:
        raise ValueError("n must be > 0: %r" % (n,))
    if not 0.0 <= cutoff <= 1.0:
        raise ValueError("cutoff must be in [0.0, 1.0]: %r" % (cutoff,))
    matcher = SequenceMatcher()
    matcher.set_seq2(word)
    scored = []
    for candidate in possibilities:
        matcher.set_seq1(candidate)
        # Cheap upper bounds first: only compute the expensive true ratio
        # when both quick estimates already clear the cutoff.
        if (matcher.real_quick_ratio() >= cutoff and
                matcher.quick_ratio() >= cutoff and
                matcher.ratio() >= cutoff):
            scored.append((matcher.ratio(), candidate))
    # Keep only the n best scorers, highest similarity first.
    best = heapq.nlargest(n, scored)
    # Drop the scores, keeping just the matched possibilities.
    return [candidate for score, candidate in best]
def _count_leading(line, ch):
"""
Return number of `ch` characters at the start of `line`.
Example:
>>> _count_leading(' abc', ' ')
3
"""
i, n = 0, len(line)
while i < n and line[i] == ch:
i += 1
return i
class Differ:
    r"""
    Differ is a class for comparing sequences of lines of text, and
    producing human-readable differences or deltas.  Differ uses
    SequenceMatcher both to compare sequences of lines, and to compare
    sequences of characters within similar (near-matching) lines.

    Each line of a Differ delta begins with a two-letter code:

        '- '    line unique to sequence 1
        '+ '    line unique to sequence 2
        '  '    line common to both sequences
        '? '    line not present in either input sequence

    Lines beginning with '? ' attempt to guide the eye to intraline
    differences, and were not present in either input sequence.  These lines
    can be confusing if the sequences contain tab characters.

    Note that Differ makes no claim to produce a *minimal* diff.  To the
    contrary, minimal diffs are often counter-intuitive, because they synch
    up anywhere possible, sometimes accidental matches 100 pages apart.
    Restricting synch points to contiguous matches preserves some notion of
    locality, at the occasional cost of producing a longer diff.

    Example: Comparing two texts.

    First we set up the texts, sequences of individual single-line strings
    ending with newlines (such sequences can also be obtained from the
    `readlines()` method of file-like objects):

    >>> text1 = '''  1. Beautiful is better than ugly.
    ...   2. Explicit is better than implicit.
    ...   3. Simple is better than complex.
    ...   4. Complex is better than complicated.
    ... '''.splitlines(1)
    >>> len(text1)
    4
    >>> text1[0][-1]
    '\n'
    >>> text2 = '''  1. Beautiful is better than ugly.
    ...   3.   Simple is better than complex.
    ...   4. Complicated is better than complex.
    ...   5. Flat is better than nested.
    ... '''.splitlines(1)

    Next we instantiate a Differ object:

    >>> d = Differ()

    Note that when instantiating a Differ object we may pass functions to
    filter out line and character 'junk'.  See Differ.__init__ for details.

    Finally, we compare the two:

    >>> result = list(d.compare(text1, text2))

    'result' is a list of strings, so let's pretty-print it:

    >>> from pprint import pprint as _pprint
    >>> _pprint(result)
    ['    1. Beautiful is better than ugly.\n',
     '-   2. Explicit is better than implicit.\n',
     '-   3. Simple is better than complex.\n',
     '+   3.   Simple is better than complex.\n',
     '?     ++\n',
     '-   4. Complex is better than complicated.\n',
     '?            ^                     ---- ^\n',
     '+   4. Complicated is better than complex.\n',
     '?           ++++ ^                      ^\n',
     '+   5. Flat is better than nested.\n']

    As a single multi-line string it looks like this:

    >>> print ''.join(result),
        1. Beautiful is better than ugly.
    -   2. Explicit is better than implicit.
    -   3. Simple is better than complex.
    +   3.   Simple is better than complex.
    ?     ++
    -   4. Complex is better than complicated.
    ?            ^                     ---- ^
    +   4. Complicated is better than complex.
    ?           ++++ ^                      ^
    +   5. Flat is better than nested.

    Methods:

    __init__(linejunk=None, charjunk=None)
        Construct a text differencer, with optional filters.

    compare(a, b)
        Compare two sequences of lines; generate the resulting delta.
    """

    def __init__(self, linejunk=None, charjunk=None):
        """
        Construct a text differencer, with optional filters.

        The two optional keyword parameters are for filter functions:

        - `linejunk`: A function that should accept a single string argument,
          and return true iff the string is junk. The module-level function
          `IS_LINE_JUNK` may be used to filter out lines without visible
          characters, except for at most one splat ('#').  It is recommended
          to leave linejunk None; as of Python 2.3, the underlying
          SequenceMatcher class has grown an adaptive notion of "noise" lines
          that's better than any static definition the author has ever been
          able to craft.

        - `charjunk`: A function that should accept a string of length 1. The
          module-level function `IS_CHARACTER_JUNK` may be used to filter out
          whitespace characters (a blank or tab; **note**: bad idea to include
          newline in this!).  Use of IS_CHARACTER_JUNK is recommended.
        """
        # Filters are stored as-is; None means "no filtering" for linejunk
        # (SequenceMatcher then applies its own autojunk heuristic).
        self.linejunk = linejunk
        self.charjunk = charjunk

    def compare(self, a, b):
        r"""
        Compare two sequences of lines; generate the resulting delta.

        Each sequence must contain individual single-line strings ending with
        newlines. Such sequences can be obtained from the `readlines()` method
        of file-like objects.  The delta generated also consists of newline-
        terminated strings, ready to be printed as-is via the writeline()
        method of a file-like object.

        Example:

        >>> print ''.join(Differ().compare('one\ntwo\nthree\n'.splitlines(1),
        ...                                'ore\ntree\nemu\n'.splitlines(1))),
        - one
        ?  ^
        + ore
        ?  ^
        - two
        - three
        ?  -
        + tree
        + emu
        """
        # Line-level opcodes drive the output; 'replace' ranges get the
        # expensive similarity search, everything else is a plain dump.
        cruncher = SequenceMatcher(self.linejunk, a, b)
        for tag, alo, ahi, blo, bhi in cruncher.get_opcodes():
            if tag == 'replace':
                g = self._fancy_replace(a, alo, ahi, b, blo, bhi)
            elif tag == 'delete':
                g = self._dump('-', a, alo, ahi)
            elif tag == 'insert':
                g = self._dump('+', b, blo, bhi)
            elif tag == 'equal':
                g = self._dump(' ', a, alo, ahi)
            else:
                raise ValueError, 'unknown tag %r' % (tag,)

            for line in g:
                yield line

    def _dump(self, tag, x, lo, hi):
        """Generate comparison results for a same-tagged range."""
        for i in xrange(lo, hi):
            yield '%s %s' % (tag, x[i])

    def _plain_replace(self, a, alo, ahi, b, blo, bhi):
        # Fallback for 'replace' ranges with no sufficiently-similar pair.
        assert alo < ahi and blo < bhi
        # dump the shorter block first -- reduces the burden on short-term
        # memory if the blocks are of very different sizes
        if bhi - blo < ahi - alo:
            first  = self._dump('+', b, blo, bhi)
            second = self._dump('-', a, alo, ahi)
        else:
            first  = self._dump('-', a, alo, ahi)
            second = self._dump('+', b, blo, bhi)

        for g in first, second:
            for line in g:
                yield line

    def _fancy_replace(self, a, alo, ahi, b, blo, bhi):
        r"""
        When replacing one block of lines with another, search the blocks
        for *similar* lines; the best-matching pair (if any) is used as a
        synch point, and intraline difference marking is done on the
        similar pair. Lots of work, but often worth it.

        Example:

        >>> d = Differ()
        >>> results = d._fancy_replace(['abcDefghiJkl\n'], 0, 1,
        ...                            ['abcdefGhijkl\n'], 0, 1)
        >>> print ''.join(results),
        - abcDefghiJkl
        ?    ^  ^  ^
        + abcdefGhijkl
        ?    ^  ^  ^
        """
        # don't synch up unless the lines have a similarity score of at
        # least cutoff; best_ratio tracks the best score seen so far
        best_ratio, cutoff = 0.74, 0.75
        cruncher = SequenceMatcher(self.charjunk)
        eqi, eqj = None, None   # 1st indices of equal lines (if any)

        # search for the pair that matches best without being identical
        # (identical lines must be junk lines, & we don't want to synch up
        # on junk -- unless we have to)
        for j in xrange(blo, bhi):
            bj = b[j]
            cruncher.set_seq2(bj)
            for i in xrange(alo, ahi):
                ai = a[i]
                if ai == bj:
                    if eqi is None:
                        eqi, eqj = i, j
                    continue
                cruncher.set_seq1(ai)
                # computing similarity is expensive, so use the quick
                # upper bounds first -- have seen this speed up messy
                # compares by a factor of 3.
                # note that ratio() is only expensive to compute the first
                # time it's called on a sequence pair; the expensive part
                # of the computation is cached by cruncher
                if cruncher.real_quick_ratio() > best_ratio and \
                      cruncher.quick_ratio() > best_ratio and \
                      cruncher.ratio() > best_ratio:
                    best_ratio, best_i, best_j = cruncher.ratio(), i, j

        if best_ratio < cutoff:
            # no non-identical "pretty close" pair
            if eqi is None:
                # no identical pair either -- treat it as a straight replace
                for line in self._plain_replace(a, alo, ahi, b, blo, bhi):
                    yield line
                return
            # no close pair, but an identical pair -- synch up on that
            best_i, best_j, best_ratio = eqi, eqj, 1.0
        else:
            # there's a close pair, so forget the identical pair (if any)
            eqi = None

        # a[best_i] very similar to b[best_j]; eqi is None iff they're not
        # identical

        # pump out diffs from before the synch point
        for line in self._fancy_helper(a, alo, best_i, b, blo, best_j):
            yield line

        # do intraline marking on the synch pair
        aelt, belt = a[best_i], b[best_j]
        if eqi is None:
            # pump out a '-', '?', '+', '?' quad for the synched lines
            atags = btags = ""
            cruncher.set_seqs(aelt, belt)
            for tag, ai1, ai2, bj1, bj2 in cruncher.get_opcodes():
                la, lb = ai2 - ai1, bj2 - bj1
                if tag == 'replace':
                    atags += '^' * la
                    btags += '^' * lb
                elif tag == 'delete':
                    atags += '-' * la
                elif tag == 'insert':
                    btags += '+' * lb
                elif tag == 'equal':
                    atags += ' ' * la
                    btags += ' ' * lb
                else:
                    raise ValueError, 'unknown tag %r' % (tag,)
            for line in self._qformat(aelt, belt, atags, btags):
                yield line
        else:
            # the synch pair is identical
            yield '  ' + aelt

        # pump out diffs from after the synch point
        for line in self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi):
            yield line

    def _fancy_helper(self, a, alo, ahi, b, blo, bhi):
        # Recurse on a sub-range: both sides non-empty -> another fancy
        # replace; one side empty -> plain deletion/insertion dump.
        g = []
        if alo < ahi:
            if blo < bhi:
                g = self._fancy_replace(a, alo, ahi, b, blo, bhi)
            else:
                g = self._dump('-', a, alo, ahi)
        elif blo < bhi:
            g = self._dump('+', b, blo, bhi)

        for line in g:
            yield line

    def _qformat(self, aline, bline, atags, btags):
        r"""
        Format "?" output and deal with leading tabs.

        Example:

        >>> d = Differ()
        >>> results = d._qformat('\tabcDefghiJkl\n', '\tabcdefGhijkl\n',
        ...                      '  ^ ^  ^      ', '  ^ ^  ^      ')
        >>> for line in results: print repr(line)
        ...
        '- \tabcDefghiJkl\n'
        '? \t ^ ^  ^\n'
        '+ \tabcdefGhijkl\n'
        '? \t ^ ^  ^\n'
        """

        # Can hurt, but will probably help most of the time.
        common = min(_count_leading(aline, "\t"),
                     _count_leading(bline, "\t"))
        common = min(common, _count_leading(atags[:common], " "))
        common = min(common, _count_leading(btags[:common], " "))
        atags = atags[common:].rstrip()
        btags = btags[common:].rstrip()

        yield "- " + aline
        if atags:
            yield "? %s%s\n" % ("\t" * common, atags)

        yield "+ " + bline
        if btags:
            yield "? %s%s\n" % ("\t" * common, btags)
# With respect to junk, an earlier version of ndiff simply refused to
# *start* a match with a junk element. The result was cases like this:
# before: private Thread currentThread;
# after: private volatile Thread currentThread;
# If you consider whitespace to be junk, the longest contiguous match
# not starting with junk is "e Thread currentThread". So ndiff reported
# that "e volatil" was inserted between the 't' and the 'e' in "private".
# While an accurate view, to people that's absurd. The current version
# looks for matching blocks that are entirely junk-free, then extends the
# longest one of those as far as possible but only with matching junk.
# So now "currentThread" is matched, then extended to suck up the
# preceding blank; then "private" is matched, and extended to suck up the
# following blank; then "Thread" is matched; and finally ndiff reports
# that "volatile " was inserted before "Thread". The only quibble
# remaining is that perhaps it was really the case that " volatile"
# was inserted after "private". I can live with that <wink>.
import re
def IS_LINE_JUNK(line, pat=re.compile(r"\s*#?\s*$").match):
    r"""
    Return True for an ignorable line: blank, or containing a single '#'.

    Examples:

    >>> IS_LINE_JUNK('\n')
    True
    >>> IS_LINE_JUNK('  #   \n')
    True
    >>> IS_LINE_JUNK('hello\n')
    False
    """
    # The pattern (compiled once, at definition time) allows optional
    # whitespace, at most one '#', and optional trailing whitespace.
    match = pat(line)
    return match is not None
def IS_CHARACTER_JUNK(ch, ws=" \t"):
    r"""
    Return True for an ignorable character: a space or a tab.

    Examples:

    >>> IS_CHARACTER_JUNK(' ')
    True
    >>> IS_CHARACTER_JUNK('\t')
    True
    >>> IS_CHARACTER_JUNK('\n')
    False
    >>> IS_CHARACTER_JUNK('x')
    False
    """
    # `ws` is a default-argument "constant" so callers may override the
    # notion of junk; membership test does the work.
    return ch in ws
########################################################################
### Unified Diff
########################################################################
def _format_range_unified(start, stop):
'Convert range to the "ed" format'
# Per the diff spec at http://www.unix.org/single_unix_specification/
beginning = start + 1 # lines start numbering with one
length = stop - start
if length == 1:
return '{}'.format(beginning)
if not length:
beginning -= 1 # empty ranges begin at line just before the range
return '{},{}'.format(beginning, length)
def unified_diff(a, b, fromfile='', tofile='', fromfiledate='',
                 tofiledate='', n=3, lineterm='\n'):
    r"""
    Compare two sequences of lines; generate the delta as a unified diff.

    Unified diffs show changed lines plus `n` lines of context (default 3).

    The control lines (---, +++, @@) are terminated with `lineterm`,
    which defaults to '\n' so that input obtained from file.readlines()
    yields output ready for file.writelines().  For inputs without
    trailing newlines, pass lineterm='' for uniformly newline-free output.

    'fromfile', 'tofile', 'fromfiledate' and 'tofiledate' fill in the
    optional header; the dates are conventionally in ISO 8601 format.
    """
    emitted_header = False
    for group in SequenceMatcher(None, a, b).get_grouped_opcodes(n):
        # The file header goes out lazily, before the first hunk only.
        if not emitted_header:
            emitted_header = True
            fromdate = '\t{}'.format(fromfiledate) if fromfiledate else ''
            todate = '\t{}'.format(tofiledate) if tofiledate else ''
            yield '--- {}{}{}'.format(fromfile, fromdate, lineterm)
            yield '+++ {}{}{}'.format(tofile, todate, lineterm)

        first, last = group[0], group[-1]
        file1_range = _format_range_unified(first[1], last[2])
        file2_range = _format_range_unified(first[3], last[4])
        yield '@@ -{} +{} @@{}'.format(file1_range, file2_range, lineterm)

        for tag, i1, i2, j1, j2 in group:
            if tag == 'equal':
                for line in a[i1:i2]:
                    yield ' ' + line
            else:
                # 'replace' emits both halves: deletions, then insertions.
                if tag != 'insert':
                    for line in a[i1:i2]:
                        yield '-' + line
                if tag != 'delete':
                    for line in b[j1:j2]:
                        yield '+' + line
########################################################################
### Context Diff
########################################################################
def _format_range_context(start, stop):
'Convert range to the "ed" format'
# Per the diff spec at http://www.unix.org/single_unix_specification/
beginning = start + 1 # lines start numbering with one
length = stop - start
if not length:
beginning -= 1 # empty ranges begin at line just before the range
if length <= 1:
return '{}'.format(beginning)
return '{},{}'.format(beginning, beginning + length - 1)
# See http://www.unix.org/single_unix_specification/
def context_diff(a, b, fromfile='', tofile='',
                 fromfiledate='', tofiledate='', n=3, lineterm='\n'):
    r"""
    Compare two sequences of lines; generate the delta as a context diff.

    Context diffs show changed lines plus `n` lines of context (default 3).

    The control lines (*** and ---) are terminated with `lineterm`,
    defaulting to '\n' so that input from file.readlines() yields output
    ready for file.writelines().  Pass lineterm='' for inputs without
    trailing newlines.

    'fromfile', 'tofile', 'fromfiledate' and 'tofiledate' fill in the
    optional header (blank when not given); the dates are conventionally
    in ISO 8601 format.
    """
    # Per-tag line markers of the context-diff format.
    prefix = dict(insert='+ ', delete='- ', replace='! ', equal='  ')
    emitted_header = False
    for group in SequenceMatcher(None, a, b).get_grouped_opcodes(n):
        # Header goes out lazily, before the first hunk only.
        if not emitted_header:
            emitted_header = True
            fromdate = '\t{}'.format(fromfiledate) if fromfiledate else ''
            todate = '\t{}'.format(tofiledate) if tofiledate else ''
            yield '*** {}{}{}'.format(fromfile, fromdate, lineterm)
            yield '--- {}{}{}'.format(tofile, todate, lineterm)

        first, last = group[0], group[-1]
        yield '***************' + lineterm

        file1_range = _format_range_context(first[1], last[2])
        yield '*** {} ****{}'.format(file1_range, lineterm)

        # The "from" half is shown only when something was removed/changed.
        if any(op[0] in ('replace', 'delete') for op in group):
            for tag, i1, i2, _, _ in group:
                if tag != 'insert':
                    for line in a[i1:i2]:
                        yield prefix[tag] + line

        file2_range = _format_range_context(first[3], last[4])
        yield '--- {} ----{}'.format(file2_range, lineterm)

        # The "to" half is shown only when something was added/changed.
        if any(op[0] in ('replace', 'insert') for op in group):
            for tag, _, _, j1, j2 in group:
                if tag != 'delete':
                    for line in b[j1:j2]:
                        yield prefix[tag] + line
def ndiff(a, b, linejunk=None, charjunk=IS_CHARACTER_JUNK):
    r"""
    Compare `a` and `b` (lists of strings); return a `Differ`-style delta.

    Optional keyword parameters `linejunk` and `charjunk` are filter
    functions (or None):

    - linejunk: accepts a single string and returns true iff it is junk.
      The default, None, is recommended; as of Python 2.3 the underlying
      SequenceMatcher applies its own adaptive notion of "noise" lines.

    - charjunk: accepts a string of length 1 and returns true iff it is
      junk.  Defaults to IS_CHARACTER_JUNK, which filters blanks and tabs
      (including newline here would be a bad idea).

    Tools/scripts/ndiff.py is a command-line front-end to this function.
    """
    # Thin convenience wrapper: build the differencer and hand back its
    # lazy delta generator.
    differ = Differ(linejunk, charjunk)
    return differ.compare(a, b)
def _mdiff(fromlines, tolines, context=None, linejunk=None,
           charjunk=IS_CHARACTER_JUNK):
    r"""Returns generator yielding marked up from/to side by side differences.

    Arguments:
    fromlines -- list of text lines to compared to tolines
    tolines -- list of text lines to be compared to fromlines
    context -- number of context lines to display on each side of difference,
               if None, all from/to text lines will be generated.
    linejunk -- passed on to ndiff (see ndiff documentation)
    charjunk -- passed on to ndiff (see ndiff documentation)

    This function returns an iterator which returns a tuple:
    (from line tuple, to line tuple, boolean flag)

    from/to line tuple -- (line num, line text)
        line num -- integer or None (to indicate a context separation)
        line text -- original line text with following markers inserted:
            '\0+' -- marks start of added text
            '\0-' -- marks start of deleted text
            '\0^' -- marks start of changed text
            '\1' -- marks end of added/deleted/changed text

    boolean flag -- None indicates context separation, True indicates
        either "from" or "to" line contains a change, otherwise False.

    This function/iterator was originally developed to generate side by side
    file difference for making HTML pages (see HtmlDiff class for example
    usage).

    Note, this function utilizes the ndiff function to generate the side by
    side difference markup.  Optional ndiff arguments may be passed to this
    function and they in turn will be passed to ndiff.

    NOTE(review): this code relies on Python 2 generator semantics --
    `.next()` calls and `raise StopIteration` to end a generator (invalid
    under PEP 479 / Python 3).
    """
    import re

    # regular expression for finding intraline change indices
    change_re = re.compile('(\++|\-+|\^+)')

    # create the difference iterator to generate the differences
    diff_lines_iterator = ndiff(fromlines,tolines,linejunk,charjunk)

    def _make_line(lines, format_key, side, num_lines=[0,0]):
        """Returns line of text with user's change markup and line formatting.

        lines -- list of lines from the ndiff generator to produce a line of
                 text from.  When producing the line of text to return, the
                 lines used are removed from this list.
        format_key -- '+' return first line in list with "add" markup around
                          the entire line.
                      '-' return first line in list with "delete" markup around
                          the entire line.
                      '?' return first line in list with add/delete/change
                          intraline markup (indices obtained from second line)
                      None return first line in list with no markup
        side -- indice into the num_lines list (0=from,1=to)
        num_lines -- from/to current line number.  This is NOT intended to be a
                     passed parameter.  It is present as a keyword argument to
                     maintain memory of the current line numbers between calls
                     of this function.

        Note, this function is purposefully not defined at the module scope so
        that data it needs from its parent function (within whose context it
        is defined) does not need to be of module scope.
        """
        # num_lines is a deliberate mutable default: it persists across
        # calls and tracks the running from/to line counters.
        num_lines[side] += 1
        # Handle case where no user markup is to be added, just return line of
        # text with user's line format to allow for usage of the line number.
        if format_key is None:
            return (num_lines[side],lines.pop(0)[2:])
        # Handle case of intraline changes
        if format_key == '?':
            text, markers = lines.pop(0), lines.pop(0)
            # find intraline changes (store change type and indices in tuples)
            sub_info = []
            def record_sub_info(match_object,sub_info=sub_info):
                sub_info.append([match_object.group(1)[0],match_object.span()])
                return match_object.group(1)
            change_re.sub(record_sub_info,markers)
            # process each tuple inserting our special marks that won't be
            # noticed by an xml/html escaper.
            for key,(begin,end) in sub_info[::-1]:
                text = text[0:begin]+'\0'+key+text[begin:end]+'\1'+text[end:]
            text = text[2:]
        # Handle case of add/delete entire line
        else:
            text = lines.pop(0)[2:]
            # if line of text is just a newline, insert a space so there is
            # something for the user to highlight and see.
            if not text:
                text = ' '
            # insert marks that won't be noticed by an xml/html escaper.
            text = '\0' + format_key + text + '\1'
        # Return line of text, first allow user's line formatter to do its
        # thing (such as adding the line number) then replace the special
        # marks with what the user's change markup.
        return (num_lines[side],text)

    def _line_iterator():
        """Yields from/to lines of text with a change indication.

        This function is an iterator.  It itself pulls lines from a
        differencing iterator, processes them and yields them.  When it can
        it yields both a "from" and a "to" line, otherwise it will yield one
        or the other.  In addition to yielding the lines of from/to text, a
        boolean flag is yielded to indicate if the text line(s) have
        differences in them.

        Note, this function is purposefully not defined at the module scope so
        that data it needs from its parent function (within whose context it
        is defined) does not need to be of module scope.
        """
        lines = []
        num_blanks_pending, num_blanks_to_yield = 0, 0
        while True:
            # Load up next 4 lines so we can look ahead, create strings which
            # are a concatenation of the first character of each of the 4 lines
            # so we can do some very readable comparisons.
            while len(lines) < 4:
                try:
                    lines.append(diff_lines_iterator.next())
                except StopIteration:
                    # 'X' is a sentinel "no more lines" marker.
                    lines.append('X')
            s = ''.join([line[0] for line in lines])
            if s.startswith('X'):
                # When no more lines, pump out any remaining blank lines so the
                # corresponding add/delete lines get a matching blank line so
                # all line pairs get yielded at the next level.
                num_blanks_to_yield = num_blanks_pending
            elif s.startswith('-?+?'):
                # simple intraline change
                yield _make_line(lines,'?',0), _make_line(lines,'?',1), True
                continue
            elif s.startswith('--++'):
                # in delete block, add block coming: we do NOT want to get
                # caught up on blank lines yet, just process the delete line
                num_blanks_pending -= 1
                yield _make_line(lines,'-',0), None, True
                continue
            elif s.startswith(('--?+', '--+', '- ')):
                # in delete block and see a intraline change or unchanged line
                # coming: yield the delete line and then blanks
                from_line,to_line = _make_line(lines,'-',0), None
                num_blanks_to_yield,num_blanks_pending = num_blanks_pending-1,0
            elif s.startswith('-+?'):
                # intraline change
                yield _make_line(lines,None,0), _make_line(lines,'?',1), True
                continue
            elif s.startswith('-?+'):
                # intraline change
                yield _make_line(lines,'?',0), _make_line(lines,None,1), True
                continue
            elif s.startswith('-'):
                # delete FROM line
                num_blanks_pending -= 1
                yield _make_line(lines,'-',0), None, True
                continue
            elif s.startswith('+--'):
                # in add block, delete block coming: we do NOT want to get
                # caught up on blank lines yet, just process the add line
                num_blanks_pending += 1
                yield None, _make_line(lines,'+',1), True
                continue
            elif s.startswith(('+ ', '+-')):
                # will be leaving an add block: yield blanks then add line
                from_line, to_line = None, _make_line(lines,'+',1)
                num_blanks_to_yield,num_blanks_pending = num_blanks_pending+1,0
            elif s.startswith('+'):
                # inside an add block, yield the add line
                num_blanks_pending += 1
                yield None, _make_line(lines,'+',1), True
                continue
            elif s.startswith(' '):
                # unchanged text, yield it to both sides
                yield _make_line(lines[:],None,0),_make_line(lines,None,1),False
                continue
            # Catch up on the blank lines so when we yield the next from/to
            # pair, they are lined up.
            while(num_blanks_to_yield < 0):
                num_blanks_to_yield += 1
                yield None,('','\n'),True
            while(num_blanks_to_yield > 0):
                num_blanks_to_yield -= 1
                yield ('','\n'),None,True
            if s.startswith('X'):
                # Python 2 idiom: terminate this generator.
                raise StopIteration
            else:
                yield from_line,to_line,True

    def _line_pair_iterator():
        """Yields from/to lines of text with a change indication.

        This function is an iterator.  It itself pulls lines from the line
        iterator.  Its difference from that iterator is that this function
        always yields a pair of from/to text lines (with the change
        indication).  If necessary it will collect single from/to lines
        until it has a matching pair from/to pair to yield.

        Note, this function is purposefully not defined at the module scope so
        that data it needs from its parent function (within whose context it
        is defined) does not need to be of module scope.
        """
        line_iterator = _line_iterator()
        fromlines,tolines=[],[]
        while True:
            # Collecting lines of text until we have a from/to pair
            while (len(fromlines)==0 or len(tolines)==0):
                from_line, to_line, found_diff =line_iterator.next()
                if from_line is not None:
                    fromlines.append((from_line,found_diff))
                if to_line is not None:
                    tolines.append((to_line,found_diff))
            # Once we have a pair, remove them from the collection and yield it
            from_line, fromDiff = fromlines.pop(0)
            to_line, to_diff = tolines.pop(0)
            yield (from_line,to_line,fromDiff or to_diff)

    # Handle case where user does not want context differencing, just yield
    # them up without doing anything else with them.
    line_pair_iterator = _line_pair_iterator()
    if context is None:
        while True:
            yield line_pair_iterator.next()
    # Handle case where user wants context differencing.  We must do some
    # storage of lines until we know for sure that they are to be yielded.
    else:
        context += 1
        lines_to_write = 0
        while True:
            # Store lines up until we find a difference, note use of a
            # circular queue because we only need to keep around what
            # we need for context.
            index, contextLines = 0, [None]*(context)
            found_diff = False
            while(found_diff is False):
                from_line, to_line, found_diff = line_pair_iterator.next()
                i = index % context
                contextLines[i] = (from_line, to_line, found_diff)
                index += 1
            # Yield lines that we have collected so far, but first yield
            # the user's separator.
            if index > context:
                yield None, None, None
                lines_to_write = context
            else:
                lines_to_write = index
                index = 0
            while(lines_to_write):
                i = index % context
                index += 1
                yield contextLines[i]
                lines_to_write -= 1
            # Now yield the context lines after the change
            lines_to_write = context-1
            while(lines_to_write):
                from_line, to_line, found_diff = line_pair_iterator.next()
                # If another change within the context, extend the context
                if found_diff:
                    lines_to_write = context-1
                else:
                    lines_to_write -= 1
                yield from_line, to_line, found_diff
_file_template = """
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html>
<head>
<meta http-equiv="Content-Type"
content="text/html; charset=ISO-8859-1" />
<title></title>
<style type="text/css">%(styles)s
</style>
</head>
<body>
%(table)s%(legend)s
</body>
</html>"""
_styles = """
table.diff {font-family:Courier; border:medium;}
.diff_header {background-color:#e0e0e0}
td.diff_header {text-align:right}
.diff_next {background-color:#c0c0c0}
.diff_add {background-color:#aaffaa}
.diff_chg {background-color:#ffff77}
.diff_sub {background-color:#ffaaaa}"""
_table_template = """
<table class="diff" id="difflib_chg_%(prefix)s_top"
cellspacing="0" cellpadding="0" rules="groups" >
<colgroup></colgroup> <colgroup></colgroup> <colgroup></colgroup>
<colgroup></colgroup> <colgroup></colgroup> <colgroup></colgroup>
%(header_row)s
<tbody>
%(data_rows)s </tbody>
</table>"""
_legend = """
<table class="diff" summary="Legends">
<tr> <th colspan="2"> Legends </th> </tr>
<tr> <td> <table border="" summary="Colors">
<tr><th> Colors </th> </tr>
<tr><td class="diff_add"> Added </td></tr>
<tr><td class="diff_chg">Changed</td> </tr>
<tr><td class="diff_sub">Deleted</td> </tr>
</table></td>
<td> <table border="" summary="Links">
<tr><th colspan="2"> Links </th> </tr>
<tr><td>(f)irst change</td> </tr>
<tr><td>(n)ext change</td> </tr>
<tr><td>(t)op</td> </tr>
</table></td> </tr>
</table>"""
class HtmlDiff(object):
    """For producing HTML side by side comparison with change highlights.
    This class can be used to create an HTML table (or a complete HTML file
    containing the table) showing a side by side, line by line comparison
    of text with inter-line and intra-line change highlights. The table can
    be generated in either full or contextual difference mode.
    The following methods are provided for HTML generation:
    make_table -- generates HTML for a single side by side table
    make_file -- generates complete HTML file with a single side by side table
    See tools/scripts/diff.py for an example usage of this class.
    """
    # Templates/styles are bound as class attributes so subclasses can
    # override the generated markup.
    _file_template = _file_template
    _styles = _styles
    _table_template = _table_template
    _legend = _legend
    # Counter used by _make_prefix() to build unique anchor-id prefixes so
    # several tables can coexist on one page.
    _default_prefix = 0
    def __init__(self,tabsize=8,wrapcolumn=None,linejunk=None,
                 charjunk=IS_CHARACTER_JUNK):
        """HtmlDiff instance initializer
        Arguments:
        tabsize -- tab stop spacing, defaults to 8.
        wrapcolumn -- column number where lines are broken and wrapped,
            defaults to None where lines are not wrapped.
        linejunk,charjunk -- keyword arguments passed into ndiff() (used to by
            HtmlDiff() to generate the side by side HTML differences). See
            ndiff() documentation for argument default values and descriptions.
        """
        self._tabsize = tabsize
        self._wrapcolumn = wrapcolumn
        self._linejunk = linejunk
        self._charjunk = charjunk
    def make_file(self,fromlines,tolines,fromdesc='',todesc='',context=False,
                  numlines=5):
        """Returns HTML file of side by side comparison with change highlights
        Arguments:
        fromlines -- list of "from" lines
        tolines -- list of "to" lines
        fromdesc -- "from" file column header string
        todesc -- "to" file column header string
        context -- set to True for contextual differences (defaults to False
            which shows full differences).
        numlines -- number of context lines. When context is set True,
            controls number of lines displayed before and after the change.
            When context is False, controls the number of lines to place
            the "next" link anchors before the next change (so click of
            "next" link jumps to just before the change).
        """
        return self._file_template % dict(
            styles = self._styles,
            legend = self._legend,
            table = self.make_table(fromlines,tolines,fromdesc,todesc,
                                    context=context,numlines=numlines))
    def _tab_newline_replace(self,fromlines,tolines):
        """Returns from/to line lists with tabs expanded and newlines removed.
        Instead of tab characters being replaced by the number of spaces
        needed to fill in to the next tab stop, this function will fill
        the space with tab characters. This is done so that the difference
        algorithms can identify changes in a file when tabs are replaced by
        spaces and vice versa. At the end of the HTML generation, the tab
        characters will be replaced with a nonbreakable space.
        """
        def expand_tabs(line):
            # hide real spaces
            line = line.replace(' ','\0')
            # expand tabs into spaces
            line = line.expandtabs(self._tabsize)
            # replace spaces from expanded tabs back into tab characters
            # (we'll replace them with markup after we do differencing)
            line = line.replace(' ','\t')
            return line.replace('\0',' ').rstrip('\n')
        fromlines = [expand_tabs(line) for line in fromlines]
        tolines = [expand_tabs(line) for line in tolines]
        return fromlines,tolines
    def _split_line(self,data_list,line_num,text):
        """Builds list of text lines by splitting text lines at wrap point
        This function will determine if the input text line needs to be
        wrapped (split) into separate lines. If so, the first wrap point
        will be determined and the first line appended to the output
        text line list. This function is used recursively to handle
        the second part of the split line to further split it.
        """
        # if blank line or context separator, just add it to the output list
        if not line_num:
            data_list.append((line_num,text))
            return
        # if line text doesn't need wrapping, just add it to the output list
        size = len(text)
        max = self._wrapcolumn
        if (size <= max) or ((size -(text.count('\0')*3)) <= max):
            data_list.append((line_num,text))
            return
        # scan text looking for the wrap point, keeping track if the wrap
        # point is inside markers
        i = 0
        n = 0
        mark = ''
        while n < max and i < size:
            if text[i] == '\0':
                i += 1
                mark = text[i]
                i += 1
            elif text[i] == '\1':
                i += 1
                mark = ''
            else:
                i += 1
                n += 1
        # wrap point is inside text, break it up into separate lines
        line1 = text[:i]
        line2 = text[i:]
        # if wrap point is inside markers, place end marker at end of first
        # line and start marker at beginning of second line because each
        # line will have its own table tag markup around it.
        if mark:
            line1 = line1 + '\1'
            line2 = '\0' + mark + line2
        # tack on first line onto the output list
        data_list.append((line_num,line1))
        # use this routine again to wrap the remaining text
        self._split_line(data_list,'>',line2)
    def _line_wrapper(self,diffs):
        """Returns iterator that splits (wraps) mdiff text lines"""
        # pull from/to data and flags from mdiff iterator
        for fromdata,todata,flag in diffs:
            # check for context separators and pass them through
            if flag is None:
                yield fromdata,todata,flag
                continue
            (fromline,fromtext),(toline,totext) = fromdata,todata
            # for each from/to line split it at the wrap column to form
            # list of text lines.
            fromlist,tolist = [],[]
            self._split_line(fromlist,fromline,fromtext)
            self._split_line(tolist,toline,totext)
            # yield from/to line in pairs inserting blank lines as
            # necessary when one side has more wrapped lines
            while fromlist or tolist:
                if fromlist:
                    fromdata = fromlist.pop(0)
                else:
                    fromdata = ('',' ')
                if tolist:
                    todata = tolist.pop(0)
                else:
                    todata = ('',' ')
                yield fromdata,todata,flag
    def _collect_lines(self,diffs):
        """Collects mdiff output into separate lists
        Before storing the mdiff from/to data into a list, it is converted
        into a single line of text with HTML markup.
        """
        fromlist,tolist,flaglist = [],[],[]
        # pull from/to data and flags from mdiff style iterator
        for fromdata,todata,flag in diffs:
            try:
                # store HTML markup of the lines into the lists
                fromlist.append(self._format_line(0,flag,*fromdata))
                tolist.append(self._format_line(1,flag,*todata))
            except TypeError:
                # exceptions occur for lines where context separators go
                fromlist.append(None)
                tolist.append(None)
            flaglist.append(flag)
        return fromlist,tolist,flaglist
    def _format_line(self,side,flag,linenum,text):
        """Returns HTML markup of "from" / "to" text lines
        side -- 0 or 1 indicating "from" or "to" text
        flag -- indicates if difference on line
        linenum -- line number (used for line number column)
        text -- line text to be marked up
        """
        try:
            linenum = '%d' % linenum
            id = ' id="%s%s"' % (self._prefix[side],linenum)
        except TypeError:
            # handle blank lines where linenum is '>' or ''
            id = ''
        # replace those things that would get confused with HTML symbols
        # (BUGFIX: the entity names had been lost, making these replacements
        # no-ops and leaving raw &, < and > in the generated markup)
        text=text.replace("&","&amp;").replace(">","&gt;").replace("<","&lt;")
        # make space non-breakable so they don't get compressed or line wrapped
        text = text.replace(' ','&nbsp;').rstrip()
        return '<td class="diff_header"%s>%s</td><td nowrap="nowrap">%s</td>' \
               % (id,linenum,text)
    def _make_prefix(self):
        """Create unique anchor prefixes"""
        # Generate a unique anchor prefix so multiple tables
        # can exist on the same HTML page without conflicts.
        fromprefix = "from%d_" % HtmlDiff._default_prefix
        toprefix = "to%d_" % HtmlDiff._default_prefix
        HtmlDiff._default_prefix += 1
        # store prefixes so line format method has access
        self._prefix = [fromprefix,toprefix]
    def _convert_flags(self,fromlist,tolist,flaglist,context,numlines):
        """Makes list of "next" links"""
        # all anchor names will be generated using the unique "to" prefix
        toprefix = self._prefix[1]
        # process change flags, generating middle column of next anchors/links
        next_id = ['']*len(flaglist)
        next_href = ['']*len(flaglist)
        num_chg, in_change = 0, False
        last = 0
        for i,flag in enumerate(flaglist):
            if flag:
                if not in_change:
                    in_change = True
                    last = i
                    # at the beginning of a change, drop an anchor a few lines
                    # (the context lines) before the change for the previous
                    # link
                    i = max([0,i-numlines])
                    next_id[i] = ' id="difflib_chg_%s_%d"' % (toprefix,num_chg)
                    # at the beginning of a change, drop a link to the next
                    # change
                    num_chg += 1
                    next_href[last] = '<a href="#difflib_chg_%s_%d">n</a>' % (
                         toprefix,num_chg)
            else:
                in_change = False
        # check for cases where there is no content to avoid exceptions
        if not flaglist:
            flaglist = [False]
            next_id = ['']
            next_href = ['']
            last = 0
            if context:
                fromlist = ['<td></td><td>&nbsp;No Differences Found&nbsp;</td>']
                tolist = fromlist
            else:
                fromlist = tolist = ['<td></td><td>&nbsp;Empty File&nbsp;</td>']
        # if not a change on first line, drop a link
        if not flaglist[0]:
            next_href[0] = '<a href="#difflib_chg_%s_0">f</a>' % toprefix
        # redo the last link to link to the top
        next_href[last] = '<a href="#difflib_chg_%s_top">t</a>' % (toprefix)
        return fromlist,tolist,flaglist,next_href,next_id
    def make_table(self,fromlines,tolines,fromdesc='',todesc='',context=False,
                   numlines=5):
        """Returns HTML table of side by side comparison with change highlights
        Arguments:
        fromlines -- list of "from" lines
        tolines -- list of "to" lines
        fromdesc -- "from" file column header string
        todesc -- "to" file column header string
        context -- set to True for contextual differences (defaults to False
            which shows full differences).
        numlines -- number of context lines. When context is set True,
            controls number of lines displayed before and after the change.
            When context is False, controls the number of lines to place
            the "next" link anchors before the next change (so click of
            "next" link jumps to just before the change).
        """
        # make unique anchor prefixes so that multiple tables may exist
        # on the same page without conflict.
        self._make_prefix()
        # change tabs to spaces before it gets more difficult after we insert
        # markup
        fromlines,tolines = self._tab_newline_replace(fromlines,tolines)
        # create diffs iterator which generates side by side from/to data
        if context:
            context_lines = numlines
        else:
            context_lines = None
        diffs = _mdiff(fromlines,tolines,context_lines,linejunk=self._linejunk,
                      charjunk=self._charjunk)
        # set up iterator to wrap lines that exceed desired width
        if self._wrapcolumn:
            diffs = self._line_wrapper(diffs)
        # collect up from/to lines and flags into lists (also format the lines)
        fromlist,tolist,flaglist = self._collect_lines(diffs)
        # process change flags, generating middle column of next anchors/links
        fromlist,tolist,flaglist,next_href,next_id = self._convert_flags(
            fromlist,tolist,flaglist,context,numlines)
        s = []
        fmt = '            <tr><td class="diff_next"%s>%s</td>%s' + \
              '<td class="diff_next">%s</td>%s</tr>\n'
        for i in range(len(flaglist)):
            if flaglist[i] is None:
                # mdiff yields None on separator lines skip the bogus ones
                # generated for the first line
                if i > 0:
                    s.append('        </tbody>        \n        <tbody>\n')
            else:
                s.append( fmt % (next_id[i],next_href[i],fromlist[i],
                                           next_href[i],tolist[i]))
        if fromdesc or todesc:
            header_row = '<thead><tr>%s%s%s%s</tr></thead>' % (
                '<th class="diff_next"><br /></th>',
                '<th colspan="2" class="diff_header">%s</th>' % fromdesc,
                '<th class="diff_next"><br /></th>',
                '<th colspan="2" class="diff_header">%s</th>' % todesc)
        else:
            header_row = ''
        table = self._table_template % dict(
            data_rows=''.join(s),
            header_row=header_row,
            prefix=self._prefix[1])
        # Intra-line markers inserted by _mdiff become styled spans; tabs
        # (stand-ins for expanded-tab spaces, see _tab_newline_replace)
        # become non-breaking spaces. BUGFIX: '&nbsp;' had degraded to a
        # literal space here, defeating the whitespace preservation.
        return table.replace('\0+','<span class="diff_add">'). \
                     replace('\0-','<span class="diff_sub">'). \
                     replace('\0^','<span class="diff_chg">'). \
                     replace('\1','</span>'). \
                     replace('\t','&nbsp;')
del re  # drop 're' from the module namespace; presumably only needed internally above — TODO confirm
def restore(delta, which):
    r"""
    Generate one of the two sequences that generated a delta.
    Given a `delta` produced by `Differ.compare()` or `ndiff()`, extract
    lines originating from file 1 or 2 (parameter `which`), stripping off line
    prefixes.
    Raises ValueError if `which` is not 1 or 2.
    Examples:
    >>> diff = ndiff('one\ntwo\nthree\n'.splitlines(1),
    ...              'ore\ntree\nemu\n'.splitlines(1))
    >>> diff = list(diff)
    >>> print ''.join(restore(diff, 1)),
    one
    two
    three
    >>> print ''.join(restore(diff, 2)),
    ore
    tree
    emu
    """
    try:
        tag = {1: "- ", 2: "+ "}[int(which)]
    except KeyError:
        # BUGFIX: was the Python-2-only statement form "raise ValueError, (...)";
        # the call form below is valid in both Python 2 and Python 3.
        raise ValueError('unknown delta choice (must be 1 or 2): %r'
                         % which)
    # keep unchanged ("  ") lines plus the lines unique to the chosen side
    prefixes = ("  ", tag)
    for line in delta:
        if line[:2] in prefixes:
            yield line[2:]
def _test():
    """Run the doctests embedded in the difflib module and return the results."""
    import doctest
    import difflib
    return doctest.testmod(difflib)
# When run as a script, execute the module's doctest-based self-test.
if __name__ == "__main__":
    _test()
|
apache-2.0
|
dawehner/root
|
tutorials/pyroot/first.py
|
28
|
1524
|
from ROOT import TCanvas, TF1, TPaveLabel, TPad, TText
from ROOT import gROOT
# PyROOT tutorial script: draws an annotated canvas that mimics a first
# interactive PyROOT session, with a live sin(x)/x plot in a sub-pad.
nut = TCanvas( 'nut', 'FirstSession', 100, 10, 700, 900 )
nut.Range( 0, 0, 20, 24 )
nut.SetFillColor( 10 )
nut.SetBorderSize( 2 )
# Title banner across the top of the canvas.
pl = TPaveLabel( 3, 22, 17, 23.7, 'My first PyROOT interactive session', 'br' )
pl.SetFillColor( 18 )
pl.Draw()
# Reusable text primitive; each DrawText call places one line on the canvas.
t = TText( 0, 0, 'a' )
t.SetTextFont( 62 )
t.SetTextSize( 0.025 )
t.SetTextAlign( 12 )
t.DrawText( 2, 20.3, 'PyROOT provides ROOT bindings for Python, a powerful interpreter.' )
t.DrawText( 2, 19.3, 'Blocks of lines can be entered typographically.' )
t.DrawText( 2, 18.3, 'Previous typed lines can be recalled.' )
# Switch font/size to render the simulated interpreter input lines.
t.SetTextFont( 72 )
t.SetTextSize( 0.026 )
t.DrawText( 3, 17, r'>>> x, y = 5, 7' )
t.DrawText( 3, 16, r'>>> import math; x*math.sqrt(y)' )
t.DrawText( 3, 14, r'>>> for i in range(2,7): print "sqrt(%d) = %f" % (i,math.sqrt(i))' )
t.DrawText( 3, 10, r'>>> import ROOT; f1 = ROOT.TF1( "f1", "sin(x)/x", 0, 10 )' )
t.DrawText( 3, 9, r'>>> f1.Draw()' )
# Different font/size again for the simulated interpreter output.
t.SetTextFont( 81 )
t.SetTextSize( 0.018 )
t.DrawText( 4, 15, '13.228756555322953' )
t.DrawText( 4, 13.3, 'sqrt(2) = 1.414214' )
t.DrawText( 4, 12.7, 'sqrt(3) = 1.732051' )
t.DrawText( 4, 12.1, 'sqrt(4) = 2.000000' )
t.DrawText( 4, 11.5, 'sqrt(5) = 2.236068' )
t.DrawText( 4, 10.9, 'sqrt(6) = 2.449490' )
# Sub-pad hosting the actual sin(x)/x function plot.
pad = TPad( 'pad', 'pad', .2, .05, .8, .35 )
pad.SetFillColor( 42 )
pad.SetFrameFillColor( 33 )
pad.SetBorderSize( 10 )
pad.Draw()
pad.cd()
pad.SetGrid()
f1 = TF1( 'f1', 'sin(x)/x', 0, 10 )
f1.Draw()
# Return focus to the main canvas and flush the drawing.
nut.cd()
nut.Update()
|
lgpl-2.1
|
sorenk/ansible
|
test/units/plugins/connection/test_ssh.py
|
21
|
29848
|
# -*- coding: utf-8 -*-
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from io import StringIO
import pytest
from ansible import constants as C
from ansible.compat.selectors import SelectorKey, EVENT_READ
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock, PropertyMock
from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
from ansible.module_utils.six.moves import shlex_quote
from ansible.module_utils._text import to_bytes
from ansible.playbook.play_context import PlayContext
from ansible.plugins.connection import ssh
class TestConnectionBaseClass(unittest.TestCase):
    """Unit tests for basic methods of the ssh connection plugin
    (connect/close, command building, output examination, file transfer)."""
    def test_plugins_connection_ssh_basic(self):
        """_connect returns self; _sshpass_available honours the module cache
        and probes via subprocess.Popen when the cache is unset."""
        pc = PlayContext()
        new_stdin = StringIO()
        conn = ssh.Connection(pc, new_stdin)
        # connect just returns self, so assert that
        res = conn._connect()
        self.assertEqual(conn, res)
        ssh.SSHPASS_AVAILABLE = False
        self.assertFalse(conn._sshpass_available())
        ssh.SSHPASS_AVAILABLE = True
        self.assertTrue(conn._sshpass_available())
        with patch('subprocess.Popen') as p:
            ssh.SSHPASS_AVAILABLE = None
            p.return_value = MagicMock()
            self.assertTrue(conn._sshpass_available())
            # a failing Popen (OSError) means sshpass is unavailable
            ssh.SSHPASS_AVAILABLE = None
            p.return_value = None
            p.side_effect = OSError()
            self.assertFalse(conn._sshpass_available())
        conn.close()
        self.assertFalse(conn._connected)
    def test_plugins_connection_ssh__build_command(self):
        """Smoke test: _build_command runs without error for a bare 'ssh'."""
        pc = PlayContext()
        new_stdin = StringIO()
        conn = ssh.Connection(pc, new_stdin)
        conn._build_command('ssh')
    def test_plugins_connection_ssh_exec_command(self):
        """exec_command delegates to _build_command/_run (both mocked here)."""
        pc = PlayContext()
        new_stdin = StringIO()
        conn = ssh.Connection(pc, new_stdin)
        conn._build_command = MagicMock()
        conn._build_command.return_value = 'ssh something something'
        conn._run = MagicMock()
        conn._run.return_value = (0, 'stdout', 'stderr')
        conn.get_option = MagicMock()
        conn.get_option.return_value = True
        res, stdout, stderr = conn.exec_command('ssh')
        res, stdout, stderr = conn.exec_command('ssh', 'this is some data')
    def test_plugins_connection_ssh__examine_output(self):
        """_examine_output classifies chunks into prompt/success/error states
        via the four check_* hooks (stubbed with simple substring matchers)."""
        pc = PlayContext()
        new_stdin = StringIO()
        conn = ssh.Connection(pc, new_stdin)
        conn.check_password_prompt = MagicMock()
        conn.check_become_success = MagicMock()
        conn.check_incorrect_password = MagicMock()
        conn.check_missing_password = MagicMock()
        def _check_password_prompt(line):
            if b'foo' in line:
                return True
            return False
        def _check_become_success(line):
            if b'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz' in line:
                return True
            return False
        def _check_incorrect_password(line):
            if b'incorrect password' in line:
                return True
            return False
        def _check_missing_password(line):
            if b'bad password' in line:
                return True
            return False
        conn.check_password_prompt.side_effect = _check_password_prompt
        conn.check_become_success.side_effect = _check_become_success
        conn.check_incorrect_password.side_effect = _check_incorrect_password
        conn.check_missing_password.side_effect = _check_missing_password
        # test examining output for prompt
        conn._flags = dict(
            become_prompt=False,
            become_success=False,
            become_error=False,
            become_nopasswd_error=False,
        )
        pc.prompt = True
        output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nfoo\nline 3\nthis should be the remainder', False)
        self.assertEqual(output, b'line 1\nline 2\nline 3\n')
        self.assertEqual(unprocessed, b'this should be the remainder')
        self.assertTrue(conn._flags['become_prompt'])
        self.assertFalse(conn._flags['become_success'])
        self.assertFalse(conn._flags['become_error'])
        self.assertFalse(conn._flags['become_nopasswd_error'])
        # test examining output for become prompt
        conn._flags = dict(
            become_prompt=False,
            become_success=False,
            become_error=False,
            become_nopasswd_error=False,
        )
        pc.prompt = False
        pc.success_key = u'BECOME-SUCCESS-abcdefghijklmnopqrstuvxyz'
        output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nBECOME-SUCCESS-abcdefghijklmnopqrstuvxyz\nline 3\n', False)
        self.assertEqual(output, b'line 1\nline 2\nline 3\n')
        self.assertEqual(unprocessed, b'')
        self.assertFalse(conn._flags['become_prompt'])
        self.assertTrue(conn._flags['become_success'])
        self.assertFalse(conn._flags['become_error'])
        self.assertFalse(conn._flags['become_nopasswd_error'])
        # test examining output for become failure
        conn._flags = dict(
            become_prompt=False,
            become_success=False,
            become_error=False,
            become_nopasswd_error=False,
        )
        pc.prompt = False
        pc.success_key = None
        output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nline 2\nincorrect password\n', True)
        self.assertEqual(output, b'line 1\nline 2\nincorrect password\n')
        self.assertEqual(unprocessed, b'')
        self.assertFalse(conn._flags['become_prompt'])
        self.assertFalse(conn._flags['become_success'])
        self.assertTrue(conn._flags['become_error'])
        self.assertFalse(conn._flags['become_nopasswd_error'])
        # test examining output for missing password
        conn._flags = dict(
            become_prompt=False,
            become_success=False,
            become_error=False,
            become_nopasswd_error=False,
        )
        pc.prompt = False
        pc.success_key = None
        output, unprocessed = conn._examine_output(u'source', u'state', b'line 1\nbad password\n', True)
        self.assertEqual(output, b'line 1\nbad password\n')
        self.assertEqual(unprocessed, b'')
        self.assertFalse(conn._flags['become_prompt'])
        self.assertFalse(conn._flags['become_success'])
        self.assertFalse(conn._flags['become_error'])
        self.assertTrue(conn._flags['become_nopasswd_error'])
    @patch('time.sleep')
    @patch('os.path.exists')
    def test_plugins_connection_ssh_put_file(self, mock_ospe, mock_sleep):
        """put_file: sftp vs scp selection under DEFAULT_SCP_IF_SSH
        (smart/True/False), unicode paths, and error propagation."""
        pc = PlayContext()
        new_stdin = StringIO()
        conn = ssh.Connection(pc, new_stdin)
        conn._build_command = MagicMock()
        conn._bare_run = MagicMock()
        mock_ospe.return_value = True
        conn._build_command.return_value = 'some command to run'
        conn._bare_run.return_value = (0, '', '')
        conn.host = "some_host"
        C.ANSIBLE_SSH_RETRIES = 9
        # Test with C.DEFAULT_SCP_IF_SSH set to smart
        # Test when SFTP works
        C.DEFAULT_SCP_IF_SSH = 'smart'
        expected_in_data = b' '.join((b'put', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
        conn.put_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)
        # Test when SFTP doesn't work but SCP does
        conn._bare_run.side_effect = [(1, 'stdout', 'some errors'), (0, '', '')]
        conn.put_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)
        conn._bare_run.side_effect = None
        # test with C.DEFAULT_SCP_IF_SSH enabled
        C.DEFAULT_SCP_IF_SSH = True
        conn.put_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)
        conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)
        # test with C.DEFAULT_SCP_IF_SSH disabled
        C.DEFAULT_SCP_IF_SSH = False
        expected_in_data = b' '.join((b'put', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
        conn.put_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)
        expected_in_data = b' '.join((b'put',
                                      to_bytes(shlex_quote('/path/to/in/file/with/unicode-fö〩')),
                                      to_bytes(shlex_quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
        conn.put_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)
        # test that a non-zero rc raises an error
        conn._bare_run.return_value = (1, 'stdout', 'some errors')
        self.assertRaises(AnsibleError, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')
        # test that a not-found path raises an error
        mock_ospe.return_value = False
        conn._bare_run.return_value = (0, 'stdout', '')
        self.assertRaises(AnsibleFileNotFound, conn.put_file, '/path/to/bad/file', '/remote/path/to/file')
    @patch('time.sleep')
    def test_plugins_connection_ssh_fetch_file(self, mock_sleep):
        """fetch_file: mirror of the put_file cases, using the 'get' verb."""
        pc = PlayContext()
        new_stdin = StringIO()
        conn = ssh.Connection(pc, new_stdin)
        conn._build_command = MagicMock()
        conn._bare_run = MagicMock()
        conn._build_command.return_value = 'some command to run'
        conn._bare_run.return_value = (0, '', '')
        conn.host = "some_host"
        C.ANSIBLE_SSH_RETRIES = 9
        # Test with C.DEFAULT_SCP_IF_SSH set to smart
        # Test when SFTP works
        C.DEFAULT_SCP_IF_SSH = 'smart'
        expected_in_data = b' '.join((b'get', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
        conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)
        # Test when SFTP doesn't work but SCP does
        conn._bare_run.side_effect = [(1, 'stdout', 'some errors'), (0, '', '')]
        conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)
        conn._bare_run.side_effect = None
        # test with C.DEFAULT_SCP_IF_SSH enabled
        C.DEFAULT_SCP_IF_SSH = True
        conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)
        conn.fetch_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
        conn._bare_run.assert_called_with('some command to run', None, checkrc=False)
        # test with C.DEFAULT_SCP_IF_SSH disabled
        C.DEFAULT_SCP_IF_SSH = False
        expected_in_data = b' '.join((b'get', to_bytes(shlex_quote('/path/to/in/file')), to_bytes(shlex_quote('/path/to/dest/file')))) + b'\n'
        conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)
        expected_in_data = b' '.join((b'get',
                                      to_bytes(shlex_quote('/path/to/in/file/with/unicode-fö〩')),
                                      to_bytes(shlex_quote('/path/to/dest/file/with/unicode-fö〩')))) + b'\n'
        conn.fetch_file(u'/path/to/in/file/with/unicode-fö〩', u'/path/to/dest/file/with/unicode-fö〩')
        conn._bare_run.assert_called_with('some command to run', expected_in_data, checkrc=False)
        # test that a non-zero rc raises an error
        conn._bare_run.return_value = (1, 'stdout', 'some errors')
        self.assertRaises(AnsibleError, conn.fetch_file, '/path/to/bad/file', '/remote/path/to/file')
class MockSelector(object):
    """Hand-rolled stand-in for a selectors-style selector.

    ``register``/``unregister``/``close``/``get_map``/``select`` are all
    MagicMocks (so tests can inspect call counts), and the register/
    unregister/get_map trio additionally maintains a simple counter of
    how many files are currently watched.
    """
    def __init__(self):
        self.files_watched = 0
        # call-through mocks backed by the private bookkeeping helpers below
        self.register = MagicMock(side_effect=self._register)
        self.unregister = MagicMock(side_effect=self._unregister)
        self.get_map = MagicMock(side_effect=self._get_map)
        # pure mocks with no behavior of their own
        self.close = MagicMock()
        self.select = MagicMock()
    def _register(self, *args, **kwargs):
        """Record one more watched file."""
        self.files_watched = self.files_watched + 1
    def _unregister(self, *args, **kwargs):
        """Record one fewer watched file."""
        self.files_watched = self.files_watched - 1
    def _get_map(self, *args, **kwargs):
        """Report the number of currently watched files."""
        return self.files_watched
@pytest.fixture
def mock_run_env(request, mocker):
    """Attach a Connection with fully mocked subprocess/selector/pty plumbing
    to the requesting test class.

    Sets on request.cls: pc, conn, mock_popen_res, mock_popen,
    mock_selector, mock_openpty.
    """
    pc = PlayContext()
    new_stdin = StringIO()
    conn = ssh.Connection(pc, new_stdin)
    # mock out the connection's internal collaborators so _run's I/O loop
    # can be driven entirely from the tests
    conn._send_initial_data = MagicMock()
    conn._examine_output = MagicMock()
    conn._terminate_process = MagicMock()
    conn.sshpass_pipe = [MagicMock(), MagicMock()]
    request.cls.pc = pc
    request.cls.conn = conn
    # fake Popen result with distinct fds for stdin/stdout/stderr so the
    # selector keys in the tests can be told apart
    mock_popen_res = MagicMock()
    mock_popen_res.poll = MagicMock()
    mock_popen_res.wait = MagicMock()
    mock_popen_res.stdin = MagicMock()
    mock_popen_res.stdin.fileno.return_value = 1000
    mock_popen_res.stdout = MagicMock()
    mock_popen_res.stdout.fileno.return_value = 1001
    mock_popen_res.stderr = MagicMock()
    mock_popen_res.stderr.fileno.return_value = 1002
    mock_popen_res.returncode = 0
    request.cls.mock_popen_res = mock_popen_res
    mock_popen = mocker.patch('subprocess.Popen', return_value=mock_popen_res)
    request.cls.mock_popen = mock_popen
    request.cls.mock_selector = MockSelector()
    mocker.patch('ansible.compat.selectors.DefaultSelector', lambda: request.cls.mock_selector)
    request.cls.mock_openpty = mocker.patch('pty.openpty')
    # neutralize fd- and OS-level calls made while wiring up the child process
    mocker.patch('fcntl.fcntl')
    mocker.patch('os.write')
    mocker.patch('os.close')
@pytest.mark.usefixtures('mock_run_env')
class TestSSHConnectionRun(object):
    """Tests for Connection._run driven through the mocked Popen/selector
    plumbing provided by the ``mock_run_env`` fixture (self.pc, self.conn,
    self.mock_popen_res, self.mock_popen, self.mock_selector,
    self.mock_openpty)."""
    # FIXME:
    # These tests are little more than a smoketest. Need to enhance them
    # a bit to check that they're calling the relevant functions and making
    # complete coverage of the code paths
    def test_no_escalation(self):
        """Plain _run: stdout/stderr chunks are collected verbatim."""
        self.mock_popen_res.stdout.read.side_effect = [b"my_stdout\n", b"second_line"]
        self.mock_popen_res.stderr.read.side_effect = [b"my_stderr"]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True
        return_code, b_stdout, b_stderr = self.conn._run("ssh", "this is input data")
        assert return_code == 0
        assert b_stdout == b'my_stdout\nsecond_line'
        assert b_stderr == b'my_stderr'
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is True
        assert self.conn._send_initial_data.call_count == 1
        assert self.conn._send_initial_data.call_args[0][1] == 'this is input data'
    def test_with_password(self):
        """_run with PlayContext.password set (sshpass path)."""
        # test with a password set to trigger the sshpass write
        self.pc.password = '12345'
        self.mock_popen_res.stdout.read.side_effect = [b"some data", b"", b""]
        self.mock_popen_res.stderr.read.side_effect = [b""]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True
        return_code, b_stdout, b_stderr = self.conn._run(["ssh", "is", "a", "cmd"], "this is more data")
        assert return_code == 0
        assert b_stdout == b'some data'
        assert b_stderr == b''
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is True
        assert self.conn._send_initial_data.call_count == 1
        assert self.conn._send_initial_data.call_args[0][1] == 'this is more data'
    def _password_with_prompt_examine_output(self, source, state, b_chunk, sudoable):
        """side_effect for conn._examine_output: flips the become flags the
        state machine expects and consumes the whole chunk.
        (Parameter renamed from the original typo 'sourice'.)"""
        if state == 'awaiting_prompt':
            self.conn._flags['become_prompt'] = True
        elif state == 'awaiting_escalation':
            self.conn._flags['become_success'] = True
        return (b'', b'')
    def test_password_with_prompt(self):
        """_run when the remote side presents a password prompt."""
        # test with password prompting enabled
        self.pc.password = None
        self.pc.prompt = b'Password:'
        self.conn._examine_output.side_effect = self._password_with_prompt_examine_output
        self.mock_popen_res.stdout.read.side_effect = [b"Password:", b"Success", b""]
        self.mock_popen_res.stderr.read.side_effect = [b""]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ),
             (SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True
        return_code, b_stdout, b_stderr = self.conn._run("ssh", "this is input data")
        assert return_code == 0
        assert b_stdout == b''
        assert b_stderr == b''
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is True
        assert self.conn._send_initial_data.call_count == 1
        assert self.conn._send_initial_data.call_args[0][1] == 'this is input data'
    def test_password_with_become(self):
        """_run through a full become escalation (prompt then success key)."""
        # test with some become settings
        self.pc.prompt = b'Password:'
        self.pc.become = True
        self.pc.success_key = 'BECOME-SUCCESS-abcdefg'
        self.conn._examine_output.side_effect = self._password_with_prompt_examine_output
        self.mock_popen_res.stdout.read.side_effect = [b"Password:", b"BECOME-SUCCESS-abcdefg", b"abc"]
        self.mock_popen_res.stderr.read.side_effect = [b"123"]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True
        return_code, b_stdout, b_stderr = self.conn._run("ssh", "this is input data")
        self.mock_popen_res.stdin.flush.assert_called_once_with()
        assert return_code == 0
        assert b_stdout == b'abc'
        assert b_stderr == b'123'
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is True
        assert self.conn._send_initial_data.call_count == 1
        assert self.conn._send_initial_data.call_args[0][1] == 'this is input data'
    def test_password_without_data(self):
        """_run with empty in_data: no initial data should be sent.
        (Renamed from the misspelled 'test_pasword_without_data'.)"""
        # simulate no data input
        self.mock_openpty.return_value = (98, 99)
        self.mock_popen_res.stdout.read.side_effect = [b"some data", b"", b""]
        self.mock_popen_res.stderr.read.side_effect = [b""]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True
        return_code, b_stdout, b_stderr = self.conn._run("ssh", "")
        assert return_code == 0
        assert b_stdout == b'some data'
        assert b_stderr == b''
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is False
    def test_password_without_data_and_popen_error(self):
        """Same scenario, but the first Popen attempt (with the new pty pair)
        raises OSError and _run must fall back to a second attempt.

        BUGFIX: this method previously reused the name
        'test_pasword_without_data', shadowing the test above so it never ran;
        renamed so both are collected and executed.
        """
        # simulate no data input but Popen using new pty's fails
        self.mock_popen.return_value = None
        self.mock_popen.side_effect = [OSError(), self.mock_popen_res]
        # simulate no data input
        self.mock_openpty.return_value = (98, 99)
        self.mock_popen_res.stdout.read.side_effect = [b"some data", b"", b""]
        self.mock_popen_res.stderr.read.side_effect = [b""]
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            []]
        self.mock_selector.get_map.side_effect = lambda: True
        return_code, b_stdout, b_stderr = self.conn._run("ssh", "")
        assert return_code == 0
        assert b_stdout == b'some data'
        assert b_stderr == b''
        assert self.mock_selector.register.called is True
        assert self.mock_selector.register.call_count == 2
        assert self.conn._send_initial_data.called is False
@pytest.mark.usefixtures('mock_run_env')
class TestSSHConnectionRetries(object):
    """Exercise the retry loop around SSH/SFTP process invocation.

    The ``mock_run_env`` fixture provides ``self.conn`` plus the
    ``mock_popen`` / ``mock_popen_res`` / ``mock_selector`` attributes used
    below.  Each test scripts the child's returncode / stream reads /
    selector events in exact order, so the side_effect lists are
    order-critical.
    """

    def test_retry_then_success(self, monkeypatch):
        # Returncode reads 255 (retryable SSH failure) three times, then 0:
        # exec_command should retry once and ultimately succeed.
        monkeypatch.setattr(C, 'HOST_KEY_CHECKING', False)
        monkeypatch.setattr(C, 'ANSIBLE_SSH_RETRIES', 3)
        monkeypatch.setattr('time.sleep', lambda x: None)  # skip real backoff
        self.mock_popen_res.stdout.read.side_effect = [b"", b"my_stdout\n", b"second_line"]
        self.mock_popen_res.stderr.read.side_effect = [b"", b"my_stderr"]
        type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 3 + [0] * 4)
        # Events for two _run passes; each empty list terminates one pass.
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            []
        ]
        self.mock_selector.get_map.side_effect = lambda: True
        self.conn._build_command = MagicMock()
        self.conn._build_command.return_value = 'ssh'
        self.conn.get_option = MagicMock()
        self.conn.get_option.return_value = True
        return_code, b_stdout, b_stderr = self.conn.exec_command('ssh', 'some data')
        assert return_code == 0
        assert b_stdout == b'my_stdout\nsecond_line'
        assert b_stderr == b'my_stderr'

    def test_multiple_failures(self, monkeypatch):
        # Every attempt exits 255: after retries are exhausted the
        # connection error must propagate and Popen ran retries+1 times.
        monkeypatch.setattr(C, 'HOST_KEY_CHECKING', False)
        monkeypatch.setattr(C, 'ANSIBLE_SSH_RETRIES', 9)
        monkeypatch.setattr('time.sleep', lambda x: None)
        self.mock_popen_res.stdout.read.side_effect = [b""] * 10
        self.mock_popen_res.stderr.read.side_effect = [b""] * 10
        type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 30)
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [],
        ] * 10
        self.mock_selector.get_map.side_effect = lambda: True
        self.conn._build_command = MagicMock()
        self.conn._build_command.return_value = 'ssh'
        self.conn.get_option = MagicMock()
        self.conn.get_option.return_value = True
        pytest.raises(AnsibleConnectionFailure, self.conn.exec_command, 'ssh', 'some data')
        assert self.mock_popen.call_count == 10

    def test_abitrary_exceptions(self, monkeypatch):
        # Non-SSH exceptions from Popen are also retried up to the limit.
        # NOTE(review): "abitrary" looks like a typo for "arbitrary".
        monkeypatch.setattr(C, 'HOST_KEY_CHECKING', False)
        monkeypatch.setattr(C, 'ANSIBLE_SSH_RETRIES', 9)
        monkeypatch.setattr('time.sleep', lambda x: None)
        self.conn._build_command = MagicMock()
        self.conn._build_command.return_value = 'ssh'
        self.conn.get_option = MagicMock()
        self.conn.get_option.return_value = True
        self.mock_popen.side_effect = [Exception('bad')] * 10
        pytest.raises(Exception, self.conn.exec_command, 'ssh', 'some data')
        assert self.mock_popen.call_count == 10

    def test_put_file_retries(self, monkeypatch):
        # First sftp attempt exits 255, second succeeds (returncode 255*4
        # then 0*4); put_file must retry exactly once.
        monkeypatch.setattr(C, 'HOST_KEY_CHECKING', False)
        monkeypatch.setattr(C, 'ANSIBLE_SSH_RETRIES', 3)
        monkeypatch.setattr('time.sleep', lambda x: None)
        monkeypatch.setattr('ansible.plugins.connection.ssh.os.path.exists', lambda x: True)
        self.mock_popen_res.stdout.read.side_effect = [b"", b"my_stdout\n", b"second_line"]
        self.mock_popen_res.stderr.read.side_effect = [b"", b"my_stderr"]
        type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 4 + [0] * 4)
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            []
        ]
        self.mock_selector.get_map.side_effect = lambda: True
        self.conn._build_command = MagicMock()
        self.conn._build_command.return_value = 'sftp'
        return_code, b_stdout, b_stderr = self.conn.put_file('/path/to/in/file', '/path/to/dest/file')
        assert return_code == 0
        assert b_stdout == b"my_stdout\nsecond_line"
        assert b_stderr == b"my_stderr"
        assert self.mock_popen.call_count == 2

    def test_fetch_file_retries(self, monkeypatch):
        # Mirror of test_put_file_retries for the download direction.
        monkeypatch.setattr(C, 'HOST_KEY_CHECKING', False)
        monkeypatch.setattr(C, 'ANSIBLE_SSH_RETRIES', 3)
        monkeypatch.setattr('time.sleep', lambda x: None)
        monkeypatch.setattr('ansible.plugins.connection.ssh.os.path.exists', lambda x: True)
        self.mock_popen_res.stdout.read.side_effect = [b"", b"my_stdout\n", b"second_line"]
        self.mock_popen_res.stderr.read.side_effect = [b"", b"my_stderr"]
        type(self.mock_popen_res).returncode = PropertyMock(side_effect=[255] * 4 + [0] * 4)
        self.mock_selector.select.side_effect = [
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            [],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stdout, 1001, [EVENT_READ], None), EVENT_READ)],
            [(SelectorKey(self.mock_popen_res.stderr, 1002, [EVENT_READ], None), EVENT_READ)],
            []
        ]
        self.mock_selector.get_map.side_effect = lambda: True
        self.conn._build_command = MagicMock()
        self.conn._build_command.return_value = 'sftp'
        return_code, b_stdout, b_stderr = self.conn.fetch_file('/path/to/in/file', '/path/to/dest/file')
        assert return_code == 0
        assert b_stdout == b"my_stdout\nsecond_line"
        assert b_stderr == b"my_stderr"
        assert self.mock_popen.call_count == 2
|
gpl-3.0
|
dchilds7/Deysha-Star-Formation
|
vispy/util/config.py
|
21
|
14665
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""Vispy configuration functions
"""
import os
from os import path as op
import json
import sys
import platform
import getopt
import traceback
import tempfile
import atexit
from shutil import rmtree
from .event import EmitterGroup, EventEmitter, Event
from .logs import logger, set_log_level, use_log_level
from ..ext.six import string_types, file_types
# Accepted "file-like" types (used when validating config values such as
# glir_file); extend the six-provided tuple with the tempfile wrapper class
# on interpreters that expose it.
file_types = list(file_types)
try:
    file_types += [tempfile._TemporaryFileWrapper]  # Py3k Windows this happens
except Exception:
    pass
file_types = tuple(file_types)

# Module globals populated by _init() at import time (bottom of this file).
config = None
_data_path = None
_allowed_config_keys = None
def _init():
    """Create the global Config object and parse command flags.

    Populates the module globals ``config``, ``_data_path`` and
    ``_allowed_config_keys``, loads the user's config file on top of the
    defaults, then applies any ``--vispy-*`` command line flags.  Called
    once at import time (bottom of this module).

    Raises
    ------
    Exception
        If the on-disk config file exists but cannot be parsed/validated.
    """
    global config, _data_path, _allowed_config_keys
    app_dir = _get_vispy_app_dir()
    if app_dir is not None:
        _data_path = op.join(app_dir, 'data')
        _test_data_path = op.join(app_dir, 'test_data')
    else:
        _data_path = _test_data_path = None
    # All allowed config keys and the types they may have
    _allowed_config_keys = {
        'data_path': string_types,
        'default_backend': string_types,
        'gl_backend': string_types,
        'gl_debug': (bool,),
        'glir_file': string_types+file_types,
        'include_path': list,
        'logging_level': string_types,
        'qt_lib': string_types,
        'dpi': (int, type(None)),
        'profile': string_types + (type(None),),
        'audit_tests': (bool,),
        'test_data_path': string_types + (type(None),),
    }
    # Default values for all config options
    default_config_options = {
        'data_path': _data_path,
        'default_backend': '',
        'gl_backend': 'gl2',
        'gl_debug': False,
        'glir_file': '',
        'include_path': [],
        'logging_level': 'info',
        'qt_lib': 'any',
        'dpi': None,
        'profile': None,
        'audit_tests': False,
        'test_data_path': _test_data_path,
    }
    config = Config(**default_config_options)
    try:
        config.update(**_load_config())
    except Exception as err:
        # BUG FIX: Exception.message does not exist on Python 3 (and was
        # deprecated on Python 2.6+); format the exception object itself,
        # which yields the same text portably.
        raise Exception('Error while reading vispy config file "%s":\n %s' %
                        (_get_config_fname(), err))
    set_log_level(config['logging_level'])
    _parse_command_line_arguments()
###############################################################################
# Command line flag parsing
VISPY_HELP = """
VisPy command line arguments:
--vispy-backend=(qt|pyqt4|pyqt5|pyside|glfw|pyglet|sdl2|wx)
Selects the backend system for VisPy to use. This will override the default
backend selection in your configuration file.
--vispy-log=(debug|info|warning|error|critical)[,search string]
Sets the verbosity of logging output. The default is 'warning'. If a search
string is given, messages will only be displayed if they match the string,
or if their call location (module.class:method(line) or
module:function(line)) matches the string.
--vispy-dpi=resolution
Force the screen resolution to a certain value (in pixels per inch). By
default, the OS is queried to determine the screen DPI.
--vispy-fps
Print the framerate (in Frames Per Second) in the console.
--vispy-gl-debug
Enables error checking for all OpenGL calls.
--vispy-glir-file
Export glir commands to specified file.
--vispy-profile=locations
Measure performance at specific code locations and display results.
*locations* may be "all" or a comma-separated list of method names like
"SceneCanvas.draw_visual".
--vispy-cprofile
Enable profiling using the built-in cProfile module and display results
when the program exits.
--vispy-audit-tests
Enable user auditing of image test results.
--vispy-help
Display this help message.
"""
def _parse_command_line_arguments():
    """ Transform vispy specific command line args to vispy config.
    Put into a function so that any variables dont leak in the vispy namespace.
    """
    global config
    # Flags vispy recognizes; anything else in argv is left alone.
    argnames = ['vispy-backend=', 'vispy-gl-debug', 'vispy-glir-file=',
                'vispy-log=', 'vispy-help', 'vispy-profile=', 'vispy-cprofile',
                'vispy-dpi=', 'vispy-audit-tests']
    try:
        opts, args = getopt.getopt(sys.argv[1:], '', argnames)
    except getopt.GetoptError:
        opts = []
    # Use them to set the config values
    for o, a in opts:
        if o.startswith('--vispy'):
            if o == '--vispy-backend':
                config['default_backend'] = a
                logger.info('vispy backend: %s', a)
            elif o == '--vispy-gl-debug':
                config['gl_debug'] = True
            elif o == '--vispy-glir-file':
                config['glir_file'] = a
            elif o == '--vispy-log':
                if ',' in a:
                    # BUG FIX: split at most once so a match string that
                    # itself contains a comma no longer raises ValueError.
                    verbose, match = a.split(',', 1)
                else:
                    verbose = a
                    match = None
                # BUG FIX: store only the level.  Previously the raw
                # "level,match" argument was stored, which is not a valid
                # level for later set_log_level(config['logging_level'])
                # calls (e.g. in _init on next start).
                config['logging_level'] = verbose
                set_log_level(verbose, match)
            elif o == '--vispy-profile':
                config['profile'] = a
            elif o == '--vispy-cprofile':
                _enable_profiling()
            elif o == '--vispy-help':
                print(VISPY_HELP)
            elif o == '--vispy-dpi':
                config['dpi'] = int(a)
            elif o == '--vispy-audit-tests':
                config['audit_tests'] = True
            else:
                logger.warning("Unsupported vispy flag: %s" % o)
###############################################################################
# CONFIG
# Adapted from pyzolib/paths.py:
# https://bitbucket.org/pyzo/pyzolib/src/tip/paths.py
def _get_vispy_app_dir():
    """Return the default directory for storing vispy data.

    Computes (but does not create) a per-user application directory,
    preferring a writable ``settings`` directory next to the executable so
    portable/frozen distributions keep their data with the install.
    """
    home = os.path.expanduser('~')
    # Platform-specific application-data base directory.
    base = None
    if sys.platform.startswith('win'):
        base = os.getenv('LOCALAPPDATA') or os.getenv('APPDATA')
    elif sys.platform.startswith('darwin'):
        base = os.path.join(home, 'Library', 'Application Support')
    # On Linux and whenever the candidate is unusable, fall back to $HOME.
    if not (base and os.path.isdir(base)):
        base = home
    # Portable/frozen installs: prefer a writable "settings" dir near the
    # executable.
    prefix = sys.prefix
    if getattr(sys, 'frozen', None):  # See application_dir() function
        prefix = os.path.abspath(os.path.dirname(sys.path[0]))
    for reldir in ('settings', '../settings'):
        candidate = os.path.abspath(os.path.join(prefix, reldir))
        if not os.path.isdir(candidate):
            continue
        probe = os.path.join(candidate, 'test.write')
        try:
            open(probe, 'wb').close()
            os.remove(probe)
        except IOError:
            pass  # not writable -- keep looking
        else:
            base = candidate
            break
    # Hidden dotted name directly under $HOME, plain name elsewhere.
    appname = '.vispy' if base == home else 'vispy'
    return os.path.join(base, appname)
class ConfigEvent(Event):
    """Event indicating a configuration change.

    The ``changes`` attribute holds a dict of all name:value pairs that
    changed in the configuration.
    """

    def __init__(self, changes):
        super(ConfigEvent, self).__init__(type='config_change')
        self.changes = changes
class Config(object):
    """ Container for global settings used application-wide in vispy.

    Events:
    -------
    Config.events.changed - Emits ConfigEvent whenever the configuration
    changes.
    """

    def __init__(self, **kwargs):
        # Wire up the "changed" emitter before the first update() call so
        # the initial assignment already notifies any attached listeners.
        self.events = EmitterGroup(source=self)
        self.events['changed'] = EventEmitter(
            event_class=ConfigEvent,
            source=self)
        self._config = {}
        # NOTE: update() runs before _known_keys is assigned; that is safe
        # because validation (_check_key_val) reads the module-level
        # _allowed_config_keys, not self._known_keys.
        self.update(**kwargs)
        self._known_keys = get_config_keys()

    def __getitem__(self, item):
        # KeyError propagates for unknown/unset keys.
        return self._config[item]

    def __setitem__(self, item, val):
        # Validate key and value type before storing.
        self._check_key_val(item, val)
        self._config[item] = val
        # inform any listeners that a configuration option has changed
        self.events.changed(changes={item: val})

    def _check_key_val(self, key, val):
        """Raise KeyError for an unknown key, TypeError for a bad value type."""
        global _allowed_config_keys
        # check values against acceptable ones
        known_keys = _allowed_config_keys
        if key not in known_keys:
            raise KeyError('key "%s" not in known keys: "%s"'
                           % (key, known_keys))
        if not isinstance(val, known_keys[key]):
            raise TypeError('Value for key "%s" must be one of %s, not %s.'
                            % (key, known_keys[key], type(val)))

    def update(self, **kwargs):
        """Validate then apply several settings at once; emits one changed
        event covering all of them."""
        # Validate everything first so one bad entry leaves _config untouched.
        for key, val in kwargs.items():
            self._check_key_val(key, val)
        self._config.update(kwargs)
        self.events.changed(changes=kwargs)

    def __repr__(self):
        return repr(self._config)
def get_config_keys():
    """The config keys known by vispy and their allowed data types.

    Returns
    -------
    keys : dict
        Dict of {key: (types,)} pairs.
    """
    # Hand back a shallow copy so callers cannot mutate module state.
    return dict(_allowed_config_keys)
def _get_config_fname():
    """Return the path of the vispy config file (or None if undeterminable)."""
    directory = _get_vispy_app_dir()
    if directory is None:
        return None
    if os.environ.get('_VISPY_CONFIG_TESTING', None) is not None:
        # Tests get a throw-away directory so they never touch real prefs.
        return op.join(_TempDir(), 'vispy.json')
    return op.join(directory, 'vispy.json')
def _load_config():
    """Load preferences from ~/.vispy/vispy.json; {} when absent."""
    fname = _get_config_fname()
    if fname is None or not op.isfile(fname):
        return {}
    with open(fname, 'r') as fid:
        return json.load(fid)
def save_config(**kwargs):
    """Save configuration keys to vispy config file

    Parameters
    ----------
    **kwargs : keyword arguments
        Key/value pairs to save to the config file.
    """
    # No explicit keys means "persist the whole in-memory configuration".
    if kwargs == {}:
        kwargs = config._config
    # Merge with what is already on disk so unrelated keys survive.
    on_disk = _load_config()
    on_disk.update(**kwargs)
    fname = _get_config_fname()
    if fname is None:
        raise RuntimeError('config filename could not be determined')
    if not op.isdir(op.dirname(fname)):
        os.mkdir(op.dirname(fname))
    with open(fname, 'w') as fid:
        json.dump(on_disk, fid, sort_keys=True, indent=0)
def set_data_dir(directory=None, create=False, save=False):
    """Set vispy data download directory

    Parameters
    ----------
    directory : str | None
        The directory to use.
    create : bool
        If True, create directory if it doesn't exist.
    save : bool
        If True, save the configuration to the vispy config.
    """
    if directory is None:
        # Fall back to the default path computed at import time.
        if _data_path is None:
            raise IOError('default path cannot be determined, please '
                          'set it manually (directory != None)')
        directory = _data_path
    if not op.isdir(directory):
        if not create:
            raise IOError('directory "%s" does not exist, perhaps try '
                          'create=True to create it?' % directory)
        os.mkdir(directory)
    config.update(data_path=directory)
    if save:
        save_config(data_path=directory)
def _enable_profiling():
    """Start cProfile-based profiling now and arrange for the collected
    stats to be printed when the interpreter exits."""
    import cProfile
    import atexit
    global _profiler
    prof = cProfile.Profile()
    prof.enable()
    _profiler = prof
    atexit.register(_profile_atexit)


# Active profiler instance; stays None until --vispy-cprofile enables it.
_profiler = None
def _profile_atexit():
    """atexit hook: dump cumulative-time-sorted stats for the profiler."""
    _profiler.print_stats(sort='cumulative')
def sys_info(fname=None, overwrite=False):
    """Get relevant system and debugging information

    Parameters
    ----------
    fname : str | None
        Filename to dump info to. Use None to simply print.
    overwrite : bool
        If True, overwrite file (if it exists).

    Returns
    -------
    out : str
        The system information as a string.
    """
    if fname is not None and op.isfile(fname) and not overwrite:
        raise IOError('file exists, use overwrite=True to overwrite')
    out = ''
    try:
        # Nest all imports here to avoid any circular imports
        from ..app import use_app, Canvas
        from ..app.backends import BACKEND_NAMES
        from ..gloo import gl
        from ..testing import has_backend
        # get default app
        with use_log_level('warning'):
            app = use_app(call_reuse=False)  # suppress messages
        out += 'Platform: %s\n' % platform.platform()
        out += 'Python: %s\n' % str(sys.version).replace('\n', ' ')
        out += 'Backend: %s\n' % app.backend_name
        for backend in BACKEND_NAMES:
            # Notebook backends are skipped -- nothing importable to locate.
            if backend.startswith('ipynb_'):
                continue
            with use_log_level('warning', print_msg=False):
                which = has_backend(backend, out=['which'])[1]
            out += '{0:<9} {1}\n'.format(backend + ':', which)
        out += '\n'
        # We need an OpenGL context to get GL info
        canvas = Canvas('Test', (10, 10), show=False, app=app)
        canvas._backend._vispy_set_current()
        out += 'GL version:  %r\n' % (gl.glGetParameter(gl.GL_VERSION),)
        x_ = gl.GL_MAX_TEXTURE_SIZE
        out += 'MAX_TEXTURE_SIZE: %r\n' % (gl.glGetParameter(x_),)
        out += 'Extensions: %r\n' % (gl.glGetParameter(gl.GL_EXTENSIONS),)
        canvas.close()
    except Exception:  # don't stop printing info
        # Gathering info must never raise; append the traceback instead.
        out += '\nInfo-gathering error:\n%s' % traceback.format_exc()
        pass
    if fname is not None:
        with open(fname, 'w') as fid:
            fid.write(out)
    return out
class _TempDir(str):
    """A str subclass whose value is a freshly created temporary directory.

    Designed for testing modules.  Cleanup is registered with ``atexit``
    rather than implemented in ``__del__`` because ``rmtree`` itself may be
    torn down before this object during interpreter shutdown.
    """

    def __new__(cls):
        return str.__new__(cls, tempfile.mkdtemp())

    def __init__(self):
        self._path = str(self)
        atexit.register(self.cleanup)

    def cleanup(self):
        """Remove the directory tree; missing paths are ignored."""
        rmtree(self._path, ignore_errors=True)
# initialize config options
_init()
|
bsd-3-clause
|
CCI-MOC/nova
|
nova/api/openstack/compute/fping.py
|
32
|
4924
|
# Copyright 2011 Grid Dynamics
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
import os
from oslo_config import cfg
import six
from webob import exc
from nova.api.openstack import common
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute
from nova.i18n import _
from nova import utils
ALIAS = "os-fping"
authorize = extensions.os_compute_authorizer(ALIAS)
CONF = cfg.CONF
CONF.import_opt('fping_path', 'nova.api.openstack.compute.legacy_v2.contrib.'
'fping')
class FpingController(wsgi.Controller):
    """os-fping API controller: reports instance reachability by running
    the external ``fping`` utility against the instances' IP addresses."""

    def __init__(self, network_api=None):
        # NOTE(review): network_api is accepted but never used, and the
        # wsgi.Controller base __init__ is not invoked -- confirm both are
        # intentional before changing.
        self.compute_api = compute.API(skip_policy_check=True)
        self.last_call = {}

    def check_fping(self):
        """Raise HTTP 503 when the configured fping binary is not executable."""
        if not os.access(CONF.fping_path, os.X_OK):
            raise exc.HTTPServiceUnavailable(
                explanation=_("fping utility is not found."))

    @staticmethod
    def fping(ips):
        """Run fping over ``ips`` and return the set of addresses whose
        output line contains "alive" (empty set when fping produced no
        output)."""
        fping_ret = utils.execute(CONF.fping_path, *ips,
                                  check_exit_code=False)
        if not fping_ret:
            return set()
        alive_ips = set()
        # Each stdout line starts with the address, e.g. "10.0.0.2 is alive".
        for line in fping_ret[0].split("\n"):
            ip = line.split(" ", 1)[0]
            if "alive" in line:
                alive_ips.add(ip)
        return alive_ips

    @staticmethod
    def _get_instance_ips(context, instance):
        """Collect fixed and floating IP addresses for one instance."""
        ret = []
        for network in common.get_networks_for_instance(
                context, instance).values():
            all_ips = itertools.chain(network["ips"], network["floating_ips"])
            ret += [ip["address"] for ip in all_ips]
        return ret

    @extensions.expected_errors(503)
    def index(self, req):
        """GET /os-fping: ping every (filtered) instance.

        Supports ``all_tenants``, ``include`` and ``exclude`` query
        parameters; ``include`` takes precedence and forces ``exclude``
        to be empty.
        """
        context = req.environ["nova.context"]
        search_opts = dict(deleted=False)
        if "all_tenants" in req.GET:
            authorize(context, action='all_tenants')
        else:
            authorize(context)
            # Scope the listing to the caller's project (or user).
            if context.project_id:
                search_opts["project_id"] = context.project_id
            else:
                search_opts["user_id"] = context.user_id
        self.check_fping()
        include = req.GET.get("include", None)
        if include:
            include = set(include.split(","))
            exclude = set()
        else:
            include = None
            exclude = req.GET.get("exclude", None)
            if exclude:
                exclude = set(exclude.split(","))
            else:
                exclude = set()
        instance_list = self.compute_api.get_all(
            context, search_opts=search_opts, want_objects=True)
        ip_list = []
        instance_ips = {}
        instance_projects = {}
        for instance in instance_list:
            uuid = instance.uuid
            if uuid in exclude or (include is not None and
                                   uuid not in include):
                continue
            ips = [str(ip) for ip in self._get_instance_ips(context, instance)]
            instance_ips[uuid] = ips
            instance_projects[uuid] = instance.project_id
            ip_list += ips
        # One fping invocation for all addresses, then fan results back out
        # to the per-instance records.
        alive_ips = self.fping(ip_list)
        res = []
        for instance_uuid, ips in six.iteritems(instance_ips):
            res.append({
                "id": instance_uuid,
                "project_id": instance_projects[instance_uuid],
                "alive": bool(set(ips) & alive_ips),
            })
        return {"servers": res}

    @extensions.expected_errors((404, 503))
    def show(self, req, id):
        """GET /os-fping/{id}: ping a single instance."""
        context = req.environ["nova.context"]
        authorize(context)
        self.check_fping()
        instance = common.get_instance(self.compute_api, context, id)
        ips = [str(ip) for ip in self._get_instance_ips(context, instance)]
        alive_ips = self.fping(ips)
        return {
            "server": {
                "id": instance.uuid,
                "project_id": instance.project_id,
                # Alive if any of the instance's addresses answered.
                "alive": bool(set(ips) & alive_ips),
            }
        }
class Fping(extensions.V21APIExtensionBase):
    """Fping Management Extension."""

    name = "Fping"
    alias = ALIAS
    version = 1

    def get_resources(self):
        """Expose the os-fping resource, backed by FpingController."""
        return [extensions.ResourceExtension(ALIAS, FpingController())]

    def get_controller_extensions(self):
        """This plugin provides no controller extensions."""
        return []
|
apache-2.0
|
nonemaw/pynet
|
learnpy_ecourse/class10/uptime.py
|
4
|
2224
|
import re
def find_uptime_field(a_pattern, uptime_str):
    '''Search uptime_str for a_pattern.

    Return the first captured group as an int, or 0 when the pattern does
    not match.
    '''
    match = re.search(a_pattern, uptime_str)
    return int(match.group(1)) if match else 0
class Uptime(object):
    '''Parse a Cisco IOS "uptime is ..." string into component fields.

    Fields that do not appear in the string default to 0.
    '''

    # Capture patterns, in the same order as the attributes assigned below.
    _FIELD_PATTERNS = (
        r" ([0-9]+) year",
        r" ([0-9]+) week",
        r" ([0-9]+) day",
        r" ([0-9]+) hour",
        r" ([0-9]+) minute",
    )

    def __init__(self, uptime_str):
        # Everything after "uptime is" carries the duration fields.
        _, remainder = uptime_str.split("uptime is")
        values = []
        for pattern in self._FIELD_PATTERNS:
            found = re.search(pattern, remainder)
            values.append(int(found.group(1)) if found else 0)
        (self.years, self.weeks, self.days, self.hours, self.minutes) = values

    def uptime_seconds(self):
        '''Return the uptime in seconds'''
        minute_s = 60
        hour_s = 60 * minute_s
        day_s = 24 * hour_s
        week_s = 7 * day_s
        year_s = 365 * day_s
        return (self.years * year_s + self.weeks * week_s +
                self.days * day_s + self.hours * hour_s +
                self.minutes * minute_s)
def main():
    '''
    Some test code
    '''
    # Representative IOS uptime strings covering 2-4 populated fields.
    uptime_strings = [
        'twb-sf-881 uptime is 6 weeks, 4 days, 2 hours, 25 minutes',
        '3750RJ uptime is 1 hour, 29 minutes',
        'CATS3560 uptime is 8 weeks, 4 days, 18 hours, 16 minutes',
        'rtr1 uptime is 5 years, 18 weeks, 8 hours, 23 minutes',
    ]
    # NOTE: Python 2 print statements -- this script is Python-2-only.
    for uptime_str in uptime_strings:
        test_obj = Uptime(uptime_str)
        print
        print "> " + uptime_str
        print "%-20s: %s" % ('years', test_obj.years)
        print "%-20s: %s" % ('weeks', test_obj.weeks)
        print "%-20s: %s" % ('days', test_obj.days)
        print "%-20s: %s" % ('hours', test_obj.hours)
        print "%-20s: %s" % ('minutes', test_obj.minutes)
        print "%-20s: %s" % ('Uptime in seconds: ', test_obj.uptime_seconds())
        print
if __name__ == "__main__":
main()
|
gpl-2.0
|
wimnat/ansible-modules-extras
|
packaging/os/layman.py
|
45
|
7881
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Jakub Jirutka <jakub@jirutka.cz>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import shutil
from os import path
DOCUMENTATION = '''
---
module: layman
author: "Jakub Jirutka (@jirutka)"
version_added: "1.6"
short_description: Manage Gentoo overlays
description:
- Uses Layman to manage an additional repositories for the Portage package manager on Gentoo Linux.
Please note that Layman must be installed on a managed node prior using this module.
requirements:
- "python >= 2.6"
- layman python module
options:
name:
description:
- The overlay id to install, synchronize, or uninstall.
Use 'ALL' to sync all of the installed overlays (can be used only when C(state=updated)).
required: true
list_url:
description:
- A URL of the alternative overlays list that defines the overlay to install.
This list will be fetched and saved under C(${overlay_defs}/${name}.xml), where
C(overlay_defs) is read from the Layman's configuration.
required: false
state:
description:
- Whether to install (C(present)), sync (C(updated)), or uninstall (C(absent)) the overlay.
required: false
default: present
choices: [present, absent, updated]
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be
set to C(no) when no other option exists. Prior to 1.9.3 the code
defaulted to C(no).
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: '1.9.3'
'''
EXAMPLES = '''
# Install the overlay 'mozilla' which is on the central overlays list.
- layman: name=mozilla
# Install the overlay 'cvut' from the specified alternative list.
- layman: name=cvut list_url=http://raw.github.com/cvut/gentoo-overlay/master/overlay.xml
# Update (sync) the overlay 'cvut', or install if not installed yet.
- layman: name=cvut list_url=http://raw.github.com/cvut/gentoo-overlay/master/overlay.xml state=updated
# Update (sync) all of the installed overlays.
- layman: name=ALL state=updated
# Uninstall the overlay 'cvut'.
- layman: name=cvut state=absent
'''
USERAGENT = 'ansible-httpget'
try:
from layman.api import LaymanAPI
from layman.config import BareConfig
HAS_LAYMAN_API = True
except ImportError:
HAS_LAYMAN_API = False
class ModuleError(Exception): pass
def init_layman(config=None):
    '''Returns the initialized ``LaymanAPI``.

    :param config: the layman's configuration to use (optional)
    '''
    if config is not None:
        return LaymanAPI(config)
    # Default: read the system layman config quietly.
    return LaymanAPI(BareConfig(read_configfile=True, quietness=1))
def download_url(module, url, dest):
    '''Fetch ``url`` over HTTP(S) and write the body to ``dest``.

    :param url: the URL to download
    :param dest: the absolute path of where to save the downloaded content to;
        it must be writable and not a directory
    :raises ModuleError
    '''
    # Hack to add params in the form that fetch_url expects
    module.params['http_agent'] = USERAGENT
    resp, meta = fetch_url(module, url)
    if meta['status'] != 200:
        raise ModuleError("Failed to get %s: %s" % (url, meta['msg']))
    try:
        with open(dest, 'w') as out:
            shutil.copyfileobj(resp, out)
    except IOError as err:
        raise ModuleError("Failed to write: %s" % str(err))
def install_overlay(module, name, list_url=None):
    '''Installs the overlay repository. If not on the central overlays list,
    then :list_url of an alternative list must be provided. The list will be
    fetched and saved under ``%(overlay_defs)/%(name.xml)`` (location of the
    ``overlay_defs`` is read from the Layman's configuration).

    :param name: the overlay id
    :param list_url: the URL of the remote repositories list to look for the overlay
        definition (optional, default: None)
    :returns: True if the overlay was installed, or False if already exists
        (i.e. nothing has changed)
    :raises ModuleError
    '''
    # read Layman configuration
    layman_conf = BareConfig(read_configfile=True)
    layman = init_layman(layman_conf)

    # Already installed: nothing to change.
    if layman.is_installed(name):
        return False

    # Check mode: report the would-be change and stop before any mutation.
    if module.check_mode:
        mymsg = 'Would add layman repo \'' + name + '\''
        module.exit_json(changed=True, msg=mymsg)

    if not layman.is_repo(name):
        # Overlay unknown to layman: we need the alternative list URL to
        # fetch its definition.
        if not list_url:
            raise ModuleError("Overlay '%s' is not on the list of known " \
                "overlays and URL of the remote list was not provided." % name)

        overlay_defs = layman_conf.get_option('overlay_defs')
        dest = path.join(overlay_defs, name + '.xml')

        download_url(module, list_url, dest)

        # reload config -- the freshly saved definitions file must be
        # picked up before add_repos can succeed
        layman = init_layman()

    if not layman.add_repos(name):
        raise ModuleError(layman.get_errors())

    return True
def uninstall_overlay(module, name):
    '''Uninstalls the given overlay repository from the system.

    :param name: the overlay id to uninstall
    :returns: True if the overlay was uninstalled, or False if doesn't exist
        (i.e. nothing has changed)
    :raises ModuleError
    '''
    layman = init_layman()
    if not layman.is_installed(name):
        return False

    # Check mode: report what would happen without touching the system.
    if module.check_mode:
        module.exit_json(changed=True,
                         msg="Would remove layman repo '%s'" % name)

    layman.delete_repos(name)
    if layman.get_errors():
        raise ModuleError(layman.get_errors())
    return True
def sync_overlay(name):
    '''Synchronizes the specified overlay repository.

    :param name: the overlay repository id to sync
    :raises ModuleError
    '''
    layman = init_layman()
    if layman.sync(name):
        return
    # Collect the per-repository failure messages layman recorded.
    failures = [str(item[1]) for item in layman.sync_results[2]]
    raise ModuleError(failures)
def sync_overlays():
    '''Synchronize all of the installed overlays.

    :raises ModuleError
    '''
    for overlay_id in init_layman().get_installed():
        sync_overlay(overlay_id)
def main():
    """Module entry point: parse parameters and dispatch on ``state``."""
    # define module
    module = AnsibleModule(
        argument_spec = dict(
            name = dict(required=True),
            list_url = dict(aliases=['url']),
            state = dict(default="present", choices=['present', 'absent', 'updated']),
            validate_certs = dict(required=False, default=True, type='bool'),
        ),
        supports_check_mode=True
    )

    if not HAS_LAYMAN_API:
        module.fail_json(msg='Layman is not installed')

    state, name, url = (module.params[key] for key in ['state', 'name', 'list_url'])

    changed = False
    try:
        if state == 'present':
            changed = install_overlay(module, name, url)

        elif state == 'updated':
            if name == 'ALL':
                # NOTE(review): the ALL path reports changed=False even after
                # syncing -- longstanding behavior, confirm before changing.
                sync_overlays()
            elif install_overlay(module, name, url):
                changed = True
            else:
                sync_overlay(name)
        else:
            changed = uninstall_overlay(module, name)

    except ModuleError as e:
        # BUG FIX: Exception.message does not exist on Python 3; str(e)
        # renders the same message text on both Python 2 and 3.
        module.fail_json(msg=str(e))
    else:
        module.exit_json(changed=changed, name=name)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
tcwicklund/django
|
django/contrib/sessions/backends/signed_cookies.py
|
383
|
2895
|
from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase
from django.core import signing
class SessionStore(SessionBase):
    """Cookie-based session backend: the session data itself travels,
    signed, inside the session key (the cookie value), so there is no
    server-side store at all."""

    def load(self):
        """
        We load the data from the key itself instead of fetching from
        some external data store. Opposite of _get_session_key(),
        raises BadSignature if signature fails.
        """
        try:
            return signing.loads(self.session_key,
                serializer=self.serializer,
                # This doesn't handle non-default expiry dates, see #19201
                max_age=settings.SESSION_COOKIE_AGE,
                salt='django.contrib.sessions.backends.signed_cookies')
        except Exception:
            # BadSignature, ValueError, or unpickling exceptions. If any of
            # these happen, reset the session.
            self.create()
            return {}

    def create(self):
        """
        To create a new key, we simply make sure that the modified flag is set
        so that the cookie is set on the client for the current request.
        """
        self.modified = True

    def save(self, must_create=False):
        """
        To save, we get the session key as a securely signed string and then
        set the modified flag so that the cookie is set on the client for the
        current request.
        """
        # NOTE(review): must_create is accepted for API compatibility but has
        # no effect here -- there is no server-side store to create into.
        self._session_key = self._get_session_key()
        self.modified = True

    def exists(self, session_key=None):
        """
        This method makes sense when you're talking to a shared resource, but
        it doesn't matter when you're storing the information in the client's
        cookie.
        """
        return False

    def delete(self, session_key=None):
        """
        To delete, we clear the session key and the underlying data structure
        and set the modified flag so that the cookie is set on the client for
        the current request.
        """
        self._session_key = ''
        self._session_cache = {}
        self.modified = True

    def cycle_key(self):
        """
        Keeps the same data but with a new key. To do this, we just have to
        call ``save()`` and it will automatically save a cookie with a new key
        at the end of the request.
        """
        self.save()

    def _get_session_key(self):
        """
        Most session backends don't need to override this method, but we do,
        because instead of generating a random string, we want to actually
        generate a secure url-safe Base64-encoded string of data as our
        session key.
        """
        session_cache = getattr(self, '_session_cache', {})
        return signing.dumps(session_cache, compress=True,
            salt='django.contrib.sessions.backends.signed_cookies',
            serializer=self.serializer)

    @classmethod
    def clear_expired(cls):
        # Nothing to clear: expiry is enforced by max_age at load() time.
        pass
|
bsd-3-clause
|
sholsapp/cryptography
|
tests/hazmat/primitives/fixtures_dsa.py
|
10
|
7822
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography.hazmat.primitives.asymmetric.dsa import (
DSAParameterNumbers, DSAPrivateNumbers, DSAPublicNumbers
)
# 1024-bit DSA private-key test fixture: hex-encoded domain parameters
# (p, q, g), public value y, and private value x.
DSA_KEY_1024 = DSAPrivateNumbers(
    public_numbers=DSAPublicNumbers(
        parameter_numbers=DSAParameterNumbers(
            p=int(
                'd38311e2cd388c3ed698e82fdf88eb92b5a9a483dc88005d4b725ef34'
                '1eabb47cf8a7a8a41e792a156b7ce97206c4f9c5ce6fc5ae7912102b6'
                'b502e59050b5b21ce263dddb2044b652236f4d42ab4b5d6aa73189cef'
                '1ace778d7845a5c1c1c7147123188f8dc551054ee162b634d60f097f7'
                '19076640e20980a0093113a8bd73', 16
            ),
            q=int('96c5390a8b612c0e422bb2b0ea194a3ec935a281', 16),
            g=int(
                '06b7861abbd35cc89e79c52f68d20875389b127361ca66822138ce499'
                '1d2b862259d6b4548a6495b195aa0e0b6137ca37eb23b94074d3c3d30'
                '0042bdf15762812b6333ef7b07ceba78607610fcc9ee68491dbc1e34c'
                'd12615474e52b18bc934fb00c61d39e7da8902291c4434a4e2224c3f4'
                'fd9f93cd6f4f17fc076341a7e7d9', 16
            )
        ),
        y=int(
            '6f26d98d41de7d871b6381851c9d91fa03942092ab6097e76422070edb71d'
            'b44ff568280fdb1709f8fc3feab39f1f824adaeb2a298088156ac31af1aa0'
            '4bf54f475bdcfdcf2f8a2dd973e922d83e76f016558617603129b21c70bf7'
            'd0e5dc9e68fe332e295b65876eb9a12fe6fca9f1a1ce80204646bf99b5771'
            'd249a6fea627', 16
        )
    ),
    x=int('8185fee9cc7c0e91fd85503274f1cd5a3fd15a49', 16)
)
# 2048-bit DSA private-key test fixture (same layout as DSA_KEY_1024 above).
DSA_KEY_2048 = DSAPrivateNumbers(
    public_numbers=DSAPublicNumbers(
        parameter_numbers=DSAParameterNumbers(
            p=int(
                'ea1fb1af22881558ef93be8a5f8653c5a559434c49c8c2c12ace5e9c4'
                '1434c9cf0a8e9498acb0f4663c08b4484eace845f6fb17dac62c98e70'
                '6af0fc74e4da1c6c2b3fbf5a1d58ff82fc1a66f3e8b12252c40278fff'
                '9dd7f102eed2cb5b7323ebf1908c234d935414dded7f8d244e54561b0'
                'dca39b301de8c49da9fb23df33c6182e3f983208c560fb5119fbf78eb'
                'e3e6564ee235c6a15cbb9ac247baba5a423bc6582a1a9d8a2b4f0e9e3'
                'd9dbac122f750dd754325135257488b1f6ecabf21bff2947fe0d3b2cb'
                '7ffe67f4e7fcdf1214f6053e72a5bb0dd20a0e9fe6db2df0a908c36e9'
                '5e60bf49ca4368b8b892b9c79f61ef91c47567c40e1f80ac5aa66ef7',
                16
            ),
            q=int(
                '8ec73f3761caf5fdfe6e4e82098bf10f898740dcb808204bf6b18f507'
                '192c19d', 16
            ),
            g=int(
                'e4c4eca88415b23ecf811c96e48cd24200fe916631a68a684e6ccb6b1'
                '913413d344d1d8d84a333839d88eee431521f6e357c16e6a93be111a9'
                '8076739cd401bab3b9d565bf4fb99e9d185b1e14d61c93700133f908b'
                'ae03e28764d107dcd2ea7674217622074bb19efff482f5f5c1a86d555'
                '1b2fc68d1c6e9d8011958ef4b9c2a3a55d0d3c882e6ad7f9f0f3c6156'
                '8f78d0706b10a26f23b4f197c322b825002284a0aca91807bba98ece9'
                '12b80e10cdf180cf99a35f210c1655fbfdd74f13b1b5046591f840387'
                '3d12239834dd6c4eceb42bf7482e1794a1601357b629ddfa971f2ed27'
                '3b146ec1ca06d0adf55dd91d65c37297bda78c6d210c0bc26e558302',
                16
            )
        ),
        y=int(
            '6b32e31ab9031dc4dd0b5039a78d07826687ab087ae6de4736f5b0434e125'
            '3092e8a0b231f9c87f3fc8a4cb5634eb194bf1b638b7a7889620ce6711567'
            'e36aa36cda4604cfaa601a45918371d4ccf68d8b10a50a0460eb1dc0fff62'
            'ef5e6ee4d473e18ea4a66c196fb7e677a49b48241a0b4a97128eff30fa437'
            '050501a584f8771e7280d26d5af30784039159c11ebfea10b692fd0a58215'
            'eeb18bff117e13f08db792ed4151a218e4bed8dddfb0793225bd1e9773505'
            '166f4bd8cedbb286ea28232972da7bae836ba97329ba6b0a36508e50a52a7'
            '675e476d4d4137eae13f22a9d2fefde708ba8f34bf336c6e76331761e4b06'
            '17633fe7ec3f23672fb19d27', 16
        )
    ),
    x=int(
        '405772da6e90d809e77d5de796562a2dd4dfd10ef00a83a3aba6bd818a0348a1',
        16
    )
)
# 3072-bit DSA private-key test fixture (same layout as DSA_KEY_1024 above).
DSA_KEY_3072 = DSAPrivateNumbers(
    public_numbers=DSAPublicNumbers(
        parameter_numbers=DSAParameterNumbers(
            p=int(
                'f335666dd1339165af8b9a5e3835adfe15c158e4c3c7bd53132e7d582'
                '8c352f593a9a787760ce34b789879941f2f01f02319f6ae0b756f1a84'
                '2ba54c85612ed632ee2d79ef17f06b77c641b7b080aff52a03fc2462e'
                '80abc64d223723c236deeb7d201078ec01ca1fbc1763139e25099a84e'
                'c389159c409792080736bd7caa816b92edf23f2c351f90074aa5ea265'
                '1b372f8b58a0a65554db2561d706a63685000ac576b7e4562e262a142'
                '85a9c6370b290e4eb7757527d80b6c0fd5df831d36f3d1d35f12ab060'
                '548de1605fd15f7c7aafed688b146a02c945156e284f5b71282045aba'
                '9844d48b5df2e9e7a5887121eae7d7b01db7cdf6ff917cd8eb50c6bf1'
                'd54f90cce1a491a9c74fea88f7e7230b047d16b5a6027881d6f154818'
                'f06e513faf40c8814630e4e254f17a47bfe9cb519b98289935bf17673'
                'ae4c8033504a20a898d0032ee402b72d5986322f3bdfb27400561f747'
                '6cd715eaabb7338b854e51fc2fa026a5a579b6dcea1b1c0559c13d3c1'
                '136f303f4b4d25ad5b692229957', 16
            ),
            q=int(
                'd3eba6521240694015ef94412e08bf3cf8d635a455a398d6f210f6169'
                '041653b', 16
            ),
            g=int(
                'ce84b30ddf290a9f787a7c2f1ce92c1cbf4ef400e3cd7ce4978db2104'
                'd7394b493c18332c64cec906a71c3778bd93341165dee8e6cd4ca6f13'
                'afff531191194ada55ecf01ff94d6cf7c4768b82dd29cd131aaf202ae'
                'fd40e564375285c01f3220af4d70b96f1395420d778228f1461f5d0b8'
                'e47357e87b1fe3286223b553e3fc9928f16ae3067ded6721bedf1d1a0'
                '1bfd22b9ae85fce77820d88cdf50a6bde20668ad77a707d1c60fcc5d5'
                '1c9de488610d0285eb8ff721ff141f93a9fb23c1d1f7654c07c46e588'
                '36d1652828f71057b8aff0b0778ef2ca934ea9d0f37daddade2d823a4'
                'd8e362721082e279d003b575ee59fd050d105dfd71cd63154efe431a0'
                '869178d9811f4f231dc5dcf3b0ec0f2b0f9896c32ec6c7ee7d60aa971'
                '09e09224907328d4e6acd10117e45774406c4c947da8020649c3168f6'
                '90e0bd6e91ac67074d1d436b58ae374523deaf6c93c1e6920db4a080b'
                '744804bb073cecfe83fa9398cf150afa286dc7eb7949750cf5001ce10'
                '4e9187f7e16859afa8fd0d775ae', 16
            )
        ),
        y=int(
            '814824e435e1e6f38daa239aad6dad21033afce6a3ebd35c1359348a0f241'
            '8871968c2babfc2baf47742148828f8612183178f126504da73566b6bab33'
            'ba1f124c15aa461555c2451d86c94ee21c3e3fc24c55527e01b1f03adcdd8'
            'ec5cb08082803a7b6a829c3e99eeb332a2cf5c035b0ce0078d3d414d31fa4'
            '7e9726be2989b8d06da2e6cd363f5a7d1515e3f4925e0b32adeae3025cc5a'
            '996f6fd27494ea408763de48f3bb39f6a06514b019899b312ec570851637b'
            '8865cff3a52bf5d54ad5a19e6e400a2d33251055d0a440b50d53f4791391d'
            'c754ad02b9eab74c46b4903f9d76f824339914db108057af7cde657d41766'
            'a99991ac8787694f4185d6f91d7627048f827b405ec67bf2fe56141c4c581'
            'd8c317333624e073e5879a82437cb0c7b435c0ce434e15965db1315d64895'
            '991e6bbe7dac040c42052408bbc53423fd31098248a58f8a67da3a39895cd'
            '0cc927515d044c1e3cb6a3259c3d0da354cce89ea3552c59609db10ee9899'
            '86527436af21d9485ddf25f90f7dff6d2bae', 16
        )
    ),
    x=int(
        'b2764c46113983777d3e7e97589f1303806d14ad9f2f1ef033097de954b17706',
        16
    )
)
|
bsd-3-clause
|
klusark/android_external_chromium_org
|
tools/deep_memory_profiler/subcommands/stacktrace.py
|
59
|
1156
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
from lib.bucket import BUCKET_ID
from lib.subcommand import SubCommand
class StacktraceCommand(SubCommand):
  """'stacktrace' subcommand: prints each stacktrace in a dump, with the
  bucket's symbolized function names appended."""

  def __init__(self):
    super(StacktraceCommand, self).__init__(
        'Usage: %prog stacktrace <dump>')

  def do(self, sys_argv):
    """Loads the dump named on the command line and writes it to stdout."""
    _, args = self._parse_args(sys_argv, 1)
    dump_path = args[1]
    bucket_set, dump = SubCommand.load_basic_files(dump_path, False)
    StacktraceCommand._output(dump, bucket_set, sys.stdout)
    return 0

  @staticmethod
  def _output(dump, bucket_set, out):
    """Outputs a given stacktrace.

    Args:
        dump: A Dump object providing iter_stacktrace.
        bucket_set: A BucketSet object.
        out: A file object to output.
    """
    for line in dump.iter_stacktrace:
      fields = line.split()
      bucket = bucket_set.get(int(fields[BUCKET_ID]))
      if bucket:
        # Echo the words preceding the bucket id, then the symbolized frames.
        for index in range(BUCKET_ID - 1):
          out.write(fields[index] + ' ')
        for frame in bucket.symbolized_stackfunction:
          out.write(frame + ' ')
        out.write('\n')
|
bsd-3-clause
|
hgrif/incubator-airflow
|
tests/contrib/operators/test_mlengine_operator_utils.py
|
11
|
7521
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import unittest
from airflow import configuration, DAG
from airflow.contrib.operators import mlengine_operator_utils
from airflow.contrib.operators.mlengine_operator_utils import create_evaluate_ops
from airflow.exceptions import AirflowException
from mock import ANY
from mock import patch
DEFAULT_DATE = datetime.datetime(2017, 6, 6)
class CreateEvaluateOpsTest(unittest.TestCase):
    """Tests for mlengine_operator_utils.create_evaluate_ops."""
    # Batch-prediction input payload with all model-origin fields
    # (model_uri / model_name+version_name) deliberately absent; individual
    # tests add/derive the origin as needed.
    INPUT_MISSING_ORIGIN = {
        'dataFormat': 'TEXT',
        'inputPaths': ['gs://legal-bucket/fake-input-path/*'],
        'outputPath': 'gs://legal-bucket/fake-output-path',
        'region': 'us-east1',
        'versionName': 'projects/test-project/models/test_model/versions/test_version',
    }
    # Canned successful create_job response (without the echoed
    # 'predictionInput' section, which tests fill in per-case).
    SUCCESS_MESSAGE_MISSING_INPUT = {
        'jobId': 'eval_test_prediction',
        'predictionOutput': {
            'outputPath': 'gs://fake-output-path',
            'predictionCount': 5000,
            'errorCount': 0,
            'nodeHours': 2.78
        },
        'state': 'SUCCEEDED'
    }
    def setUp(self):
        """Builds a DAG whose default_args carry the model origin fields."""
        super(CreateEvaluateOpsTest, self).setUp()
        configuration.load_test_config()
        self.dag = DAG(
            'test_dag',
            default_args={
                'owner': 'airflow',
                'start_date': DEFAULT_DATE,
                'end_date': DEFAULT_DATE,
                'project_id': 'test-project',
                'region': 'us-east1',
                'model_name': 'test_model',
                'version_name': 'test_version',
            },
            schedule_interval='@daily')
        self.metric_fn = lambda x: (0.1,)
        # create_evaluate_ops ships metric_fn to Dataflow as a
        # base64-encoded dill pickle; precompute the expected encoding.
        self.metric_fn_encoded = mlengine_operator_utils.base64.b64encode(
            mlengine_operator_utils.dill.dumps(self.metric_fn, recurse=True))
    def testSuccessfulRun(self):
        """Happy path: prediction, summary and validation ops all succeed."""
        input_with_model = self.INPUT_MISSING_ORIGIN.copy()
        pred, summary, validate = create_evaluate_ops(
            task_prefix='eval-test',
            batch_prediction_job_id='eval-test-prediction',
            data_format=input_with_model['dataFormat'],
            input_paths=input_with_model['inputPaths'],
            prediction_path=input_with_model['outputPath'],
            metric_fn_and_keys=(self.metric_fn, ['err']),
            validate_fn=(lambda x: 'err=%.1f' % x['err']),
            dag=self.dag)
        # 1) The prediction op submits an MLEngine batch prediction job.
        with patch('airflow.contrib.operators.mlengine_operator.'
                   'MLEngineHook') as mock_mlengine_hook:
            success_message = self.SUCCESS_MESSAGE_MISSING_INPUT.copy()
            success_message['predictionInput'] = input_with_model
            hook_instance = mock_mlengine_hook.return_value
            hook_instance.create_job.return_value = success_message
            result = pred.execute(None)
            mock_mlengine_hook.assert_called_with('google_cloud_default', None)
            hook_instance.create_job.assert_called_once_with(
                'test-project',
                {
                    'jobId': 'eval_test_prediction',
                    'predictionInput': input_with_model,
                },
                ANY)
            self.assertEqual(success_message['predictionOutput'], result)
        # 2) The summary op launches the Dataflow summary pipeline with the
        #    pickled metric_fn and metric keys.
        with patch('airflow.contrib.operators.dataflow_operator.'
                   'DataFlowHook') as mock_dataflow_hook:
            hook_instance = mock_dataflow_hook.return_value
            hook_instance.start_python_dataflow.return_value = None
            summary.execute(None)
            mock_dataflow_hook.assert_called_with(
                gcp_conn_id='google_cloud_default', delegate_to=None, poll_sleep=10)
            hook_instance.start_python_dataflow.assert_called_once_with(
                'eval-test-summary',
                {
                    'prediction_path': 'gs://legal-bucket/fake-output-path',
                    'metric_keys': 'err',
                    'metric_fn_encoded': self.metric_fn_encoded,
                },
                'airflow.contrib.operators.mlengine_prediction_summary',
                ['-m'])
        # 3) The validate op downloads the summary JSON from GCS and applies
        #    validate_fn to it.
        with patch('airflow.contrib.operators.mlengine_operator_utils.'
                   'GoogleCloudStorageHook') as mock_gcs_hook:
            hook_instance = mock_gcs_hook.return_value
            hook_instance.download.return_value = '{"err": 0.9, "count": 9}'
            result = validate.execute({})
            hook_instance.download.assert_called_once_with(
                'legal-bucket', 'fake-output-path/prediction.summary.json')
            self.assertEqual('err=0.9', result)
    def testFailures(self):
        """Argument validation: missing/ambiguous model origin and
        non-callable metric_fn / validate_fn must raise."""
        # This DAG's default_args intentionally omit model_name/version_name
        # so the 'missing origin' path can be exercised.
        dag = DAG(
            'test_dag',
            default_args={
                'owner': 'airflow',
                'start_date': DEFAULT_DATE,
                'end_date': DEFAULT_DATE,
                'project_id': 'test-project',
                'region': 'us-east1',
            },
            schedule_interval='@daily')
        input_with_model = self.INPUT_MISSING_ORIGIN.copy()
        other_params_but_models = {
            'task_prefix': 'eval-test',
            'batch_prediction_job_id': 'eval-test-prediction',
            'data_format': input_with_model['dataFormat'],
            'input_paths': input_with_model['inputPaths'],
            'prediction_path': input_with_model['outputPath'],
            'metric_fn_and_keys': (self.metric_fn, ['err']),
            'validate_fn': (lambda x: 'err=%.1f' % x['err']),
            'dag': dag,
        }
        with self.assertRaisesRegexp(ValueError, 'Missing model origin'):
            _ = create_evaluate_ops(**other_params_but_models)
        with self.assertRaisesRegexp(ValueError, 'Ambiguous model origin'):
            _ = create_evaluate_ops(model_uri='abc', model_name='cde',
                                    **other_params_but_models)
        with self.assertRaisesRegexp(ValueError, 'Ambiguous model origin'):
            _ = create_evaluate_ops(model_uri='abc', version_name='vvv',
                                    **other_params_but_models)
        with self.assertRaisesRegexp(AirflowException,
                                     '`metric_fn` param must be callable'):
            params = other_params_but_models.copy()
            params['metric_fn_and_keys'] = (None, ['abc'])
            _ = create_evaluate_ops(model_uri='gs://blah', **params)
        with self.assertRaisesRegexp(AirflowException,
                                     '`validate_fn` param must be callable'):
            params = other_params_but_models.copy()
            params['validate_fn'] = None
            _ = create_evaluate_ops(model_uri='gs://blah', **params)
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
|
apache-2.0
|
patsissons/Flexget
|
flexget/plugins/urlrewrite_deadfrog.py
|
24
|
1630
|
from __future__ import unicode_literals, division, absolute_import
import logging
import re
import urllib2
from flexget import plugin
from flexget.event import event
from flexget.plugins.plugin_urlrewriting import UrlRewritingError
from flexget.utils.tools import urlopener
from flexget.utils.soup import get_soup
log = logging.getLogger('deadfrog')
class UrlRewriteDeadFrog(object):
    """DeadFrog urlrewriter: turns deadfrog.us page URLs into direct
    .torrent download URLs."""

    # urlrewriter API
    def url_rewritable(self, task, entry):
        """Return True for deadfrog.us URLs that still need rewriting."""
        url = entry['url']
        # Direct download links are already final -- never rewrite those.
        if url.startswith('http://www.deadfrog.us/download/'):
            return False
        return url.startswith(('http://www.deadfrog.us/', 'http://deadfrog.us/'))

    # urlrewriter API
    def url_rewrite(self, task, entry):
        """Replace the entry's URL with the direct download link."""
        entry['url'] = self.parse_download_page(entry['url'])

    @plugin.internet(log)
    def parse_download_page(self, url):
        """Fetch the page at `url` and extract the .torrent download link."""
        txheaders = {'User-agent': 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)'}
        request = urllib2.Request(url, None, txheaders)
        page = urlopener(request, log)
        try:
            soup = get_soup(page)
        except Exception as e:
            raise UrlRewritingError(e)
        anchor = soup.find('a', attrs={'href': re.compile("download/\d+/.*\.torrent")})
        if not anchor:
            raise UrlRewritingError('Unable to locate download link from url %s' % url)
        return 'http://www.deadfrog.us/' + anchor.get('href')
@event('plugin.register')
def register_plugin():
    """Register the DeadFrog urlrewriter with FlexGet."""
    plugin.register(
        UrlRewriteDeadFrog, 'deadfrog',
        groups=['urlrewriter'], api_ver=2)
|
mit
|
AlexHill/django
|
django/contrib/gis/tests/layermap/models.py
|
5
|
2279
|
from django.contrib.gis.db import models
class State(models.Model):
    """Minimal named model; FK target of County below."""
    name = models.CharField(max_length=20)
    objects = models.GeoManager()  # GeoManager enables spatial lookups through relations.
class County(models.Model):
    """County with a multipolygon boundary (SRID 4269) and a State FK."""
    name = models.CharField(max_length=25)
    state = models.ForeignKey(State)
    mpoly = models.MultiPolygonField(srid=4269) # Multipolygon in NAD83
    objects = models.GeoManager()
class CountyFeat(models.Model):
    """Like County but stores one POLYGON per row (no FK, no multipolygon)."""
    name = models.CharField(max_length=25)
    poly = models.PolygonField(srid=4269)  # NAD83, same SRID as County.mpoly.
    objects = models.GeoManager()
class City(models.Model):
    """City with scalar attributes of several field types plus a point
    geometry; mapped from layer data via city_mapping below."""
    name = models.CharField(max_length=25)
    name_txt = models.TextField(default='')
    population = models.IntegerField()
    density = models.DecimalField(max_digits=7, decimal_places=1)
    dt = models.DateField()
    point = models.PointField()  # Default SRID (no srid argument given).
    objects = models.GeoManager()
class Interstate(models.Model):
    """Interstate highway: name, decimal length, and a linestring path."""
    name = models.CharField(max_length=20)
    length = models.DecimalField(max_digits=6, decimal_places=2)
    path = models.LineStringField()
    objects = models.GeoManager()
# Same as `City` above, but for testing model inheritance.
class CityBase(models.Model):
    """Concrete base model; ICity1/ICity2 below derive from it."""
    name = models.CharField(max_length=25)
    population = models.IntegerField()
    density = models.DecimalField(max_digits=7, decimal_places=1)
    point = models.PointField()
    objects = models.GeoManager()
class ICity1(CityBase):
    """First inheritance level: adds a date field to CityBase."""
    dt = models.DateField()
class ICity2(ICity1):
    """Second inheritance level; auto_now refreshes the timestamp on save."""
    dt_time = models.DateTimeField(auto_now=True)
class Invalid(models.Model):
    # NOTE(review): named 'Invalid' -- presumably used to exercise the
    # layer-mapping error paths in the tests; confirm against the test suite.
    point = models.PointField()
# Mapping dictionaries for the models above: model field name -> layer
# attribute (or OGR geometry type for geometry fields).
co_mapping = {'name': 'Name',
              'state': {'name': 'State'}, # ForeignKey's use another mapping dictionary for the _related_ Model (State in this case).
              'mpoly': 'MULTIPOLYGON', # Will convert POLYGON features into MULTIPOLYGONS.
              }
cofeat_mapping = {'name': 'Name',
                  'poly': 'POLYGON',
                  }
city_mapping = {'name': 'Name',
                'population': 'Population',
                'density': 'Density',
                'dt': 'Created',
                'point': 'POINT',
                }
inter_mapping = {'name': 'Name',
                 'length': 'Length',
                 'path': 'LINESTRING',
                 }
|
bsd-3-clause
|
NielsZeilemaker/incubator-airflow
|
airflow/hooks/samba_hook.py
|
61
|
1562
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from smbclient import SambaClient
import os
from airflow.hooks.base_hook import BaseHook
class SambaHook(BaseHook):
    """
    Allows for interaction with a samba server.

    Connection parameters (host, schema/share, login, password) come from
    the Airflow connection identified by ``samba_conn_id``.
    """

    def __init__(self, samba_conn_id):
        self.conn = self.get_connection(samba_conn_id)

    def get_conn(self):
        """Build and return a fresh SambaClient from the stored connection."""
        return SambaClient(
            server=self.conn.host,
            share=self.conn.schema,
            username=self.conn.login,
            ip=self.conn.host,
            password=self.conn.password)

    def push_from_local(self, destination_filepath, local_filepath):
        """Upload ``local_filepath`` to ``destination_filepath`` on the share,
        replacing an existing file and creating the parent folder when the
        destination does not exist yet."""
        client = self.get_conn()
        if client.exists(destination_filepath):
            # Replace an existing regular file; a directory is left alone.
            if client.isfile(destination_filepath):
                client.remove(destination_filepath)
        else:
            parent_folder = os.path.dirname(destination_filepath)
            if not client.exists(parent_folder):
                client.mkdir(parent_folder)
        client.upload(local_filepath, destination_filepath)
|
apache-2.0
|
jbedorf/tensorflow
|
tensorflow/python/keras/utils/data_utils_test.py
|
15
|
10290
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for data_utils."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from itertools import cycle
import os
import tarfile
import threading
import unittest
import zipfile
import numpy as np
from six.moves.urllib.parse import urljoin
from six.moves.urllib.request import pathname2url
from tensorflow.python import keras
from tensorflow.python.platform import test
class TestGetFileAndValidateIt(test.TestCase):
  """Tests keras.utils.data_utils.get_file / validate_file / _hash_file."""
  def test_get_file_and_validate_it(self):
    """Tests get_file from a url, plus extraction and validation.
    """
    dest_dir = self.get_temp_dir()
    orig_dir = self.get_temp_dir()
    # Build a text file plus tar.gz and zip archives of it to download from.
    text_file_path = os.path.join(orig_dir, 'test.txt')
    zip_file_path = os.path.join(orig_dir, 'test.zip')
    tar_file_path = os.path.join(orig_dir, 'test.tar.gz')
    with open(text_file_path, 'w') as text_file:
      text_file.write('Float like a butterfly, sting like a bee.')
    with tarfile.open(tar_file_path, 'w:gz') as tar_file:
      tar_file.add(text_file_path)
    with zipfile.ZipFile(zip_file_path, 'w') as zip_file:
      zip_file.write(text_file_path)
    # Serve the tarball via a file:// URL and fetch + untar it.
    origin = urljoin('file://', pathname2url(os.path.abspath(tar_file_path)))
    path = keras.utils.data_utils.get_file('test.txt', origin,
                                           untar=True, cache_subdir=dest_dir)
    filepath = path + '.tar.gz'
    hashval_sha256 = keras.utils.data_utils._hash_file(filepath)
    hashval_md5 = keras.utils.data_utils._hash_file(filepath, algorithm='md5')
    # Re-fetch with md5 and sha256 hash checks; both must succeed.
    path = keras.utils.data_utils.get_file(
        'test.txt', origin, md5_hash=hashval_md5,
        untar=True, cache_subdir=dest_dir)
    path = keras.utils.data_utils.get_file(
        filepath, origin, file_hash=hashval_sha256,
        extract=True, cache_subdir=dest_dir)
    self.assertTrue(os.path.exists(filepath))
    self.assertTrue(keras.utils.data_utils.validate_file(filepath,
                                                         hashval_sha256))
    self.assertTrue(keras.utils.data_utils.validate_file(filepath, hashval_md5))
    os.remove(filepath)
    # Same flow for the zip archive, using extract= instead of untar=.
    origin = urljoin('file://', pathname2url(os.path.abspath(zip_file_path)))
    hashval_sha256 = keras.utils.data_utils._hash_file(zip_file_path)
    hashval_md5 = keras.utils.data_utils._hash_file(zip_file_path,
                                                    algorithm='md5')
    path = keras.utils.data_utils.get_file(
        'test', origin, md5_hash=hashval_md5,
        extract=True, cache_subdir=dest_dir)
    path = keras.utils.data_utils.get_file(
        'test', origin, file_hash=hashval_sha256,
        extract=True, cache_subdir=dest_dir)
    self.assertTrue(os.path.exists(path))
    self.assertTrue(keras.utils.data_utils.validate_file(path, hashval_sha256))
    self.assertTrue(keras.utils.data_utils.validate_file(path, hashval_md5))
class ThreadsafeIter(object):
  """Wraps an iterator so multiple threads may consume it safely."""

  def __init__(self, it):
    self.it = it
    self.lock = threading.Lock()

  def __iter__(self):
    return self

  def __next__(self):
    # Serialize access: only one thread advances the iterator at a time.
    with self.lock:
      return next(self.it)

  def next(self):
    # Python 2 style alias for callers invoking next() directly.
    return self.__next__()
def threadsafe_generator(f):
  """Decorator: make generator function `f` yield through a ThreadsafeIter."""
  def wrapped(*args, **kwargs):
    return ThreadsafeIter(f(*args, **kwargs))
  return wrapped
class TestSequence(keras.utils.data_utils.Sequence):
  """Deterministic Sequence: item i is a uint32 ones-array times i * inner."""
  def __init__(self, shape, value=1.):
    # shape: per-item array shape; value: scale factor grown by on_epoch_end.
    self.shape = shape
    self.inner = value
  def __getitem__(self, item):
    # NOTE(review): the multiplication order matters for NumPy dtype
    # promotion (uint32 array * int index * float scale) -- do not reorder.
    return np.ones(self.shape, dtype=np.uint32) * item * self.inner
  def __len__(self):
    return 100
  def on_epoch_end(self):
    # Each epoch multiplies the scale by 5; enqueuer tests assert on this.
    self.inner *= 5.0
class FaultSequence(keras.utils.data_utils.Sequence):
  """Sequence whose every item access raises, for failure-path tests."""
  def __getitem__(self, item):
    raise IndexError(item, 'item is not present')
  def __len__(self):
    return 100
@threadsafe_generator
def create_generator_from_sequence_threads(ds):
  """Endlessly yields ds[0..len(ds)-1] in order, thread-safely wrapped."""
  for index in cycle(range(len(ds))):
    yield ds[index]
def create_generator_from_sequence_pcs(ds):
  """Endlessly yields ds[0..len(ds)-1] in order, restarting at the end."""
  for index in cycle(range(len(ds))):
    yield ds[index]
class TestEnqueuers(test.TestCase):
  """Tests for GeneratorEnqueuer and OrderedEnqueuer (threads + processes)."""
  def test_generator_enqueuer_threads(self):
    # Thread-backed generator enqueuer: all of 0..99 must be produced,
    # though not necessarily in order.
    enqueuer = keras.utils.data_utils.GeneratorEnqueuer(
        create_generator_from_sequence_threads(TestSequence([3, 200, 200, 3])),
        use_multiprocessing=False)
    enqueuer.start(3, 10)
    gen_output = enqueuer.get()
    acc = []
    for _ in range(100):
      acc.append(int(next(gen_output)[0, 0, 0, 0]))
    self.assertEqual(len(set(acc) - set(range(100))), 0)
    enqueuer.stop()
  @unittest.skipIf(
      os.name == 'nt',
      'use_multiprocessing=True does not work on windows properly.')
  def test_generator_enqueuer_processes(self):
    # Process-backed: with several workers the output order is expected to
    # differ from strict 0..99.
    enqueuer = keras.utils.data_utils.GeneratorEnqueuer(
        create_generator_from_sequence_pcs(TestSequence([3, 200, 200, 3])),
        use_multiprocessing=True)
    enqueuer.start(3, 10)
    gen_output = enqueuer.get()
    acc = []
    for _ in range(100):
      acc.append(int(next(gen_output)[0, 0, 0, 0]))
    self.assertNotEqual(acc, list(range(100)))
    enqueuer.stop()
  def test_generator_enqueuer_fail_threads(self):
    # A failing generator must surface its exception through get().
    enqueuer = keras.utils.data_utils.GeneratorEnqueuer(
        create_generator_from_sequence_threads(FaultSequence()),
        use_multiprocessing=False)
    enqueuer.start(3, 10)
    gen_output = enqueuer.get()
    with self.assertRaises(IndexError):
      next(gen_output)
  @unittest.skipIf(
      os.name == 'nt',
      'use_multiprocessing=True does not work on windows properly.')
  def test_generator_enqueuer_fail_processes(self):
    # Same failure propagation, but across process boundaries.
    enqueuer = keras.utils.data_utils.GeneratorEnqueuer(
        create_generator_from_sequence_pcs(FaultSequence()),
        use_multiprocessing=True)
    enqueuer.start(3, 10)
    gen_output = enqueuer.get()
    with self.assertRaises(IndexError):
      next(gen_output)
  def test_ordered_enqueuer_threads(self):
    # OrderedEnqueuer must preserve index order even with 3 worker threads.
    enqueuer = keras.utils.data_utils.OrderedEnqueuer(
        TestSequence([3, 200, 200, 3]), use_multiprocessing=False)
    enqueuer.start(3, 10)
    gen_output = enqueuer.get()
    acc = []
    for _ in range(100):
      acc.append(next(gen_output)[0, 0, 0, 0])
    self.assertEqual(acc, list(range(100)))
    enqueuer.stop()
  def test_ordered_enqueuer_processes(self):
    # Order must also be preserved with worker processes.
    enqueuer = keras.utils.data_utils.OrderedEnqueuer(
        TestSequence([3, 200, 200, 3]), use_multiprocessing=True)
    enqueuer.start(3, 10)
    gen_output = enqueuer.get()
    acc = []
    for _ in range(100):
      acc.append(next(gen_output)[0, 0, 0, 0])
    self.assertEqual(acc, list(range(100)))
    enqueuer.stop()
  def test_ordered_enqueuer_fail_threads(self):
    enqueuer = keras.utils.data_utils.OrderedEnqueuer(
        FaultSequence(), use_multiprocessing=False)
    enqueuer.start(3, 10)
    gen_output = enqueuer.get()
    with self.assertRaises(IndexError):
      next(gen_output)
  def test_ordered_enqueuer_fail_processes(self):
    enqueuer = keras.utils.data_utils.OrderedEnqueuer(
        FaultSequence(), use_multiprocessing=True)
    enqueuer.start(3, 10)
    gen_output = enqueuer.get()
    with self.assertRaises(IndexError):
      next(gen_output)
  def test_on_epoch_end_processes(self):
    # After one full epoch (100 items) on_epoch_end multiplies the scale by
    # 5, so the second epoch's values must be 5x their index.
    enqueuer = keras.utils.data_utils.OrderedEnqueuer(
        TestSequence([3, 200, 200, 3]), use_multiprocessing=True)
    enqueuer.start(3, 10)
    gen_output = enqueuer.get()
    acc = []
    for _ in range(200):
      acc.append(next(gen_output)[0, 0, 0, 0])
    # Check that order was keep in GeneratorEnqueuer with processes
    self.assertEqual(acc[100:], list([k * 5 for k in range(100)]))
    enqueuer.stop()
  def test_context_switch(self):
    # Two enqueuers running concurrently must not interfere: each keeps its
    # own Sequence (second one scaled by 15) and its own epoch counter.
    enqueuer = keras.utils.data_utils.OrderedEnqueuer(
        TestSequence([3, 200, 200, 3]), use_multiprocessing=True)
    enqueuer2 = keras.utils.data_utils.OrderedEnqueuer(
        TestSequence([3, 200, 200, 3], value=15), use_multiprocessing=True)
    enqueuer.start(3, 10)
    enqueuer2.start(3, 10)
    gen_output = enqueuer.get()
    gen_output2 = enqueuer2.get()
    acc = []
    for _ in range(100):
      acc.append(next(gen_output)[0, 0, 0, 0])
    self.assertEqual(acc[-1], 99)
    # One epoch is completed so enqueuer will switch the Sequence
    acc = []
    for _ in range(100):
      acc.append(next(gen_output2)[0, 0, 0, 0])
    self.assertEqual(acc[-1], 99 * 15)
    # One epoch has been completed so enqueuer2 will switch
    # Be sure that both Sequence were updated
    self.assertEqual(next(gen_output)[0, 0, 0, 0], 0)
    self.assertEqual(next(gen_output)[0, 0, 0, 0], 5)
    self.assertEqual(next(gen_output2)[0, 0, 0, 0], 0)
    self.assertEqual(next(gen_output2)[0, 0, 0, 0], 15 * 5)
    # Tear down everything
    enqueuer.stop()
    enqueuer2.stop()
  def test_on_epoch_end_threads(self):
    # Thread-backed variant of the on_epoch_end scaling check.
    enqueuer = keras.utils.data_utils.OrderedEnqueuer(
        TestSequence([3, 200, 200, 3]), use_multiprocessing=False)
    enqueuer.start(3, 10)
    gen_output = enqueuer.get()
    acc = []
    for _ in range(100):
      acc.append(next(gen_output)[0, 0, 0, 0])
    acc = []
    for _ in range(100):
      acc.append(next(gen_output)[0, 0, 0, 0])
    # Check that order was keep in GeneratorEnqueuer with processes
    self.assertEqual(acc, list([k * 5 for k in range(100)]))
    enqueuer.stop()
if __name__ == '__main__':
  # Bazel sets these environment variables to very long paths.
  # Tempfile uses them to create long paths, and in turn multiprocessing
  # library tries to create sockets named after paths. Delete whatever bazel
  # writes to these to avoid tests failing due to socket addresses being too
  # long. With these unset, tempfile falls back to the platform default.
  for var in ('TMPDIR', 'TMP', 'TEMP'):
    if var in os.environ:
      del os.environ[var]
  test.main()
|
apache-2.0
|
n3wb13/OpenNfrGui-5.0-1
|
lib/python/Screens/OScamInfo.py
|
1
|
41624
|
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.ChoiceBox import ChoiceBox
from Components.ActionMap import ActionMap, NumberActionMap
from Components.Sources.List import List
from Components.Sources.StaticText import StaticText
from Components.config import config, configfile, getConfigListEntry
from Components.ConfigList import ConfigList, ConfigListScreen
from Components.MenuList import MenuList
from Tools.LoadPixmap import LoadPixmap
from enigma import eTimer, RT_HALIGN_LEFT, eListboxPythonMultiContent, gFont, getDesktop, eSize, ePoint
from xml.etree import ElementTree
from operator import itemgetter
import os, time
import urllib2
# Choose skin dimensions from the framebuffer width: widescreen (>1280px)
# desktops get nearly the full width (HDSKIN), otherwise a fixed 720px layout.
fb = getDesktop(0).size()
if fb.width() > 1280:
	sizeH = fb.width() - 100
	HDSKIN = True
else:
	# sizeH = fb.width() - 50
	sizeH = 720
	HDSKIN = False
class OscamInfo:
	def __init__(self):
		# Stateless on construction; configuration is read lazily
		# (see confPath/getUserData/openWebIF).
		pass
TYPE = 0
NAME = 1
PROT = 2
CAID_SRVID = 3
SRVNAME = 4
ECMTIME = 5
IP_PORT = 6
HEAD = { NAME: _("Label"), PROT: _("Protocol"),
CAID_SRVID: "CAID:SrvID", SRVNAME: _("Serv.Name"),
ECMTIME: _("ECM-Time"), IP_PORT: _("IP address") }
version = ""
	def confPath(self):
		"""Locate oscam.conf by scanning /usr, /var and /etc with `find`.

		Returns the path as a string, or None when no file was found.
		"""
		search_dirs = [ "/usr", "/var", "/etc" ]
		sdirs = " ".join(search_dirs)
		cmd = 'find %s -name "oscam.conf"' % sdirs
		res = os.popen(cmd).read()
		if res == "":
			return None
		else:
			# Strip newlines from find's output. NOTE(review): if several
			# copies exist their paths get concatenated -- presumably only
			# one OSCam install is expected on the box.
			return res.replace("\n", "")
def getUserData(self):
err = ""
self.oscamconf = self.confPath()
self.username = ""
self.password = ""
if self.oscamconf is not None:
data = open(self.oscamconf, "r").readlines()
webif = False
httpuser = httppwd = httpport = False
for i in data:
if "[webif]" in i.lower():
webif = True
elif "httpuser" in i.lower():
httpuser = True
user = i.split("=")[1].strip()
elif "httppwd" in i.lower():
httppwd = True
pwd = i.split("=")[1].strip()
elif "httpport" in i.lower():
httpport = True
port = i.split("=")[1].strip()
self.port = port
if not webif:
err = _("There is no [webif] section in oscam.conf")
elif not httpuser:
err = _("No httpuser defined in oscam.conf")
elif not httppwd:
err = _("No httppwd defined in oscam.conf")
elif not httpport:
err = _("No httpport defined in oscam.conf. This value is required!")
if err != "":
return err
else:
return user, pwd, port
else:
return _("file oscam.conf could not be found")
	def openWebIF(self, part = None, reader = None):
		"""Fetch an oscamapi.html page from the OSCam web interface.

		part/reader select the API part and reader label in the query
		string. Returns (True, data) on success, (False, error) otherwise.
		"""
		if config.oscaminfo.userdatafromconf.getValue():
			# Credentials come from the local oscam.conf; host is localhost.
			self.ip = "127.0.0.1"
			udata = self.getUserData()
			if isinstance(udata, str):
				# getUserData returned an error string instead of a tuple;
				# missing user/pwd entries are tolerated as empty strings.
				if "httpuser" in udata:
					self.username=""
				elif "httppwd" in udata:
					self.password = ""
				else:
					return False, udata
			else:
				self.port = udata[2]
				self.username = udata[0]
				self.password = udata[1]
		else:
			# Credentials configured manually in the plugin setup screen.
			self.ip = ".".join("%d" % d for d in config.oscaminfo.ip.getValue())
			self.port = config.oscaminfo.port.getValue()
			self.username = config.oscaminfo.username.getValue()
			self.password = config.oscaminfo.password.getValue()
		# Build the request URL; the reader variant overrides the plain one.
		if part is None:
			self.url = "http://%s:%s/oscamapi.html?part=status" % ( self.ip, self.port )
		else:
			self.url = "http://%s:%s/oscamapi.html?part=%s" % (self.ip, self.port, part )
		if part is not None and reader is not None:
			self.url = "http://%s:%s/oscamapi.html?part=%s&label=%s" % ( self.ip, self.port, part, reader )
		print "URL=%s" % self.url
		# OSCam's webif uses HTTP digest authentication.
		pwman = urllib2.HTTPPasswordMgrWithDefaultRealm()
		pwman.add_password( None, self.url, self.username, self.password )
		handlers = urllib2.HTTPDigestAuthHandler( pwman )
		opener = urllib2.build_opener( urllib2.HTTPHandler, handlers )
		urllib2.install_opener( opener )
		request = urllib2.Request( self.url )
		err = False
		try:
			data = urllib2.urlopen( request ).read()
			# print data
		except urllib2.URLError, e:
			if hasattr(e, "reason"):
				err = str(e.reason)
			elif hasattr(e, "code"):
				err = str(e.code)
		if err is not False:
			print "[openWebIF] Fehler: %s" % err
			return False, err
		else:
			return True, data
	def readXML(self, typ):
		"""Fetch and parse OSCam status data from the web API.

		typ -- "c" (clients), "s" (servers/readers/proxies) or "l" (log).
		For "c"/"s" returns a list of per-client tuples
		(name, protocol, "caid:srvid", service, ecm-time, ip, status);
		for "l" returns the log as a list of cleaned-up text lines.
		On web-API failure the error string is returned instead of a list.
		"""
		if typ == "l":
			self.showLog = True
			part = "status&appendlog=1"  # ask the API to append the log to the status page
		else:
			self.showLog = False
			part = None
		result = self.openWebIF(part)
		retval = []
		# tmp collects client tuples keyed by client type ("c"/"p"/"r");
		# in the log branch it is reused to hold the (patched) XML string.
		tmp = {}
		if result[0]:
			if not self.showLog:
				data = ElementTree.XML(result[1])
#				if typ=="version":
#					if data.attrib.has_key("version"):
#						self.version = data.attrib["version"]
#					else:
#						self.version = "n/a"
#					return self.version
				status = data.find("status")
				clients = status.findall("client")
				for cl in clients:
					name = cl.attrib["name"]
					proto = cl.attrib["protocol"]
					if cl.attrib.has_key("au"):
						au = cl.attrib["au"]  # AU flag; parsed but currently unused
					else:
						au = ""
					caid = cl.find("request").attrib["caid"]
					srvid = cl.find("request").attrib["srvid"]
					if cl.find("request").attrib.has_key("ecmtime"):
						ecmtime = cl.find("request").attrib["ecmtime"]
						if ecmtime == "0" or ecmtime == "":
							ecmtime = "n/a"
						else:
							# ecmtime is reported in ms; show seconds, max 5 chars
							ecmtime = str(float(ecmtime) / 1000)[:5]
					else:
						ecmtime = "not available"
					srvname = cl.find("request").text
					if srvname is not None:
						if ":" in srvname:
							# text looks like "provider: service"; keep the service part
							srvname_short = srvname.split(":")[1].strip()
						else:
							srvname_short = srvname
					else:
						srvname_short = "n/A"
					# login/online/port are parsed but not part of the result tuple
					login = cl.find("times").attrib["login"]
					online = cl.find("times").attrib["online"]
					if proto.lower() == "dvbapi":
						ip = ""  # local dvbapi client has no meaningful address
					else:
						ip = cl.find("connection").attrib["ip"]
						if ip == "0.0.0.0":
							ip = ""
					port = cl.find("connection").attrib["port"]
					connstatus = cl.find("connection").text
					if name != "" and name != "anonymous" and proto != "":
						try:
							tmp[cl.attrib["type"]].append( (name, proto, "%s:%s" % (caid, srvid), srvname_short, ecmtime, ip, connstatus) )
						except KeyError:
							# first client of this type: create the bucket
							tmp[cl.attrib["type"]] = []
							tmp[cl.attrib["type"]].append( (name, proto, "%s:%s" % (caid, srvid), srvname_short, ecmtime, ip, connstatus) )
			else:
				# Log output may contain characters that break XML parsing;
				# wrap it in CDATA unless the API already did so.
				if "<![CDATA" not in result[1]:
					tmp = result[1].replace("<log>", "<log><![CDATA[").replace("</log>", "]]></log>")
				else:
					tmp = result[1]
				data = ElementTree.XML(tmp)
				log = data.find("log")
				logtext = log.text
			if typ == "s":
				# servers view: readers ("r") first, then proxies ("p")
				if tmp.has_key("r"):
					for i in tmp["r"]:
						retval.append(i)
				if tmp.has_key("p"):
					for i in tmp["p"]:
						retval.append(i)
			elif typ == "c":
				if tmp.has_key("c"):
					for i in tmp["c"]:
						retval.append(i)
			elif typ == "l":
				tmp = logtext.split("\n")
				retval = []
				for i in tmp:
					tmp2 = i.split(" ")
					if len(tmp2) > 2:
						# drop the third whitespace-separated field to shorten the line
						del tmp2[2]
						txt = ""
						for j in tmp2:
							txt += "%s " % j.strip()
						retval.append( txt )
			return retval
		else:
			# propagate the error text from openWebIF
			return result[1]
def getVersion(self):
xmldata = self.openWebIF()
if xmldata[0]:
data = ElementTree.XML(xmldata[1])
if data.attrib.has_key("version"):
self.version = data.attrib["version"]
else:
self.version = "n/a"
return self.version
else:
self.version = "n/a"
return self.version
def getTotalCards(self, reader):
xmldata = self.openWebIF(part = "entitlement", reader = reader)
if xmldata[0]:
xmld = ElementTree.XML(xmldata[1])
cards = xmld.find("reader").find("cardlist")
cardTotal = cards.attrib["totalcards"]
return cardTotal
else:
return None
def getReaders(self, spec = None):
xmldata = self.openWebIF()
readers = []
if xmldata[0]:
data = ElementTree.XML(xmldata[1])
status = data.find("status")
clients = status.findall("client")
for cl in clients:
if cl.attrib.has_key("type"):
if cl.attrib["type"] == "p" or cl.attrib["type"] == "r":
if spec is not None:
proto = cl.attrib["protocol"]
if spec in proto:
name = cl.attrib["name"]
cards = self.getTotalCards(name)
readers.append( ( "%s ( %s Cards )" % (name, cards), name) )
else:
if cl.attrib["name"] != "" and cl.attrib["name"] != "" and cl.attrib["protocol"] != "":
readers.append( (cl.attrib["name"], cl.attrib["name"]) ) # return tuple for later use in Choicebox
return readers
else:
return None
def getClients(self):
xmldata = self.openWebIF()
clientnames = []
if xmldata[0]:
data = ElementTree.XML(xmldata[1])
status = data.find("status")
clients = status.findall("client")
for cl in clients:
if cl.attrib.has_key("type"):
if cl.attrib["type"] == "c":
readers.append( (cl.attrib["name"], cl.attrib["name"]) ) # return tuple for later use in Choicebox
return clientnames
else:
return None
def getECMInfo(self, ecminfo):
result = []
if os.path.exists(ecminfo):
data = open(ecminfo, "r").readlines()
for i in data:
if "caid" in i:
result.append( ("CAID", i.split(":")[1].strip()) )
elif "pid" in i:
result.append( ("PID", i.split(":")[1].strip()) )
elif "prov" in i:
result.append( (_("Provider"), i.split(":")[1].strip()) )
elif "reader" in i:
result.append( ("Reader", i.split(":")[1].strip()) )
elif "from" in i:
result.append( (_("Address"), i.split(":")[1].strip()) )
elif "protocol" in i:
result.append( (_("Protocol"), i.split(":")[1].strip()) )
elif "hops" in i:
result.append( ("Hops", i.split(":")[1].strip()) )
elif "ecm time" in i:
result.append( (_("ECM Time"), i.split(":")[1].strip()) )
return result
else:
return "%s not found" % self.ecminfo
class oscMenuList(MenuList):
	"""MenuList used by the OscamInfo screens.

	Chooses font sizes matching the skin resolution (FullHD vs SD/HD);
	font slot 2 is also exposed as self.clientFont for row builders.
	"""
	def __init__(self, list, itemH = 25):
		MenuList.__init__(self, list, False, eListboxPythonMultiContent)
		self.l.setItemHeight(itemH)
		if getDesktop(0).size().width() == 1920:
			sizes = (28, 24, 22, 22)
		else:
			sizes = (20, 16, 14, 12)
		self.l.setFont(0, gFont("Regular", sizes[0]))
		self.l.setFont(1, gFont("Regular", sizes[1]))
		self.clientFont = gFont("Regular", sizes[2])
		self.l.setFont(2, self.clientFont)
		self.l.setFont(3, gFont("Regular", sizes[3]))
class OscamInfoMenu(Screen):
def __init__(self, session):
self.session = session
self.menu = [ _("Show /tmp/ecm.info"), _("Show Clients"), _("Show Readers/Proxies"), _("Show Log"), _("Card infos (CCcam-Reader)"), _("ECM Statistics"), _("Setup") ]
Screen.__init__(self, session)
self.osc = OscamInfo()
self["mainmenu"] = oscMenuList([])
self["actions"] = NumberActionMap(["OkCancelActions", "InputActions", "ColorActions"],
{
"ok": self.ok,
"cancel": self.exit,
"red": self.red,
"green": self.green,
"yellow": self.yellow,
"blue": self.blue,
"1": self.keyNumberGlobal,
"2": self.keyNumberGlobal,
"3": self.keyNumberGlobal,
"4": self.keyNumberGlobal,
"5": self.keyNumberGlobal,
"6": self.keyNumberGlobal,
"7": self.keyNumberGlobal,
"8": self.keyNumberGlobal,
"9": self.keyNumberGlobal,
"0": self.keyNumberGlobal,
"up": self.up,
"down": self.down
}, -1)
self.onLayoutFinish.append(self.showMenu)
def ok(self):
selected = self["mainmenu"].getSelectedIndex()
self.goEntry(selected)
def cancel(self):
self.close()
def exit(self):
self.close()
def keyNumberGlobal(self, num):
if num == 0:
numkey = 10
else:
numkey = num
if numkey < len(self.menu) - 3:
self["mainmenu"].moveToIndex(numkey + 3)
self.goEntry(numkey + 3)
def red(self):
self["mainmenu"].moveToIndex(0)
self.goEntry(0)
def green(self):
self["mainmenu"].moveToIndex(1)
self.goEntry(1)
def yellow(self):
self["mainmenu"].moveToIndex(2)
self.goEntry(2)
def blue(self):
self["mainmenu"].moveToIndex(3)
self.goEntry(3)
def up(self):
pass
def down(self):
pass
def goEntry(self, entry):
if entry == 0:
if os.path.exists("/tmp/ecm.info"):
self.session.open(oscECMInfo)
else:
pass
elif entry == 1:
if config.oscaminfo.userdatafromconf.getValue():
if self.osc.confPath() is None:
config.oscaminfo.userdatafromconf.setValue(False)
config.oscaminfo.userdatafromconf.save()
self.session.openWithCallback(self.ErrMsgCallback, MessageBox, _("File oscam.conf not found.\nPlease enter username/password manually."), MessageBox.TYPE_ERROR)
else:
self.session.open(oscInfo, "c")
else:
self.session.open(oscInfo, "c")
elif entry == 2:
if config.oscaminfo.userdatafromconf.getValue():
if self.osc.confPath() is None:
config.oscaminfo.userdatafromconf.setValue(False)
config.oscaminfo.userdatafromconf.save()
self.session.openWithCallback(self.ErrMsgCallback, MessageBox, _("File oscam.conf not found.\nPlease enter username/password manually."), MessageBox.TYPE_ERROR)
else:
self.session.open(oscInfo, "s")
else:
self.session.open(oscInfo, "s")
elif entry == 3:
if config.oscaminfo.userdatafromconf.getValue():
if self.osc.confPath() is None:
config.oscaminfo.userdatafromconf.setValue(False)
config.oscaminfo.userdatafromconf.save()
self.session.openWithCallback(self.ErrMsgCallback, MessageBox, _("File oscam.conf not found.\nPlease enter username/password manually."), MessageBox.TYPE_ERROR)
else:
self.session.open(oscInfo, "l")
else:
self.session.open(oscInfo, "l")
elif entry == 4:
osc = OscamInfo()
reader = osc.getReaders("cccam") # get list of available CCcam-Readers
if isinstance(reader, list):
if len(reader) == 1:
self.session.open(oscEntitlements, reader[0][1])
else:
self.callbackmode = "cccam"
self.session.openWithCallback(self.chooseReaderCallback, ChoiceBox, title = _("Please choose CCcam-Reader"), list=reader)
elif entry == 5:
osc = OscamInfo()
reader = osc.getReaders()
if reader is not None:
reader.append( ("All", "all") )
if isinstance(reader, list):
if len(reader) == 1:
self.session.open(oscReaderStats, reader[0][1])
else:
self.callbackmode = "readers"
self.session.openWithCallback(self.chooseReaderCallback, ChoiceBox, title = _("Please choose reader"), list=reader)
elif entry == 6:
self.session.open(OscamInfoConfigScreen)
def chooseReaderCallback(self, retval):
print retval
if retval is not None:
if self.callbackmode == "cccam":
self.session.open(oscEntitlements, retval[1])
else:
self.session.open(oscReaderStats, retval[1])
def ErrMsgCallback(self, retval):
print retval
self.session.open(OscamInfoConfigScreen)
def buildMenu(self, mlist):
keys = ["red", "green", "yellow", "blue", "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", ""]
menuentries = []
y = 0
for x in mlist:
res = [ x ]
if x.startswith("--"):
png = LoadPixmap("/usr/share/enigma2/skin_default/div-h.png")
if png is not None:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP, 10,0,360, 2, png))
res.append((eListboxPythonMultiContent.TYPE_TEXT, 45, 3, 800, 25, 0, RT_HALIGN_LEFT, x[2:]))
png2 = LoadPixmap("/usr/share/enigma2/skin_default/buttons/key_" + keys[y] + ".png")
if png2 is not None:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 5, 3, 35, 25, png2))
else:
if getDesktop(0).size().width() == 1920:
res.append((eListboxPythonMultiContent.TYPE_TEXT, 60, 00, 800, 35, 0, RT_HALIGN_LEFT, x))
png2 = LoadPixmap("/usr/share/enigma2/SkalliHD-NFR-FullHD/buttons/key_" + keys[y] + ".png")
if png2 is not None:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 5, 2, 35, 35, png2))
else:
res.append((eListboxPythonMultiContent.TYPE_TEXT, 45, 5, 800, 25, 0, RT_HALIGN_LEFT, x))
png2 = LoadPixmap("/usr/share/enigma2/skin_default/buttons/key_" + keys[y] + ".png")
if png2 is not None:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 5, 2, 35, 30, png2))
menuentries.append(res)
if y < len(keys) - 1:
y += 1
return menuentries
def showMenu(self):
entr = self.buildMenu(self.menu)
self.setTitle(_("Oscam Info - Main Menu"))
self["mainmenu"].l.setList(entr)
self["mainmenu"].moveToIndex(0)
class oscECMInfo(Screen, OscamInfo):
	"""Displays the parsed content of /tmp/ecm.info, optionally auto-refreshing."""
	def __init__(self, session):
		Screen.__init__(self, session)
		self.ecminfo = "/tmp/ecm.info"
		self["output"] = oscMenuList([])
		if config.oscaminfo.autoupdate.getValue():
			# Periodically refresh the display.
			self.loop = eTimer()
			self.loop.callback.append(self.showData)
			timeout = config.oscaminfo.intervall.getValue() * 1000
			self.loop.start(timeout, False)
		self["actions"] = ActionMap(["OkCancelActions"],
					{
						"ok": self.exit,
						"cancel": self.exit
					}, -1)
		self.onLayoutFinish.append(self.showData)

	def exit(self):
		if config.oscaminfo.autoupdate.getValue():
			self.loop.stop()
		self.close()

	def buildListEntry(self, listentry):
		"""Two-column MultiContent row: (label, value)."""
		return [
			None,
			(eListboxPythonMultiContent.TYPE_TEXT, 10, 00, 300, 30, 0, RT_HALIGN_LEFT, listentry[0]),
			(eListboxPythonMultiContent.TYPE_TEXT, 300, 00, 300, 30, 0, RT_HALIGN_LEFT, listentry[1])
			]

	def showData(self):
		"""(Re)read the ecm.info file and display its fields."""
		data = self.getECMInfo(self.ecminfo)
		if isinstance(data, str):
			# Bugfix: getECMInfo returns an error *string* when the file is
			# missing; the original iterated over its characters. Show it
			# as a single row instead.
			data = [ (_("Error"), data) ]
		out = [ self.buildListEntry(i) for i in data ]
		self["output"].l.setItemHeight(35)
		self["output"].l.setList(out)
		self["output"].selectionEnabled(True)
class oscInfo(Screen, OscamInfo):
	"""Scrollable OSCam client/server/log view.

	*what* selects the view: "c" = clients, "s" = servers/readers,
	"l" = log. The skin XML is generated at runtime so the window can
	grow with the number of result rows; the colour keys switch views.
	"""
	def __init__(self, session, what):
		global HDSKIN, sizeH
		self.session = session
		self.what = what
		self.firstrun = True
		# Fetch once up front so the initial window height fits the data.
		self.webif_data = self.readXML(typ = self.what)
		entry_count = len( self.webif_data )
		ysize = (entry_count + 4) * 25
		ypos = 10
		self.sizeLH = sizeH - 20
		self.skin = """<screen position="center,center" size="%d, %d" title="Client Info" >""" % (sizeH, ysize)
		button_width = int(sizeH / 4)
		for k, v in enumerate(["red", "green", "yellow", "blue"]):
			xpos = k * button_width
			self.skin += """<ePixmap name="%s" position="%d,%d" size="35,25" pixmap="/usr/share/enigma2/skin_default/buttons/key_%s.png" zPosition="1" transparent="1" alphatest="on" />""" % (v, xpos, ypos, v)
			self.skin += """<widget source="key_%s" render="Label" position="%d,%d" size="%d,%d" font="Regular;16" zPosition="1" valign="center" transparent="1" />""" % (v, xpos + 40, ypos, button_width, 20)
		self.skin +="""<ePixmap name="divh" position="0,37" size="%d,2" pixmap="/usr/share/enigma2/skin_default/div-h.png" transparent="1" alphatest="on" />""" % sizeH
		self.skin +="""<widget name="output" position="10,45" size="%d,%d" zPosition="1" />""" % ( self.sizeLH, ysize)
		self.skin += """</screen>"""
		Screen.__init__(self, session)
		self.mlist = oscMenuList([])
		self["output"] = self.mlist
		self.errmsg = ""
		self["key_red"] = StaticText(_("Close"))
		# The colour key for the currently shown view is blanked.
		if self.what == "c":
			self["key_green"] = StaticText("")
			self["key_yellow"] = StaticText("Servers")
			self["key_blue"] = StaticText("Log")
		elif self.what == "s":
			self["key_green"] = StaticText("Clients")
			self["key_yellow"] = StaticText("")
			self["key_blue"] = StaticText("Log")
		elif self.what == "l":
			self["key_green"] = StaticText("Clients")
			self["key_yellow"] = StaticText("Servers")
			self["key_blue"] = StaticText("")
		else:
			self["key_green"] = StaticText("Clients")
			self["key_yellow"] = StaticText("Servers")
			self["key_blue"] = StaticText("Log")
		self.fieldSizes = []
		self.fs2 = {}
		if config.oscaminfo.autoupdate.getValue():
			self.loop = eTimer()
			self.loop.callback.append(self.showData)
			timeout = config.oscaminfo.intervall.getValue() * 1000
			self.loop.start(timeout, False)
		self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
					{
						"ok": self.showData,
						"cancel": self.exit,
						"red": self.exit,
						"green": self.key_green,
						"yellow": self.key_yellow,
						"blue": self.key_blue
					}, -1)
		self.onLayoutFinish.append(self.showData)

	def key_green(self):
		# Switch to the clients view (no-op when already shown).
		if self.what != "c":
			self.what = "c"
			self.showData()

	def key_yellow(self):
		# Switch to the servers view.
		if self.what != "s":
			self.what = "s"
			self.showData()

	def key_blue(self):
		# Switch to the log view.
		if self.what != "l":
			self.what = "l"
			self.showData()

	def exit(self):
		if config.oscaminfo.autoupdate.getValue():
			self.loop.stop()
		self.close()

	def buildListEntry(self, listentry, heading = False):
		"""Build a MultiContent row for one client tuple.

		The last element of *listentry* is the connection status and is
		used only for colour-coding; the remaining elements become columns.
		"""
		res = [ None ]
		x = 0
		if not HDSKIN:
			self.fieldsize = [ 100, 130, 100, 150, 80, 130 ]
			self.startPos = [ 10, 110, 240, 340, 490, 570 ]
			useFont = 3
		else:
			self.fieldsize = [ 150, 250, 130, 200, 100, 180 ]
			self.startPos = [ 50, 400, 900, 1030, 1330, 1600 ]
			useFont = 1
		if isinstance(self.errmsg, tuple):
			useFont = 0	# overrides previous font-size in case of an error message. (if self.errmsg is a tuple, an error occurred which will be displayed instead of regular results
		colour = "0xffffff"
		if not heading:
			status = listentry[len(listentry)-1]
			# Bugfix: the original tested `status == "OK" or "CONNECTED" or ...`,
			# which is always true (the bare string literal is truthy), and
			# matched "CARDOK" in both the green and the yellow branch.
			if status in ("OK", "CONNECTED", "CARDOK"):
				colour = "0x389416"	# green
			elif status == "NEEDINIT":
				colour = "0xbab329"	# yellow
			elif status in ("OFF", "ERROR"):
				colour = "0xf23d21"	# red
		rowheight = 40 if getDesktop(0).size().width() == 1920 else 20
		for i in listentry[:-1]:
			res.append( (eListboxPythonMultiContent.TYPE_TEXT, self.startPos[x], 0, self.fieldsize[x], rowheight, useFont, RT_HALIGN_LEFT, i, int(colour, 16)) )
			x += 1
		if heading:
			# Underline the heading row with a divider pixmap.
			pos = 19
			res.append( (eListboxPythonMultiContent.TYPE_PIXMAP, 0, pos, self.sizeLH, useFont, LoadPixmap("/usr/share/enigma2/skin_default/div-h.png")))
		return res

	def buildLogListEntry(self, listentry):
		"""Build a one-column MultiContent row per non-empty log line."""
		res = [ None ]
		for i in listentry:
			# Bugfix: the original condition used `or`, which accepted every
			# entry (including empty strings); skip them instead.
			if i is not None and i.strip() != "":
				if getDesktop(0).size().width() == 1920:
					res.append( (eListboxPythonMultiContent.TYPE_TEXT, 5, 0, self.sizeLH,34, 2, RT_HALIGN_LEFT, i) )
				else:
					res.append( (eListboxPythonMultiContent.TYPE_TEXT, 5, 0, self.sizeLH,14, 2, RT_HALIGN_LEFT, i) )
		return res

	def calcSizes(self, entries):
		"""Collect the (fixed) column widths for the given rows.

		NOTE(review): the widths come from a constant table, so the result
		does not actually depend on the row contents - kept for
		compatibility with existing callers.
		"""
		self.fs2 = {}
		colSize = [ 100, 200, 150, 200, 150, 100 ]
		for h in entries:
			for i, j in enumerate(h[:-1]):
				try:
					self.fs2[i].append(colSize[i])
				except KeyError:
					self.fs2[i] = []
					self.fs2[i].append(colSize[i])
		sizes = []
		for i in self.fs2.keys():
			sizes.append(self.fs2[i])
		return sizes

	def changeScreensize(self, new_height, new_width = None):
		"""Resize and re-center the window, then refresh the colour-key
		labels and row height for the current view/resolution."""
		if new_width is None:
			new_width = sizeH
		self.instance.resize(eSize(new_width, new_height))
		fb = getDesktop(0).size()
		new_posY = int(( fb.height() / 2 ) - ( new_height / 2 ))
		x = int( ( fb.width() - sizeH ) / 2 )
		self.instance.move(ePoint(x, new_posY))
		self["output"].resize(eSize(self.sizeLH, new_height - 20))
		self["key_red"].setText(_("Close"))
		if getDesktop(0).size().width() == 1920:
			if self.what == "c":
				self["key_green"].setText("")
				self["key_yellow"].setText("Servers")
				self["key_blue"].setText("Log")
				self["output"].l.setItemHeight(40)
			elif self.what == "s":
				self["key_green"].setText("Clients")
				self["key_yellow"].setText("")
				self["key_blue"].setText("Log")
				self["output"].l.setItemHeight(40)
			elif self.what == "l":
				self["key_green"].setText("Clients")
				self["key_yellow"].setText("Servers")
				self["key_blue"].setText("")
				self["output"].l.setItemHeight(20)
			else:
				self["key_green"].setText("Clients")
				self["key_yellow"].setText("Servers")
				self["key_blue"].setText("Log")
		if getDesktop(0).size().width() == 720:
			if self.what == "c":
				self["key_green"].setText("")
				self["key_yellow"].setText("Servers")
				self["key_blue"].setText("Log")
				self["output"].l.setItemHeight(20)
			elif self.what == "s":
				self["key_green"].setText("Clients")
				self["key_yellow"].setText("")
				self["key_blue"].setText("Log")
				self["output"].l.setItemHeight(20)
			elif self.what == "l":
				self["key_green"].setText("Clients")
				self["key_yellow"].setText("Servers")
				self["key_blue"].setText("")
				self["output"].l.setItemHeight(14)
			else:
				self["key_green"].setText("Clients")
				self["key_yellow"].setText("Servers")
				self["key_blue"].setText("Log")

	def showData(self):
		"""Fetch (or reuse the initial) data and render the current view."""
		if self.firstrun:
			# Reuse the data fetched in __init__ for the first paint.
			data = self.webif_data
			self.firstrun = False
		else:
			data = self.readXML(typ = self.what)
		if not isinstance(data,str):
			out = []
			if self.what != "l":
				heading = ( self.HEAD[self.NAME], self.HEAD[self.PROT], self.HEAD[self.CAID_SRVID],
					self.HEAD[self.SRVNAME], self.HEAD[self.ECMTIME], self.HEAD[self.IP_PORT], "")
				outlist = [heading]
				for i in data:
					outlist.append( i )
				self.fieldsize = self.calcSizes(outlist)
				out = [ self.buildListEntry(heading, heading=True)]
				for i in data:
					out.append(self.buildListEntry(i))
			else:
				for i in data:
					if i != "":
						out.append( self.buildLogListEntry( (i,) ))
			if getDesktop(0).size().width() == 1920:
				ysize = (len(out) + 4 ) * 80
			else:
				ysize = (len(out) + 4 ) * 25
			if self.what == "c":
				self.changeScreensize( ysize )
				self.setTitle("Client Info ( Oscam-Version: %s )" % self.getVersion())
			elif self.what == "s":
				self.changeScreensize( ysize )
				self.setTitle("Server Info( Oscam-Version: %s )" % self.getVersion())
			elif self.what == "l":
				# Log view uses a fixed window height.
				if getDesktop(0).size().width() == 1920:
					self.changeScreensize( 980 )
				else:
					self.changeScreensize( 500 )
				self.setTitle("Oscam Log ( Oscam-Version: %s )" % self.getVersion())
			self["output"].l.setList(out)
			self["output"].selectionEnabled(False)
		else:
			# readXML returned an error string: show it and stop auto-update.
			self.errmsg = (data,)
			if config.oscaminfo.autoupdate.getValue():
				self.loop.stop()
			out = []
			self.fieldsize = self.calcSizes( [(data,)] )
			for i in self.errmsg:
				out.append( self.buildListEntry( (i,) ))
			if getDesktop(0).size().width() == 1920:
				ysize = (len(out) + 4 ) * 60
			else:
				ysize = (len(out) + 4 ) * 25
			self.changeScreensize( ysize )
			self.setTitle(_("Error") + data)
			self["output"].l.setList(out)
			self["output"].selectionEnabled(False)
class oscEntitlements(Screen, OscamInfo):
	"""Shows the CCcam card/entitlement list of one reader, grouped by CAID."""
	global HDSKIN, sizeH
	sizeLH = sizeH - 20
	skin = """<screen position="center,center" size="%s, 400" title="Client Info" >
			<widget source="output" render="Listbox" position="10,10" size="%s,400" scrollbarMode="showOnDemand" >
				<convert type="TemplatedMultiContent">
				{"templates":
					{"default": (55,[
							MultiContentEntryText(pos = (0, 1), size = (80, 24), font=0, flags = RT_HALIGN_LEFT, text = 0), # index 0 is caid
							MultiContentEntryText(pos = (90, 1), size = (150, 24), font=0, flags = RT_HALIGN_LEFT, text = 1), # index 1 is csystem
							MultiContentEntryText(pos = (250, 1), size = (40, 24), font=0, flags = RT_HALIGN_LEFT, text = 2), # index 2 is hop 1
							MultiContentEntryText(pos = (290, 1), size = (40, 24), font=0, flags = RT_HALIGN_LEFT, text = 3), # index 3 is hop 2
							MultiContentEntryText(pos = (330, 1), size = (40, 24), font=0, flags = RT_HALIGN_LEFT, text = 4), # index 4 is hop 3
							MultiContentEntryText(pos = (370, 1), size = (40, 24), font=0, flags = RT_HALIGN_LEFT, text = 5), # index 5 is hop 4
							MultiContentEntryText(pos = (410, 1), size = (40, 24), font=0, flags = RT_HALIGN_LEFT, text = 6), # index 6 is hop 5
							MultiContentEntryText(pos = (480, 1), size = (70, 24), font=0, flags = RT_HALIGN_LEFT, text = 7), # index 7 is sum of cards for caid
							MultiContentEntryText(pos = (550, 1), size = (80, 24), font=0, flags = RT_HALIGN_LEFT, text = 8), # index 8 is reshare
							MultiContentEntryText(pos = (0, 25), size = (700, 24), font=1, flags = RT_HALIGN_LEFT, text = 9), # index 9 is providers
							]),
					"HD": (55,[
							MultiContentEntryText(pos = (0, 1), size = (80, 24), font=0, flags = RT_HALIGN_LEFT, text = 0), # index 0 is caid
							MultiContentEntryText(pos = (90, 1), size = (150, 24), font=0, flags = RT_HALIGN_LEFT, text = 1), # index 1 is csystem
							MultiContentEntryText(pos = (250, 1), size = (40, 24), font=0, flags = RT_HALIGN_LEFT, text = 2), # index 2 is hop 1
							MultiContentEntryText(pos = (290, 1), size = (40, 24), font=0, flags = RT_HALIGN_LEFT, text = 3), # index 3 is hop 2
							MultiContentEntryText(pos = (330, 1), size = (40, 24), font=0, flags = RT_HALIGN_LEFT, text = 4), # index 4 is hop 3
							MultiContentEntryText(pos = (370, 1), size = (40, 24), font=0, flags = RT_HALIGN_LEFT, text = 5), # index 5 is hop 4
							MultiContentEntryText(pos = (410, 1), size = (40, 24), font=0, flags = RT_HALIGN_LEFT, text = 6), # index 6 is hop 5
							MultiContentEntryText(pos = (480, 1), size = (70, 24), font=0, flags = RT_HALIGN_LEFT, text = 7), # index 7 is sum of cards for caid
							MultiContentEntryText(pos = (550, 1), size = (80, 24), font=0, flags = RT_HALIGN_LEFT, text = 8), # index 8 is reshare
							MultiContentEntryText(pos = (630, 1), size = (1024, 50), font=1, flags = RT_HALIGN_LEFT, text = 9), # index 9 is providers
							]),
					},
					"fonts": [gFont("Regular", 18),gFont("Regular", 14),gFont("Regular", 24),gFont("Regular", 20)],
					"itemHeight": 56
				}
				</convert>
			</widget>
		</screen>""" % ( sizeH, sizeLH)
	def __init__(self, session, reader):
		global HDSKIN, sizeH
		Screen.__init__(self, session)
		self.mlist = oscMenuList([])
		self.cccamreader = reader
		self["output"] = List([ ])
		self["actions"] = ActionMap(["OkCancelActions"],
					{
						"ok": self.showData,
						"cancel": self.exit
					}, -1)
		self.onLayoutFinish.append(self.showData)

	def exit(self):
		self.close()

	def buildList(self, data):
		"""Convert the per-CAID dict built in showData into Listbox rows.

		data -- {caid: {"system":..., "hop":[...6 buckets...],
		         "reshare":..., "provider":[(provid, name), ...]}}
		"""
		caids = data.keys()
		caids.sort()
		res = [ ("CAID", "System", "1", "2", "3", "4", "5", "Total", "Reshare", "") ]
		for ca_id in caids:
			entry = data[ca_id]
			hops = entry["hop"]
			csum = sum(hops)
			if not HDSKIN:
				# SD skin has no room for a separate provider column; prefix a label.
				providertxt = _("Providers: ")
				linefeed = ""
			else:
				providertxt = ""
				linefeed = "\n"
			for provid, provname in entry["provider"]:
				providertxt += "%s - %s%s" % ( provid, provname, linefeed )
			res.append( ( ca_id,
				entry["system"],
				str(hops[1]),str(hops[2]), str(hops[3]), str(hops[4]), str(hops[5]), str(csum), str(entry["reshare"]),
				providertxt[:-1]
				) )
		return res

	def showData(self):
		"""Fetch the entitlement XML for the configured reader and display it."""
		xmldata_for_reader = self.openWebIF(part = "entitlement", reader = self.cccamreader)
		xdata = ElementTree.XML(xmldata_for_reader[1])
		reader = xdata.find("reader")
		# The server address is either an attribute of <reader> or repeated
		# on every <card>. Bugfix: default it, so a reader without the
		# attribute and without cards no longer raises a NameError below.
		hostadr = reader.attrib.get("hostaddress")
		host_ok = hostadr is not None
		if not host_ok:
			hostadr = "n/a"
		cardlist = reader.find("cardlist")
		cardTotal = cardlist.attrib["totalcards"]
		cards = cardlist.findall("card")
		caid = {}
		for i in cards:
			ccaid = i.attrib["caid"]
			if not host_ok:
				hostadr = i.find("hostaddress").text
			chop = int(i.attrib["hop"])
			if chop > 5:
				chop = 5	# hops above 5 are pooled into the last bucket
			entry = caid.setdefault(ccaid, {})
			if "hop" in entry:
				entry["hop"][chop] += 1
			else:
				entry["hop"] = [ 0, 0, 0, 0, 0, 0 ]
				entry["hop"][chop] += 1
			entry["reshare"] = i.attrib["reshare"]
			entry["provider"] = [ ]
			provs = i.find("providers")
			for prov in provs.findall("provider"):
				entry["provider"].append( (prov.attrib["provid"], prov.text) )
			entry["system"] = i.attrib["system"]
		result = self.buildList(caid)
		if HDSKIN:
			self["output"].setStyle("HD")
		else:
			self["output"].setStyle("default")
		self["output"].setList(result)
		title = [ _("Reader"), self.cccamreader, _("Cards:"), cardTotal, "Server:", hostadr ]
		self.setTitle( " ".join(title))
class oscReaderStats(Screen, OscamInfo):
	"""Shows per-reader ECM statistics, for one reader or for all of them."""
	global HDSKIN, sizeH
	sizeLH = sizeH - 20
	skin = """<screen position="center,center" size="%s, 400" title="Client Info" >
			<widget source="output" render="Listbox" position="10,10" size="%s,400" scrollbarMode="showOnDemand" >
				<convert type="TemplatedMultiContent">
				{"templates":
					{"default": (25,[
							MultiContentEntryText(pos = (0, 1), size = (100, 24), font=0, flags = RT_HALIGN_LEFT, text = 0), # index 0 is caid
							MultiContentEntryText(pos = (100, 1), size = (50, 24), font=0, flags = RT_HALIGN_LEFT, text = 1), # index 1 is csystem
							MultiContentEntryText(pos = (150, 1), size = (150, 24), font=0, flags = RT_HALIGN_LEFT, text = 2), # index 2 is hop 1
							MultiContentEntryText(pos = (300, 1), size = (60, 24), font=0, flags = RT_HALIGN_LEFT, text = 3), # index 3 is hop 2
							MultiContentEntryText(pos = (360, 1), size = (60, 24), font=0, flags = RT_HALIGN_LEFT, text = 4), # index 4 is hop 3
							MultiContentEntryText(pos = (420, 1), size = (80, 24), font=0, flags = RT_HALIGN_LEFT, text = 5), # index 5 is hop 4
							MultiContentEntryText(pos = (510, 1), size = (80, 24), font=0, flags = RT_HALIGN_LEFT, text = 6), # index 6 is hop 5
							MultiContentEntryText(pos = (590, 1), size = (80, 24), font=0, flags = RT_HALIGN_LEFT, text = 7), # index 7 is sum of cards for caid
							]),
					"HD": (25,[
							MultiContentEntryText(pos = (0, 1), size = (200, 24), font=1, flags = RT_HALIGN_LEFT, text = 0), # index 0 is caid
							MultiContentEntryText(pos = (200, 1), size = (70, 24), font=1, flags = RT_HALIGN_LEFT, text = 1), # index 1 is csystem
							MultiContentEntryText(pos = (300, 1), size = (220, 24), font=1, flags = RT_HALIGN_LEFT, text = 2), # index 2 is hop 1
							MultiContentEntryText(pos = (540, 1), size = (80, 24), font=1, flags = RT_HALIGN_LEFT, text = 3), # index 3 is hop 2
							MultiContentEntryText(pos = (630, 1), size = (80, 24), font=1, flags = RT_HALIGN_LEFT, text = 4), # index 4 is hop 3
							MultiContentEntryText(pos = (720, 1), size = (130, 24), font=1, flags = RT_HALIGN_LEFT, text = 5), # index 5 is hop 4
							MultiContentEntryText(pos = (840, 1), size = (130, 24), font=1, flags = RT_HALIGN_LEFT, text = 6), # index 6 is hop 5
							MultiContentEntryText(pos = (970, 1), size = (100, 24), font=1, flags = RT_HALIGN_LEFT, text = 7), # index 7 is sum of cards for caid
							]),
					},
					"fonts": [gFont("Regular", 14),gFont("Regular", 18),gFont("Regular", 24),gFont("Regular", 20)],
					"itemHeight": 26
				}
				</convert>
			</widget>
		</screen>""" % ( sizeH, sizeLH)
	def __init__(self, session, reader):
		global HDSKIN, sizeH
		Screen.__init__(self, session)
		# reader == "all" shows the statistics of every reader.
		if reader == "all":
			self.allreaders = True
		else:
			self.allreaders = False
		self.reader = reader
		self.mlist = oscMenuList([])
		self["output"] = List([ ])
		self["actions"] = ActionMap(["OkCancelActions"],
					{
						"ok": self.showData,
						"cancel": self.exit
					}, -1)
		self.onLayoutFinish.append(self.showData)

	def exit(self):
		self.close()

	def buildList(self, data):
		"""Convert a per-CAID dict into rows (same shape as oscEntitlements).

		NOTE(review): no method in this class calls buildList - it appears
		to be a leftover copy from oscEntitlements; kept for compatibility.
		"""
		caids = data.keys()
		caids.sort()
		res = [ ("CAID", "System", "1", "2", "3", "4", "5", "Total", "Reshare", "") ]
		for ca_id in caids:
			entry = data[ca_id]
			hops = entry["hop"]
			csum = sum(hops)
			if not HDSKIN:
				providertxt = _("Providers: ")
				linefeed = ""
			else:
				providertxt = ""
				linefeed = "\n"
			for provid, provname in entry["provider"]:
				providertxt += "%s - %s%s" % ( provid, provname, linefeed )
			res.append( ( ca_id,
				entry["system"],
				str(hops[1]),str(hops[2]), str(hops[3]), str(hops[4]), str(hops[5]), str(csum), str(entry["reshare"]),
				providertxt[:-1]
				) )
		return res

	def sortData(self, datalist, sort_col, reverse = False):
		"""Return *datalist* sorted by column index *sort_col*."""
		return sorted(datalist, key=itemgetter(sort_col), reverse = reverse)

	def showData(self):
		"""Collect ECM statistics per reader and show them sorted by count."""
		readers = self.getReaders()
		if readers is None:
			readers = []	# web API unreachable: show an empty table
		result = []
		title2 = ""
		for i in readers:
			xmldata = self.openWebIF(part = "readerstats", reader = i[1])
			if xmldata[0]:
				xdata = ElementTree.XML(xmldata[1])
				rdr = xdata.find("reader")
				ecmstat = rdr.find("ecmstats")
				totalecm = ecmstat.attrib["totalecm"]
				ecmcount = ecmstat.attrib["count"]
				lastacc = ecmstat.attrib["lastaccess"]
				ecm = ecmstat.findall("ecm")
				# Bugfix: the attribute is a *string*; the original compared
				# it directly to the int 0, which is always true in Python 2.
				if int(ecmcount) > 0:
					for j in ecm:
						caid = j.attrib["caid"]
						channel = j.attrib["channelname"]
						avgtime = j.attrib["avgtime"]
						lasttime = j.attrib["lasttime"]
						retcode = j.attrib["rc"]
						rcs = j.attrib["rcs"]
						num = j.text
						if rcs == "found":
							# times are reported in ms; show seconds, max 5 chars
							avg_time = str(float(avgtime) / 1000)[:5]
							last_time = str(float(lasttime) / 1000)[:5]
							if j.attrib.has_key("lastrequest"):
								lastreq = j.attrib["lastrequest"]
								try:
									# ISO timestamp: keep the HH:MM:SS part
									last_req = lastreq.split("T")[1][:-5]
								except IndexError:
									# epoch seconds fallback
									last_req = time.strftime("%H:%M:%S",time.localtime(float(lastreq)))
							else:
								last_req = ""
						else:
							avg_time = last_time = last_req = ""
						if self.allreaders:
							result.append( (i[1], caid, channel, avg_time, last_time, rcs, last_req, int(num)) )
							title2 = _("( All readers)")
						else:
							if i[1] == self.reader:
								result.append( (i[1], caid, channel, avg_time, last_time, rcs, last_req, int(num)) )
							title2 =_("(Show only reader:") + "%s )" % self.reader
		outlist = self.sortData(result, 7, True)
		out = [ ( _("Label"), _("CAID"), _("Channel"), _("ECM avg"), _("ECM last"), _("Status"), _("Last Req."), _("Total") ) ]
		for i in outlist:
			out.append( (i[0], i[1], i[2], i[3], i[4], i[5], i[6], str(i[7])) )
		if HDSKIN:
			self["output"].setStyle("HD")
		else:
			self["output"].setStyle("default")
		self["output"].setList(out)
		title = [ _("Reader Statistics"), title2 ]
		self.setTitle( " ".join(title))
class OscamInfoConfigScreen(Screen, ConfigListScreen):
	"""Setup screen for the OscamInfo plugin (credentials, auto-update)."""
	def __init__(self, session, msg = None):
		Screen.__init__(self, session)
		self.session = session
		if msg is not None:
			self.msg = "Error:\n%s" % msg
		else:
			self.msg = ""
		self.oscamconfig = [ ]
		self["key_red"] = StaticText(_("Cancel"))
		self["key_green"] = StaticText(_("OK"))
		self["status"] = StaticText(self.msg)
		self["config"] = ConfigList(self.oscamconfig)
		self["actions"] = ActionMap(["SetupActions", "ColorActions"],
					{
						"red": self.cancel,
						"green": self.save,
						"save": self.save,
						"cancel": self.cancel,
						"ok": self.save,
					}, -2)
		ConfigListScreen.__init__(self, self.oscamconfig, session = self.session)
		self.createSetup()
		# Rebuild the option list when toggles change which options apply.
		config.oscaminfo.userdatafromconf.addNotifier(self.elementChanged, initial_call = False)
		config.oscaminfo.autoupdate.addNotifier(self.elementChanged, initial_call = False)
		self.onLayoutFinish.append(self.layoutFinished)

	def elementChanged(self, instance):
		self.createSetup()
		try:
			self["config"].l.setList(self.oscamconfig)
		except KeyError:
			# widget not created yet during early notifier calls
			pass

	def layoutFinished(self):
		self.setTitle(_("Oscam Info - Configuration"))
		self["config"].l.setList(self.oscamconfig)

	def createSetup(self):
		"""(Re)build the list of options for the current settings."""
		self.oscamconfig = []
		self.oscamconfig.append(getConfigListEntry(_("Read Userdata from oscam.conf"), config.oscaminfo.userdatafromconf))
		if not config.oscaminfo.userdatafromconf.getValue():
			self.oscamconfig.append(getConfigListEntry(_("Username (httpuser)"), config.oscaminfo.username))
			# Bugfix: the oscam.conf key is "httppwd" (the label previously said "httpwd").
			self.oscamconfig.append(getConfigListEntry(_("Password (httppwd)"), config.oscaminfo.password))
			self.oscamconfig.append(getConfigListEntry(_("IP address"), config.oscaminfo.ip))
			self.oscamconfig.append(getConfigListEntry("Port", config.oscaminfo.port))
		self.oscamconfig.append(getConfigListEntry(_("Automatically update Client/Server View?"), config.oscaminfo.autoupdate))
		if config.oscaminfo.autoupdate.getValue():
			self.oscamconfig.append(getConfigListEntry(_("Update interval (in seconds)"), config.oscaminfo.intervall))

	def save(self):
		for x in self.oscamconfig:
			x[1].save()
		configfile.save()
		self.close()

	def cancel(self):
		for x in self.oscamconfig:
			x[1].cancel()
		self.close()
|
gpl-2.0
|
XXMrHyde/android_external_chromium_org
|
tools/perf/page_sets/PRESUBMIT.py
|
23
|
2163
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import re
import sys
def _SyncFilesToCloud(input_api, output_api):
  """Searches for .sha1 files and uploads them to Cloud Storage.

  It validates all the hashes and skips upload if not necessary.
  """
  # Because this script will be called from a magic PRESUBMIT demon,
  # avoid angering it; don't pollute its sys.path.
  saved_sys_path = sys.path
  try:
    sys.path = [os.path.join(os.pardir, os.pardir, 'telemetry')] + saved_sys_path
    from telemetry.page import cloud_storage
  finally:
    sys.path = saved_sys_path

  known_hashes = cloud_storage.List(cloud_storage.DEFAULT_BUCKET)

  messages = []
  for sha1_path in input_api.AbsoluteLocalPaths():
    target_path, extension = os.path.splitext(sha1_path)
    if extension != '.sha1':
      continue

    # The .sha1 file holds the expected hash of the real (uploaded) file.
    f = open(sha1_path, 'rb')
    try:
      expected_hash = f.read(1024).rstrip()
    finally:
      f.close()

    if expected_hash in known_hashes:
      messages.append(output_api.PresubmitNotifyResult(
          'File already in Cloud Storage, skipping upload: %s' % sha1_path))
    elif not re.match('^([A-Za-z0-9]{40})$', expected_hash):
      messages.append(output_api.PresubmitError(
          'Hash file does not contain a valid SHA-1 hash: %s' % sha1_path))
    elif not os.path.exists(target_path):
      messages.append(output_api.PresubmitError(
          'Hash file exists, but file not found: %s' % sha1_path))
    elif cloud_storage.GetHash(target_path) != expected_hash:
      messages.append(output_api.PresubmitError(
          'Hash file does not match file\'s actual hash: %s' % sha1_path))
    else:
      try:
        cloud_storage.Insert(cloud_storage.DEFAULT_BUCKET, expected_hash,
                             target_path)
      except cloud_storage.CloudStorageError:
        messages.append(output_api.PresubmitError(
            'Unable to upload to Cloud Storage: %s' % sha1_path))
  return messages
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook: syncs .sha1-referenced files to Cloud Storage on commit."""
  return _SyncFilesToCloud(input_api, output_api)
|
bsd-3-clause
|
trabacus-softapps/openerp-8.0-cc
|
openerp/addons/marketing_campaign_crm_demo/__openerp__.py
|
119
|
1675
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Marketing Campaign - Demo',
'version': '1.0',
'depends': ['marketing_campaign',
'crm',
],
'author': 'OpenERP SA',
'category': 'Marketing',
'description': """
Demo data for the module marketing_campaign.
============================================
Creates demo data like leads, campaigns and segments for the module marketing_campaign.
""",
'website': 'http://www.openerp.com',
'data': [],
'demo': ['marketing_campaign_demo.xml'],
'installable': True,
'auto_install': False,
'images': ['images/campaigns.jpeg','images/email_templates.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
anandbhoraskar/Diamond
|
src/collectors/bind/test/testbind.py
|
31
|
7690
|
#!/usr/bin/python
# coding=utf-8
##########################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from bind import BindCollector
##########################################################################
class TestBindCollector(CollectorTestCase):
    """Unit tests for BindCollector (BIND DNS server statistics collector)."""
    def setUp(self):
        # Build a collector from a minimal config; the handler argument is
        # None because publish() is mocked in the tests below.
        config = get_collector_config('BindCollector', {
            'interval': 10,
        })
        self.collector = BindCollector(config, None)
    def test_import(self):
        # Sanity check that the BindCollector class was imported successfully.
        self.assertTrue(BindCollector)
    @patch.object(Collector, 'publish')
    def test_should_work_with_real_data(self, publish_mock):
        # Serve the captured BIND statistics XML fixture instead of making a
        # real HTTP request to the BIND statistics channel.
        patch_urlopen = patch('urllib2.urlopen', Mock(
            return_value=self.getFixture('bind.xml')))
        patch_urlopen.start()
        self.collector.collect()
        patch_urlopen.stop()
        # Expected metric name -> value pairs; the bind.xml fixture reports
        # zero for every counter.
        metrics = {
            'view._default.resstat.Queryv4': 0.000000,
            'view._default.resstat.Queryv6': 0.000000,
            'view._default.resstat.Responsev4': 0.000000,
            'view._default.resstat.Responsev6': 0.000000,
            'view._default.resstat.NXDOMAIN': 0.000000,
            'view._default.resstat.SERVFAIL': 0.000000,
            'view._default.resstat.FORMERR': 0.000000,
            'view._default.resstat.OtherError': 0.000000,
            'view._default.resstat.EDNS0Fail': 0.000000,
            'view._default.resstat.Mismatch': 0.000000,
            'view._default.resstat.Truncated': 0.000000,
            'view._default.resstat.Lame': 0.000000,
            'view._default.resstat.Retry': 0.000000,
            'view._default.resstat.QueryAbort': 0.000000,
            'view._default.resstat.QuerySockFail': 0.000000,
            'view._default.resstat.QueryTimeout': 0.000000,
            'view._default.resstat.GlueFetchv4': 0.000000,
            'view._default.resstat.GlueFetchv6': 0.000000,
            'view._default.resstat.GlueFetchv4Fail': 0.000000,
            'view._default.resstat.GlueFetchv6Fail': 0.000000,
            'view._default.resstat.ValAttempt': 0.000000,
            'view._default.resstat.ValOk': 0.000000,
            'view._default.resstat.ValNegOk': 0.000000,
            'view._default.resstat.ValFail': 0.000000,
            'view._default.resstat.QryRTT10': 0.000000,
            'view._default.resstat.QryRTT100': 0.000000,
            'view._default.resstat.QryRTT500': 0.000000,
            'view._default.resstat.QryRTT800': 0.000000,
            'view._default.resstat.QryRTT1600': 0.000000,
            'view._default.resstat.QryRTT1600+': 0.000000,
            'requests.QUERY': 0.000000,
            'queries.A': 0.000000,
            'nsstat.Requestv4': 0.000000,
            'nsstat.Requestv6': 0.000000,
            'nsstat.ReqEdns0': 0.000000,
            'nsstat.ReqBadEDNSVer': 0.000000,
            'nsstat.ReqTSIG': 0.000000,
            'nsstat.ReqSIG0': 0.000000,
            'nsstat.ReqBadSIG': 0.000000,
            'nsstat.ReqTCP': 0.000000,
            'nsstat.AuthQryRej': 0.000000,
            'nsstat.RecQryRej': 0.000000,
            'nsstat.XfrRej': 0.000000,
            'nsstat.UpdateRej': 0.000000,
            'nsstat.Response': 0.000000,
            'nsstat.TruncatedResp': 0.000000,
            'nsstat.RespEDNS0': 0.000000,
            'nsstat.RespTSIG': 0.000000,
            'nsstat.RespSIG0': 0.000000,
            'nsstat.QrySuccess': 0.000000,
            'nsstat.QryAuthAns': 0.000000,
            'nsstat.QryNoauthAns': 0.000000,
            'nsstat.QryReferral': 0.000000,
            'nsstat.QryNxrrset': 0.000000,
            'nsstat.QrySERVFAIL': 0.000000,
            'nsstat.QryFORMERR': 0.000000,
            'nsstat.QryNXDOMAIN': 0.000000,
            'nsstat.QryRecursion': 0.000000,
            'nsstat.QryDuplicate': 0.000000,
            'nsstat.QryDropped': 0.000000,
            'nsstat.QryFailure': 0.000000,
            'nsstat.XfrReqDone': 0.000000,
            'nsstat.UpdateReqFwd': 0.000000,
            'nsstat.UpdateRespFwd': 0.000000,
            'nsstat.UpdateFwdFail': 0.000000,
            'nsstat.UpdateDone': 0.000000,
            'nsstat.UpdateFail': 0.000000,
            'nsstat.UpdateBadPrereq': 0.000000,
            'zonestat.NotifyOutv4': 0.000000,
            'zonestat.NotifyOutv6': 0.000000,
            'zonestat.NotifyInv4': 0.000000,
            'zonestat.NotifyInv6': 0.000000,
            'zonestat.NotifyRej': 0.000000,
            'zonestat.SOAOutv4': 0.000000,
            'zonestat.SOAOutv6': 0.000000,
            'zonestat.AXFRReqv4': 0.000000,
            'zonestat.AXFRReqv6': 0.000000,
            'zonestat.IXFRReqv4': 0.000000,
            'zonestat.IXFRReqv6': 0.000000,
            'zonestat.XfrSuccess': 0.000000,
            'zonestat.XfrFail': 0.000000,
            'sockstat.UDP4Open': 0.000000,
            'sockstat.UDP6Open': 0.000000,
            'sockstat.TCP4Open': 0.000000,
            'sockstat.TCP6Open': 0.000000,
            'sockstat.UnixOpen': 0.000000,
            'sockstat.UDP4OpenFail': 0.000000,
            'sockstat.UDP6OpenFail': 0.000000,
            'sockstat.TCP4OpenFail': 0.000000,
            'sockstat.TCP6OpenFail': 0.000000,
            'sockstat.UnixOpenFail': 0.000000,
            'sockstat.UDP4Close': 0.000000,
            'sockstat.UDP6Close': 0.000000,
            'sockstat.TCP4Close': 0.000000,
            'sockstat.TCP6Close': 0.000000,
            'sockstat.UnixClose': 0.000000,
            'sockstat.FDWatchClose': 0.000000,
            'sockstat.UDP4BindFail': 0.000000,
            'sockstat.UDP6BindFail': 0.000000,
            'sockstat.TCP4BindFail': 0.000000,
            'sockstat.TCP6BindFail': 0.000000,
            'sockstat.UnixBindFail': 0.000000,
            'sockstat.FdwatchBindFail': 0.000000,
            'sockstat.UDP4ConnFail': 0.000000,
            'sockstat.UDP6ConnFail': 0.000000,
            'sockstat.TCP4ConnFail': 0.000000,
            'sockstat.TCP6ConnFail': 0.000000,
            'sockstat.UnixConnFail': 0.000000,
            'sockstat.FDwatchConnFail': 0.000000,
            'sockstat.UDP4Conn': 0.000000,
            'sockstat.UDP6Conn': 0.000000,
            'sockstat.TCP4Conn': 0.000000,
            'sockstat.TCP6Conn': 0.000000,
            'sockstat.UnixConn': 0.000000,
            'sockstat.FDwatchConn': 0.000000,
            'sockstat.TCP4AcceptFail': 0.000000,
            'sockstat.TCP6AcceptFail': 0.000000,
            'sockstat.UnixAcceptFail': 0.000000,
            'sockstat.TCP4Accept': 0.000000,
            'sockstat.TCP6Accept': 0.000000,
            'sockstat.UnixAccept': 0.000000,
            'sockstat.UDP4SendErr': 0.000000,
            'sockstat.UDP6SendErr': 0.000000,
            'sockstat.TCP4SendErr': 0.000000,
            'sockstat.TCP6SendErr': 0.000000,
            'sockstat.UnixSendErr': 0.000000,
            'sockstat.FDwatchSendErr': 0.000000,
            'sockstat.UDP4RecvErr': 0.000000,
            'sockstat.UDP6RecvErr': 0.000000,
            'sockstat.TCP4RecvErr': 0.000000,
            'sockstat.TCP6RecvErr': 0.000000,
            'sockstat.UnixRecvErr': 0.000000,
            'sockstat.FDwatchRecvErr': 0.000000,
        }
        # setDocExample records this metric set for the collector's generated
        # documentation (inherited from CollectorTestCase).
        self.setDocExample(collector=self.collector.__class__.__name__,
                           metrics=metrics,
                           defaultpath=self.collector.config['path'])
        self.assertPublishedMany(publish_mock, metrics)
##########################################################################
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
|
mit
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.