code stringlengths 1 1.49M | vector listlengths 0 7.38k | snippet listlengths 0 7.38k |
|---|---|---|
#!/usr/bin/env python
# Copyright (c) 2001 actzero, inc. All rights reserved.
import sys
sys.path.insert(1, "..")
from SOAPpy import *
# Uncomment to see outgoing HTTP headers and SOAP and incoming
#Config.debug = 1
Config.BuildWithNoType = 1
Config.BuildWithNoNamespacePrefix = 1
hd = headerType(data = {"mystring": "Hello World"})
server = SOAPProxy("http://localhost:9900/", header=hd)
print server.echo("Hello world")
server.quit()
| [
[
1,
0,
0.2174,
0.0435,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
8,
0,
0.2609,
0.0435,
0,
0.66,
0.125,
368,
3,
2,
0,
0,
0,
0,
1
],
[
1,
0,
0.3478,
0.0435,
0,
0.6... | [
"import sys",
"sys.path.insert(1, \"..\")",
"from SOAPpy import *",
"Config.BuildWithNoType = 1",
"Config.BuildWithNoNamespacePrefix = 1",
"hd = headerType(data = {\"mystring\": \"Hello World\"})",
"server = SOAPProxy(\"http://localhost:9900/\", header=hd)",
"print(server.echo(\"Hello world\"))",
"s... |
#!/usr/bin/env python
import time
from SOAPpy import SOAP
srv = SOAP.SOAPProxy('http://localhost:10080/')
for p in ('good param', 'ok param'):
ret = srv.badparam(p)
if isinstance(ret, SOAP.faultType):
print ret
else:
print 'ok'
dt = SOAP.dateTimeType(time.localtime(time.time()))
print srv.dt(dt)
| [
[
1,
0,
0.1579,
0.0526,
0,
0.66,
0,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.2105,
0.0526,
0,
0.66,
0.2,
181,
0,
1,
0,
0,
181,
0,
0
],
[
14,
0,
0.3158,
0.0526,
0,
0.... | [
"import time",
"from SOAPpy import SOAP",
"srv = SOAP.SOAPProxy('http://localhost:10080/')",
"for p in ('good param', 'ok param'):\n ret = srv.badparam(p)\n if isinstance(ret, SOAP.faultType):\n print(ret)\n else:\n print('ok')",
" ret = srv.badparam(p)",
" if isinstance(ret, ... |
"""
################################################################################
#
# SOAPpy - Cayce Ullman (cayce@actzero.com)
# Brian Matthews (blm@actzero.com)
# Gregory Warnes (Gregory.R.Warnes@Pfizer.com)
# Christopher Blunck (blunck@gst.com)
#
################################################################################
# Copyright (c) 2003, Pfizer
# Copyright (c) 2001, Cayce Ullman.
# Copyright (c) 2001, Brian Matthews.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of actzero, inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
"""
ident = '$Id: Errors.py,v 1.5 2005/02/15 16:32:22 warnes Exp $'
from version import __version__
import exceptions
################################################################################
# Exceptions
################################################################################
class Error(exceptions.Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return "<Error : %s>" % self.msg
__repr__ = __str__
def __call__(self):
return (msg,)
class RecursionError(Error):
pass
class UnknownTypeError(Error):
pass
class HTTPError(Error):
# indicates an HTTP protocol error
def __init__(self, code, msg):
self.code = code
self.msg = msg
def __str__(self):
return "<HTTPError %s %s>" % (self.code, self.msg)
__repr__ = __str__
def __call___(self):
return (self.code, self.msg, )
class UnderflowError(exceptions.ArithmeticError):
pass
| [
[
8,
0,
0.2658,
0.519,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.5443,
0.0127,
0,
0.66,
0.125,
977,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.557,
0.0127,
0,
0.66,
... | [
"\"\"\"\n################################################################################\n#\n# SOAPpy - Cayce Ullman (cayce@actzero.com)\n# Brian Matthews (blm@actzero.com)\n# Gregory Warnes (Gregory.R.Warnes@Pfizer.com)\n# Christopher Blunck (blunck@gst.com)\n#",
"ident ... |
"""This file is here for backward compatibility with versions <= 0.9.9
Delete when 1.0.0 is released!
"""
ident = '$Id: SOAP.py,v 1.38 2004/01/31 04:20:06 warnes Exp $'
from version import __version__
from Client import *
from Config import *
from Errors import *
from NS import *
from Parser import *
from SOAPBuilder import *
from Server import *
from Types import *
from Utilities import *
import wstools
import WSDL
from warnings import warn
warn("""
The sub-module SOAPpy.SOAP is deprecated and is only
provided for short-term backward compatibility. Objects are now
available directly within the SOAPpy module. Thus, instead of
from SOAPpy import SOAP
...
SOAP.SOAPProxy(...)
use
from SOAPpy import SOAPProxy
...
SOAPProxy(...)
instead.
""", DeprecationWarning)
| [
[
8,
0,
0.0625,
0.1,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.15,
0.025,
0,
0.66,
0.0667,
977,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.175,
0.025,
0,
0.66,
0.1... | [
"\"\"\"This file is here for backward compatibility with versions <= 0.9.9 \n\nDelete when 1.0.0 is released!\n\"\"\"",
"ident = '$Id: SOAP.py,v 1.38 2004/01/31 04:20:06 warnes Exp $'",
"from version import __version__",
"from Client import *",
"from Config import *",
"from Errors import *"... |
__version__="0.12.0"
| [
[
14,
0,
0.5,
0.5,
0,
0.66,
0,
162,
1,
0,
0,
0,
0,
3,
0
]
] | [
"__version__=\"0.12.0\""
] |
"""Provide a class for loading data from URL's that handles basic
authentication"""
ident = '$Id: URLopener.py,v 1.2 2004/01/31 04:20:06 warnes Exp $'
from version import __version__
from Config import Config
from urllib import FancyURLopener
class URLopener(FancyURLopener):
username = None
passwd = None
def __init__(self, username=None, passwd=None, *args, **kw):
FancyURLopener.__init__( self, *args, **kw)
self.username = username
self.passwd = passwd
def prompt_user_passwd(self, host, realm):
return self.username, self.passwd
| [
[
8,
0,
0.0652,
0.087,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.1739,
0.0435,
0,
0.66,
0.2,
977,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.2174,
0.0435,
0,
0.66,
... | [
"\"\"\"Provide a class for loading data from URL's that handles basic\nauthentication\"\"\"",
"ident = '$Id: URLopener.py,v 1.2 2004/01/31 04:20:06 warnes Exp $'",
"from version import __version__",
"from Config import Config",
"from urllib import FancyURLopener",
"class URLopener(FancyURLopener):\n\n ... |
ident = '$Id: __init__.py,v 1.9 2004/01/31 04:20:06 warnes Exp $'
from version import __version__
from Client import *
from Config import *
from Errors import *
from NS import *
from Parser import *
from SOAPBuilder import *
from Server import *
from Types import *
from Utilities import *
import wstools
import WSDL
| [
[
14,
0,
0.1333,
0.0667,
0,
0.66,
0,
977,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.2,
0.0667,
0,
0.66,
0.0833,
623,
0,
1,
0,
0,
623,
0,
0
],
[
1,
0,
0.3333,
0.0667,
0,
0.66... | [
"ident = '$Id: __init__.py,v 1.9 2004/01/31 04:20:06 warnes Exp $'",
"from version import __version__",
"from Client import *",
"from Config import *",
"from Errors import *",
"from NS import *",
"from Parser import *",
"from SOAPBuilder import *",
"from Server impo... |
#! /usr/bin/env python
"""Logging"""
import sys
class ILogger:
'''Logger interface, by default this class
will be used and logging calls are no-ops.
'''
level = 0
def __init__(self, msg):
return
def warning(self, *args):
return
def debug(self, *args):
return
def error(self, *args):
return
def setLevel(cls, level):
cls.level = level
setLevel = classmethod(setLevel)
_LoggerClass = ILogger
class BasicLogger(ILogger):
def __init__(self, msg, out=sys.stdout):
self.msg, self.out = msg, out
def warning(self, msg, *args):
if self.level < 1: return
print >>self, self.WARN, self.msg,
print >>self, msg %args
WARN = 'WARN'
def debug(self, msg, *args):
if self.level < 2: return
print >>self, self.DEBUG, self.msg,
print >>self, msg %args
DEBUG = 'DEBUG'
def error(self, msg, *args):
print >>self, self.ERROR, self.msg,
print >>self, msg %args
ERROR = 'ERROR'
def write(self, *args):
'''Write convenience function; writes strings.
'''
for s in args: self.out.write(s)
def setBasicLogger():
'''Use Basic Logger.
'''
setLoggerClass(BasicLogger)
BasicLogger.setLevel(0)
def setBasicLoggerWARN():
'''Use Basic Logger.
'''
setLoggerClass(BasicLogger)
BasicLogger.setLevel(1)
def setBasicLoggerDEBUG():
'''Use Basic Logger.
'''
setLoggerClass(BasicLogger)
BasicLogger.setLevel(2)
def setLoggerClass(loggingClass):
'''Set Logging Class.
'''
assert issubclass(loggingClass, ILogger), 'loggingClass must subclass ILogger'
global _LoggerClass
_LoggerClass = loggingClass
def setLevel(level=0):
'''Set Global Logging Level.
'''
ILogger.level = level
def getLogger(msg):
'''Return instance of Logging class.
'''
return _LoggerClass(msg)
| [
[
8,
0,
0.0235,
0.0118,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0353,
0.0118,
0,
0.66,
0.1,
509,
0,
1,
0,
0,
509,
0,
0
],
[
3,
0,
0.1588,
0.1882,
0,
0.66,
... | [
"\"\"\"Logging\"\"\"",
"import sys",
"class ILogger:\n '''Logger interface, by default this class\n will be used and logging calls are no-ops.\n '''\n level = 0\n def __init__(self, msg):\n return\n def warning(self, *args):",
" '''Logger interface, by default this class\n will ... |
"""
A more or less complete user-defined wrapper around tuple objects.
Adapted version of the standard library's UserList.
Taken from Stefan Schwarzer's ftputil library, available at
<http://www.ndh.net/home/sschwarzer/python/python_software.html>, and used under this license:
Copyright (C) 1999, Stefan Schwarzer
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
- Neither the name of the above author nor the names of the
contributors to the software may be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# $Id: UserTuple.py,v 1.1 2003/07/21 14:18:54 warnes Exp $
#XXX tuple instances (in Python 2.2) contain also:
# __class__, __delattr__, __getattribute__, __hash__, __new__,
# __reduce__, __setattr__, __str__
# What about these?
class UserTuple:
def __init__(self, inittuple=None):
self.data = ()
if inittuple is not None:
# XXX should this accept an arbitrary sequence?
if type(inittuple) == type(self.data):
self.data = inittuple
elif isinstance(inittuple, UserTuple):
# this results in
# self.data is inittuple.data
# but that's ok for tuples because they are
# immutable. (Builtin tuples behave the same.)
self.data = inittuple.data[:]
else:
# the same applies here; (t is tuple(t)) == 1
self.data = tuple(inittuple)
def __repr__(self): return repr(self.data)
def __lt__(self, other): return self.data < self.__cast(other)
def __le__(self, other): return self.data <= self.__cast(other)
def __eq__(self, other): return self.data == self.__cast(other)
def __ne__(self, other): return self.data != self.__cast(other)
def __gt__(self, other): return self.data > self.__cast(other)
def __ge__(self, other): return self.data >= self.__cast(other)
def __cast(self, other):
if isinstance(other, UserTuple): return other.data
else: return other
def __cmp__(self, other):
return cmp(self.data, self.__cast(other))
def __contains__(self, item): return item in self.data
def __len__(self): return len(self.data)
def __getitem__(self, i): return self.data[i]
def __getslice__(self, i, j):
i = max(i, 0); j = max(j, 0)
return self.__class__(self.data[i:j])
def __add__(self, other):
if isinstance(other, UserTuple):
return self.__class__(self.data + other.data)
elif isinstance(other, type(self.data)):
return self.__class__(self.data + other)
else:
return self.__class__(self.data + tuple(other))
# dir( () ) contains no __radd__ (at least in Python 2.2)
def __mul__(self, n):
return self.__class__(self.data*n)
__rmul__ = __mul__
| [
[
8,
0,
0.2172,
0.4242,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
3,
0,
0.7677,
0.4545,
0,
0.66,
1,
366,
0,
16,
0,
0,
0,
0,
26
],
[
2,
1,
0.6263,
0.1515,
1,
0.92,
... | [
"\"\"\"\nA more or less complete user-defined wrapper around tuple objects.\nAdapted version of the standard library's UserList.\n\nTaken from Stefan Schwarzer's ftputil library, available at\n<http://www.ndh.net/home/sschwarzer/python/python_software.html>, and used under this license:",
"class UserTuple:\n d... |
#! /usr/bin/env python
"""WSDL parsing services package for Web Services for Python."""
ident = "$Id: __init__.py,v 1.11 2004/12/07 15:54:53 blunck2 Exp $"
import WSDLTools
import XMLname
import logging
| [
[
8,
0,
0.2222,
0.1111,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.4444,
0.1111,
0,
0.66,
0.25,
977,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.6667,
0.1111,
0,
0.66,
... | [
"\"\"\"WSDL parsing services package for Web Services for Python.\"\"\"",
"ident = \"$Id: __init__.py,v 1.11 2004/12/07 15:54:53 blunck2 Exp $\"",
"import WSDLTools",
"import XMLname",
"import logging"
] |
"""Translate strings to and from SOAP 1.2 XML name encoding
Implements rules for mapping application defined name to XML names
specified by the w3 SOAP working group for SOAP version 1.2 in
Appendix A of "SOAP Version 1.2 Part 2: Adjuncts", W3C Working Draft
17, December 2001, <http://www.w3.org/TR/soap12-part2/#namemap>
Also see <http://www.w3.org/2000/xp/Group/xmlp-issues>.
Author: Gregory R. Warnes <Gregory.R.Warnes@Pfizer.com>
Date:: 2002-04-25
Version 0.9.0
"""
ident = "$Id: XMLname.py,v 1.4 2005/02/16 14:45:37 warnes Exp $"
from re import *
def _NCNameChar(x):
return x.isalpha() or x.isdigit() or x=="." or x=='-' or x=="_"
def _NCNameStartChar(x):
return x.isalpha() or x=="_"
def _toUnicodeHex(x):
hexval = hex(ord(x[0]))[2:]
hexlen = len(hexval)
# Make hexval have either 4 or 8 digits by prepending 0's
if (hexlen==1): hexval = "000" + hexval
elif (hexlen==2): hexval = "00" + hexval
elif (hexlen==3): hexval = "0" + hexval
elif (hexlen==4): hexval = "" + hexval
elif (hexlen==5): hexval = "000" + hexval
elif (hexlen==6): hexval = "00" + hexval
elif (hexlen==7): hexval = "0" + hexval
elif (hexlen==8): hexval = "" + hexval
else: raise Exception, "Illegal Value returned from hex(ord(x))"
return "_x"+ hexval + "_"
def _fromUnicodeHex(x):
return eval( r'u"\u'+x[2:-1]+'"' )
def toXMLname(string):
"""Convert string to a XML name."""
if string.find(':') != -1 :
(prefix, localname) = string.split(':',1)
else:
prefix = None
localname = string
T = unicode(localname)
N = len(localname)
X = [];
for i in range(N) :
if i< N-1 and T[i]==u'_' and T[i+1]==u'x':
X.append(u'_x005F_')
elif i==0 and N >= 3 and \
( T[0]==u'x' or T[0]==u'X' ) and \
( T[1]==u'm' or T[1]==u'M' ) and \
( T[2]==u'l' or T[2]==u'L' ):
X.append(u'_xFFFF_' + T[0])
elif (not _NCNameChar(T[i])) or (i==0 and not _NCNameStartChar(T[i])):
X.append(_toUnicodeHex(T[i]))
else:
X.append(T[i])
if prefix:
return "%s:%s" % (prefix, u''.join(X))
return u''.join(X)
def fromXMLname(string):
"""Convert XML name to unicode string."""
retval = sub(r'_xFFFF_','', string )
def fun( matchobj ):
return _fromUnicodeHex( matchobj.group(0) )
retval = sub(r'_x[0-9A-Za-z]+_', fun, retval )
return retval
| [
[
8,
0,
0.0843,
0.1573,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.1798,
0.0112,
0,
0.66,
0.125,
977,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.2022,
0.0112,
0,
0.66,
... | [
"\"\"\"Translate strings to and from SOAP 1.2 XML name encoding\n\nImplements rules for mapping application defined name to XML names\nspecified by the w3 SOAP working group for SOAP version 1.2 in\nAppendix A of \"SOAP Version 1.2 Part 2: Adjuncts\", W3C Working Draft\n17, December 2001, <http://www.w3.org/TR/soap... |
#! /usr/bin/env python
"""Namespace module, so you don't need PyXML
"""
try:
from xml.ns import SOAP, SCHEMA, WSDL, XMLNS, DSIG, ENCRYPTION
DSIG.C14N = "http://www.w3.org/TR/2001/REC-xml-c14n-20010315"
except:
class SOAP:
ENV = "http://schemas.xmlsoap.org/soap/envelope/"
ENC = "http://schemas.xmlsoap.org/soap/encoding/"
ACTOR_NEXT = "http://schemas.xmlsoap.org/soap/actor/next"
class SCHEMA:
XSD1 = "http://www.w3.org/1999/XMLSchema"
XSD2 = "http://www.w3.org/2000/10/XMLSchema"
XSD3 = "http://www.w3.org/2001/XMLSchema"
XSD_LIST = [ XSD1, XSD2, XSD3 ]
XSI1 = "http://www.w3.org/1999/XMLSchema-instance"
XSI2 = "http://www.w3.org/2000/10/XMLSchema-instance"
XSI3 = "http://www.w3.org/2001/XMLSchema-instance"
XSI_LIST = [ XSI1, XSI2, XSI3 ]
BASE = XSD3
class WSDL:
BASE = "http://schemas.xmlsoap.org/wsdl/"
BIND_HTTP = "http://schemas.xmlsoap.org/wsdl/http/"
BIND_MIME = "http://schemas.xmlsoap.org/wsdl/mime/"
BIND_SOAP = "http://schemas.xmlsoap.org/wsdl/soap/"
BIND_SOAP12 = "http://schemas.xmlsoap.org/wsdl/soap12/"
class XMLNS:
BASE = "http://www.w3.org/2000/xmlns/"
XML = "http://www.w3.org/XML/1998/namespace"
HTML = "http://www.w3.org/TR/REC-html40"
class DSIG:
BASE = "http://www.w3.org/2000/09/xmldsig#"
C14N = "http://www.w3.org/TR/2001/REC-xml-c14n-20010315"
C14N_COMM = "http://www.w3.org/TR/2000/CR-xml-c14n-20010315#WithComments"
C14N_EXCL = "http://www.w3.org/2001/10/xml-exc-c14n#"
DIGEST_MD2 = "http://www.w3.org/2000/09/xmldsig#md2"
DIGEST_MD5 = "http://www.w3.org/2000/09/xmldsig#md5"
DIGEST_SHA1 = "http://www.w3.org/2000/09/xmldsig#sha1"
ENC_BASE64 = "http://www.w3.org/2000/09/xmldsig#base64"
ENVELOPED = "http://www.w3.org/2000/09/xmldsig#enveloped-signature"
HMAC_SHA1 = "http://www.w3.org/2000/09/xmldsig#hmac-sha1"
SIG_DSA_SHA1 = "http://www.w3.org/2000/09/xmldsig#dsa-sha1"
SIG_RSA_SHA1 = "http://www.w3.org/2000/09/xmldsig#rsa-sha1"
XPATH = "http://www.w3.org/TR/1999/REC-xpath-19991116"
XSLT = "http://www.w3.org/TR/1999/REC-xslt-19991116"
class ENCRYPTION:
BASE = "http://www.w3.org/2001/04/xmlenc#"
BLOCK_3DES = "http://www.w3.org/2001/04/xmlenc#des-cbc"
BLOCK_AES128 = "http://www.w3.org/2001/04/xmlenc#aes128-cbc"
BLOCK_AES192 = "http://www.w3.org/2001/04/xmlenc#aes192-cbc"
BLOCK_AES256 = "http://www.w3.org/2001/04/xmlenc#aes256-cbc"
DIGEST_RIPEMD160 = "http://www.w3.org/2001/04/xmlenc#ripemd160"
DIGEST_SHA256 = "http://www.w3.org/2001/04/xmlenc#sha256"
DIGEST_SHA512 = "http://www.w3.org/2001/04/xmlenc#sha512"
KA_DH = "http://www.w3.org/2001/04/xmlenc#dh"
KT_RSA_1_5 = "http://www.w3.org/2001/04/xmlenc#rsa-1_5"
KT_RSA_OAEP = "http://www.w3.org/2001/04/xmlenc#rsa-oaep-mgf1p"
STREAM_ARCFOUR = "http://www.w3.org/2001/04/xmlenc#arcfour"
WRAP_3DES = "http://www.w3.org/2001/04/xmlenc#kw-3des"
WRAP_AES128 = "http://www.w3.org/2001/04/xmlenc#kw-aes128"
WRAP_AES192 = "http://www.w3.org/2001/04/xmlenc#kw-aes192"
WRAP_AES256 = "http://www.w3.org/2001/04/xmlenc#kw-aes256"
class OASIS:
'''URLs for Oasis specifications
'''
WSSE = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd"
UTILITY = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd"
LIFETIME = "http://docs.oasis-open.org/wsrf/2004/06/wsrf-WS-ResourceLifetime-1.2-draft-01.xsd"
PROPERTIES = "http://docs.oasis-open.org/wsrf/2004/06/wsrf-WS-ResourceProperties-1.2-draft-01.xsd"
BASENOTIFICATION = "http://docs.oasis-open.org/wsn/2004/06/wsn-WS-BaseNotification-1.2-draft-01.xsd"
BASEFAULTS = "http://docs.oasis-open.org/wsrf/2004/06/wsrf-WS-BaseFaults-1.2-draft-01.xsd"
class WSSE:
BASE = "http://schemas.xmlsoap.org/ws/2002/04/secext"
TRUST = "http://schemas.xmlsoap.org/ws/2004/04/trust"
class WSU:
BASE = "http://schemas.xmlsoap.org/ws/2002/04/utility"
UTILITY = "http://schemas.xmlsoap.org/ws/2002/07/utility"
class WSR:
PROPERTIES = "http://www.ibm.com/xmlns/stdwip/web-services/WS-ResourceProperties"
LIFETIME = "http://www.ibm.com/xmlns/stdwip/web-services/WS-ResourceLifetime"
class WSA200408:
ADDRESS = "http://schemas.xmlsoap.org/ws/2004/08/addressing"
ANONYMOUS = "%s/role/anonymous" %ADDRESS
FAULT = "%s/fault" %ADDRESS
WSA = WSA200408
class WSA200403:
ADDRESS = "http://schemas.xmlsoap.org/ws/2004/03/addressing"
ANONYMOUS = "%s/role/anonymous" %ADDRESS
FAULT = "%s/fault" %ADDRESS
class WSA200303:
ADDRESS = "http://schemas.xmlsoap.org/ws/2003/03/addressing"
ANONYMOUS = "%s/role/anonymous" %ADDRESS
FAULT = None
class WSP:
POLICY = "http://schemas.xmlsoap.org/ws/2002/12/policy"
class BEA:
SECCONV = "http://schemas.xmlsoap.org/ws/2004/04/sc"
class GLOBUS:
SECCONV = "http://wsrf.globus.org/core/2004/07/security/secconv"
CORE = "http://www.globus.org/namespaces/2004/06/core"
SIG = "http://www.globus.org/2002/04/xmlenc#gssapi-sign"
ZSI_SCHEMA_URI = 'http://www.zolera.com/schemas/ZSI/'
| [
[
8,
0,
0.02,
0.016,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
7,
0,
0.3,
0.528,
0,
0.66,
0.0769,
0,
0,
1,
0,
0,
0,
0,
0
],
[
1,
1,
0.048,
0.008,
1,
0.22,
0,
... | [
"\"\"\"Namespace module, so you don't need PyXML \n\"\"\"",
"try:\n from xml.ns import SOAP, SCHEMA, WSDL, XMLNS, DSIG, ENCRYPTION\n DSIG.C14N = \"http://www.w3.org/TR/2001/REC-xml-c14n-20010315\"\n \nexcept:\n class SOAP:\n ENV = \"http://schemas.xmlsoap.org/soap/envelope/\"\n ... |
#! /usr/bin/env python
"""wstools.WSDLTools.WSDLReader tests directory."""
import utils
| [
[
8,
0,
0.4,
0.2,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.8,
0.2,
0,
0.66,
1,
970,
0,
1,
0,
0,
970,
0,
0
]
] | [
"\"\"\"wstools.WSDLTools.WSDLReader tests directory.\"\"\"",
"import utils"
] |
"""Parse web services description language to get SOAP methods.
Rudimentary support."""
ident = '$Id: WSDL.py,v 1.11 2005/02/21 20:16:15 warnes Exp $'
from version import __version__
import wstools
from Client import SOAPProxy, SOAPAddress
from Config import Config
import urllib
class Proxy:
"""WSDL Proxy.
SOAPProxy wrapper that parses method names, namespaces, soap actions from
the web service description language (WSDL) file passed into the
constructor. The WSDL reference can be passed in as a stream, an url, a
file name, or a string.
Loads info into self.methods, a dictionary with methodname keys and values
of WSDLTools.SOAPCallinfo.
For example,
url = 'http://www.xmethods.org/sd/2001/TemperatureService.wsdl'
wsdl = WSDL.Proxy(url)
print len(wsdl.methods) # 1
print wsdl.methods.keys() # getTemp
See WSDLTools.SOAPCallinfo for more info on each method's attributes.
"""
def __init__(self, wsdlsource, config=Config, **kw ):
reader = wstools.WSDLTools.WSDLReader()
self.wsdl = None
# From Mark Pilgrim's "Dive Into Python" toolkit.py--open anything.
if self.wsdl is None and hasattr(wsdlsource, "read"):
#print 'stream'
self.wsdl = reader.loadFromStream(wsdlsource)
# NOT TESTED (as of April 17, 2003)
#if self.wsdl is None and wsdlsource == '-':
# import sys
# self.wsdl = reader.loadFromStream(sys.stdin)
# print 'stdin'
if self.wsdl is None:
try:
file(wsdlsource)
self.wsdl = reader.loadFromFile(wsdlsource)
#print 'file'
except (IOError, OSError):
pass
if self.wsdl is None:
try:
stream = urllib.urlopen(wsdlsource)
self.wsdl = reader.loadFromStream(stream, wsdlsource)
except (IOError, OSError): pass
if self.wsdl is None:
import StringIO
self.wsdl = reader.loadFromString(str(wsdlsource))
#print 'string'
# Package wsdl info as a dictionary of remote methods, with method name
# as key (based on ServiceProxy.__init__ in ZSI library).
self.methods = {}
service = self.wsdl.services[0]
port = service.ports[0]
name = service.name
binding = port.getBinding()
portType = binding.getPortType()
for operation in portType.operations:
callinfo = wstools.WSDLTools.callInfoFromWSDL(port, operation.name)
self.methods[callinfo.methodName] = callinfo
self.soapproxy = SOAPProxy('http://localhost/dummy.webservice',
config=config, **kw)
def __str__(self):
s = ''
for method in self.methods.values():
s += str(method)
return s
def __getattr__(self, name):
"""Set up environment then let parent class handle call.
Raises AttributeError is method name is not found."""
if not self.methods.has_key(name): raise AttributeError, name
callinfo = self.methods[name]
self.soapproxy.proxy = SOAPAddress(callinfo.location)
self.soapproxy.namespace = callinfo.namespace
self.soapproxy.soapaction = callinfo.soapAction
return self.soapproxy.__getattr__(name)
def show_methods(self):
for key in self.methods.keys():
method = self.methods[key]
print "Method Name:", key.ljust(15)
print
inps = method.inparams
for parm in range(len(inps)):
details = inps[parm]
print " In #%d: %s (%s)" % (parm, details.name, details.type)
print
outps = method.outparams
for parm in range(len(outps)):
details = outps[parm]
print " Out #%d: %s (%s)" % (parm, details.name, details.type)
print
| [
[
8,
0,
0.0172,
0.0259,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.0431,
0.0086,
0,
0.66,
0.1429,
977,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.0517,
0.0086,
0,
0.66,... | [
"\"\"\"Parse web services description language to get SOAP methods.\n\nRudimentary support.\"\"\"",
"ident = '$Id: WSDL.py,v 1.11 2005/02/21 20:16:15 warnes Exp $'",
"from version import __version__",
"import wstools",
"from Client import SOAPProxy, SOAPAddress",
"from Config import Config",
"import url... |
"""
GSIServer - Contributed by Ivan R. Judson <judson@mcs.anl.gov>
################################################################################
#
# SOAPpy - Cayce Ullman (cayce@actzero.com)
# Brian Matthews (blm@actzero.com)
# Gregory Warnes (Gregory.R.Warnes@Pfizer.com)
# Christopher Blunck (blunck@gst.com)
#
################################################################################
# Copyright (c) 2003, Pfizer
# Copyright (c) 2001, Cayce Ullman.
# Copyright (c) 2001, Brian Matthews.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of actzero, inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
"""
from __future__ import nested_scopes
ident = '$Id: GSIServer.py,v 1.5 2005/02/15 16:32:22 warnes Exp $'
from version import __version__
#import xml.sax
import re
import socket
import sys
import SocketServer
from types import *
import BaseHTTPServer
# SOAPpy modules
from Parser import parseSOAPRPC
from Config import SOAPConfig
from Types import faultType, voidType, simplify
from NS import NS
from SOAPBuilder import buildSOAP
from Utilities import debugHeader, debugFooter
try: from M2Crypto import SSL
except: pass
#####
from Server import *
from pyGlobus.io import GSITCPSocketServer, ThreadingGSITCPSocketServer
from pyGlobus import ioc
def GSIConfig():
config = SOAPConfig()
config.channel_mode = ioc.GLOBUS_IO_SECURE_CHANNEL_MODE_GSI_WRAP
config.delegation_mode = ioc.GLOBUS_IO_SECURE_DELEGATION_MODE_FULL_PROXY
config.tcpAttr = None
config.authMethod = "_authorize"
return config
Config = GSIConfig()
class GSISOAPServer(GSITCPSocketServer, SOAPServerBase):
def __init__(self, addr = ('localhost', 8000),
RequestHandler = SOAPRequestHandler, log = 0,
encoding = 'UTF-8', config = Config, namespace = None):
# Test the encoding, raising an exception if it's not known
if encoding != None:
''.encode(encoding)
self.namespace = namespace
self.objmap = {}
self.funcmap = {}
self.encoding = encoding
self.config = config
self.log = log
self.allow_reuse_address= 1
GSITCPSocketServer.__init__(self, addr, RequestHandler,
self.config.channel_mode,
self.config.delegation_mode,
tcpAttr = self.config.tcpAttr)
def get_request(self):
sock, addr = GSITCPSocketServer.get_request(self)
return sock, addr
class ThreadingGSISOAPServer(ThreadingGSITCPSocketServer, SOAPServerBase):
def __init__(self, addr = ('localhost', 8000),
RequestHandler = SOAPRequestHandler, log = 0,
encoding = 'UTF-8', config = Config, namespace = None):
# Test the encoding, raising an exception if it's not known
if encoding != None:
''.encode(encoding)
self.namespace = namespace
self.objmap = {}
self.funcmap = {}
self.encoding = encoding
self.config = config
self.log = log
self.allow_reuse_address= 1
ThreadingGSITCPSocketServer.__init__(self, addr, RequestHandler,
self.config.channel_mode,
self.config.delegation_mode,
tcpAttr = self.config.tcpAttr)
def get_request(self):
sock, addr = ThreadingGSITCPSocketServer.get_request(self)
return sock, addr
| [
[
8,
0,
0.1573,
0.3077,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.3217,
0.007,
0,
0.66,
0.0435,
777,
0,
1,
0,
0,
777,
0,
0
],
[
14,
0,
0.3357,
0.007,
0,
0.66,... | [
"\"\"\"\nGSIServer - Contributed by Ivan R. Judson <judson@mcs.anl.gov>\n\n\n################################################################################\n#\n# SOAPpy - Cayce Ullman (cayce@actzero.com)\n# Brian Matthews (blm@actzero.com)",
"from __future__ import nested_scopes",
"ident = ... |
#!/usr/bin/env python
import string
import cgi
ident = '$Id: interop2html.py,v 1.1.1.1 2001/06/27 21:36:14 cullman Exp $'
lines = open('output.txt').readlines()
#preserve the tally
tally = lines[-6:]
#whack the tally from lines
lines = lines[:-6]
table={}
for line in lines:
if line[:3] == ' ' or line == '>\n' : continue
line = line[:-1] #delete end of line char
row = [line[:line.find(': ')], line[line.find(': ')+2:]] #split server name from rest of line
restofrow = row[1].split(' ',3) #break out method name, number, status code, status comment
if len(restofrow) > 3:
if restofrow[3].find('as expected') != -1:
restofrow[2] = restofrow[2] + ' (as expected)'
elif restofrow[3][:2] == '- ' :
restofrow[3] = restofrow[3][2:]
try: table[row[0]].append([restofrow[0],restofrow[2:]])
except KeyError: table[row[0]] = [[restofrow[0],restofrow[2:]]]
print "<html><body>"
print "<script>function popup(text) {"
print "text = '<html><head><title>Test Detail</title></head><body><p>' + text + '</p></body></html>';"
print "newWin=window.open('','win1','location=no,menubar=no,width=400,height=200');"
print "newWin.document.open();"
print "newWin.document.write(text);"
print "newWin.focus(); } </script>"
print "<br><table style='font-family: Arial; color: #cccccc'><tr><td colspan=2><font face=arial color=#cccccc><b>Summary</b></font></td></tr>"
for x in tally:
z = x[:-1].split(":",1)
print "<tr><td><font face=arial color=#cccccc>",z[0],"</font></td><td><font face=arial color=#cccccc>",z[1],"</font></td></tr>"
print "</table><br>"
c = 0
totalmethods = len(table[table.keys()[0]])
while c < totalmethods:
print "<br><table width='95%' style='font-family: Arial'>"
print "<tr><td width='27%' bgcolor='#cccccc'></td>"
cols = [c, c + 1, c + 2]
if c != 16:
cols += [c + 3]
for i in cols:
try: header = table[table.keys()[0]][i][0]
except: break
print "<td width ='17%' align='center' bgcolor='#cccccc'><b>",header,"</b></td>"
print "</tr>"
l = table.keys()
l.sort()
for key in l:
print "<tr><td bgcolor='#cccccc'>", key , "</td>"
for i in cols:
try: status = table[key][i][1][0]
except: break
if status.find("succeed") != -1:
bgcolor = "#339900"
status = "Pass"
elif status.find("expected") != -1:
bgcolor = "#FF9900"
hreftitle = table[key][i][1][1].replace("'","") # remove apostrophes from title properties
popuphtml = '"' + cgi.escape(cgi.escape(table[key][i][1][1]).replace("'","'").replace('"',""")) + '"'
status = "<a title='" + hreftitle + "' href='javascript:popup(" + popuphtml + ")'>Failed (expected)</a>"
else:
bgcolor = "#CC0000"
hreftitle = table[key][i][1][1].replace("'","") # remove apostrophes from title properties
popuphtml = '"' + cgi.escape(cgi.escape(table[key][i][1][1]).replace("'","'").replace('"',""")) + '"'
status = "<a title='" + hreftitle + "' href='javascript:popup(" + popuphtml + ")'>Failed</a>"
print "<td align='center' bgcolor=" , bgcolor , ">" , status , "</td>"
print "</tr>"
print "</table>"
c = c + len(cols)
print "</body></html>"
| [
[
1,
0,
0.0395,
0.0132,
0,
0.66,
0,
890,
0,
1,
0,
0,
890,
0,
0
],
[
1,
0,
0.0526,
0.0132,
0,
0.66,
0.0476,
934,
0,
1,
0,
0,
934,
0,
0
],
[
14,
0,
0.0789,
0.0132,
0,
... | [
"import string",
"import cgi",
"ident = '$Id: interop2html.py,v 1.1.1.1 2001/06/27 21:36:14 cullman Exp $'",
"lines = open('output.txt').readlines()",
"tally = lines[-6:]",
"lines = lines[:-6]",
"table={}",
"for line in lines:\n if line[:3] == ' ' or line == '>\\n' : continue\n line = line[:-1] #... |
"""
------------------------------------------------------------------
Author: Gregory R. Warnes <Gregory.R.Warnes@Pfizer.com>
Date: 2005-02-24
Version: 0.7.2
Copyright: (c) 2003-2005 Pfizer, Licensed to PSF under a Contributor Agreement
License: Licensed under the Apache License, Version 2.0 (the"License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in
writing, software distributed under the License is
distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See
the License for the specific language governing
permissions and limitations under the License.
------------------------------------------------------------------
"""
from distutils.core import setup
url="http://www.analytics.washington.edu/statcomp/projects/rzope/fpconst/"
import fpconst
setup(name="fpconst",
version=fpconst.__version__,
description="Utilities for handling IEEE 754 floating point special values",
author="Gregory Warnes",
author_email="Gregory.R.Warnes@Pfizer.com",
url = url,
long_description=fpconst.__doc__,
py_modules=['fpconst']
)
| [
[
8,
0,
0.2692,
0.5128,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.6154,
0.0256,
0,
0.66,
0.25,
152,
0,
1,
0,
0,
152,
0,
0
],
[
14,
0,
0.6667,
0.0256,
0,
0.66,... | [
"\"\"\"\n------------------------------------------------------------------\nAuthor: Gregory R. Warnes <Gregory.R.Warnes@Pfizer.com>\nDate: 2005-02-24\nVersion: 0.7.2\nCopyright: (c) 2003-2005 Pfizer, Licensed to PSF under a Contributor Agreement\nLicense: Licensed under the Apache License, Version 2.0 ... |
"""Utilities for handling IEEE 754 floating point special values
This python module implements constants and functions for working with
IEEE754 double-precision special values. It provides constants for
Not-a-Number (NaN), Positive Infinity (PosInf), and Negative Infinity
(NegInf), as well as functions to test for these values.
The code is implemented in pure python by taking advantage of the
'struct' standard module. Care has been taken to generate proper
results on both big-endian and little-endian machines. Some efficiency
could be gained by translating the core routines into C.
See <http://babbage.cs.qc.edu/courses/cs341/IEEE-754references.html>
for reference material on the IEEE 754 floating point standard.
Further information on this package is available at
<http://www.analytics.washington.edu/statcomp/projects/rzope/fpconst/>.
------------------------------------------------------------------
Author: Gregory R. Warnes <Gregory.R.Warnes@Pfizer.com>
Date: 2005-02-24
Version: 0.7.2
Copyright: (c) 2003-2005 Pfizer, Licensed to PSF under a Contributor Agreement
License: Licensed under the Apache License, Version 2.0 (the"License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in
writing, software distributed under the License is
distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
CONDITIONS OF ANY KIND, either express or implied. See
the License for the specific language governing
permissions and limitations under the License.
------------------------------------------------------------------
"""
__version__ = "0.7.2"
ident = "$Id: fpconst.py,v 1.16 2005/02/24 17:42:03 warnes Exp $"
import struct, operator
# check endianess
_big_endian = struct.pack('i',1)[0] != '\x01'
# and define appropriate constants
if(_big_endian):
NaN = struct.unpack('d', '\x7F\xF8\x00\x00\x00\x00\x00\x00')[0]
PosInf = struct.unpack('d', '\x7F\xF0\x00\x00\x00\x00\x00\x00')[0]
NegInf = -PosInf
else:
NaN = struct.unpack('d', '\x00\x00\x00\x00\x00\x00\xf8\xff')[0]
PosInf = struct.unpack('d', '\x00\x00\x00\x00\x00\x00\xf0\x7f')[0]
NegInf = -PosInf
def _double_as_bytes(dval):
"Use struct.unpack to decode a double precision float into eight bytes"
tmp = list(struct.unpack('8B',struct.pack('d', dval)))
if not _big_endian:
tmp.reverse()
return tmp
##
## Functions to extract components of the IEEE 754 floating point format
##
def _sign(dval):
"Extract the sign bit from a double-precision floating point value"
bb = _double_as_bytes(dval)
return bb[0] >> 7 & 0x01
def _exponent(dval):
"""Extract the exponentent bits from a double-precision floating
point value.
Note that for normalized values, the exponent bits have an offset
of 1023. As a consequence, the actual exponentent is obtained
by subtracting 1023 from the value returned by this function
"""
bb = _double_as_bytes(dval)
return (bb[0] << 4 | bb[1] >> 4) & 0x7ff
def _mantissa(dval):
"""Extract the _mantissa bits from a double-precision floating
point value."""
bb = _double_as_bytes(dval)
mantissa = bb[1] & 0x0f << 48
mantissa += bb[2] << 40
mantissa += bb[3] << 32
mantissa += bb[4]
return mantissa
def _zero_mantissa(dval):
"""Determine whether the mantissa bits of the given double are all
zero."""
bb = _double_as_bytes(dval)
return ((bb[1] & 0x0f) | reduce(operator.or_, bb[2:])) == 0
##
## Functions to test for IEEE 754 special values
##
def isNaN(value):
"Determine if the argument is a IEEE 754 NaN (Not a Number) value."
return (_exponent(value)==0x7ff and not _zero_mantissa(value))
def isInf(value):
"""Determine if the argument is an infinite IEEE 754 value (positive
or negative inifinity)"""
return (_exponent(value)==0x7ff and _zero_mantissa(value))
def isFinite(value):
"""Determine if the argument is an finite IEEE 754 value (i.e., is
not NaN, positive or negative inifinity)"""
return (_exponent(value)!=0x7ff)
def isPosInf(value):
"Determine if the argument is a IEEE 754 positive infinity value"
return (_sign(value)==0 and _exponent(value)==0x7ff and \
_zero_mantissa(value))
def isNegInf(value):
"Determine if the argument is a IEEE 754 negative infinity value"
return (_sign(value)==1 and _exponent(value)==0x7ff and \
_zero_mantissa(value))
##
## Functions to test public functions.
##
def test_isNaN():
assert( not isNaN(PosInf) )
assert( not isNaN(NegInf) )
assert( isNaN(NaN ) )
assert( not isNaN( 1.0) )
assert( not isNaN( -1.0) )
def test_isInf():
assert( isInf(PosInf) )
assert( isInf(NegInf) )
assert( not isInf(NaN ) )
assert( not isInf( 1.0) )
assert( not isInf( -1.0) )
def test_isFinite():
assert( not isFinite(PosInf) )
assert( not isFinite(NegInf) )
assert( not isFinite(NaN ) )
assert( isFinite( 1.0) )
assert( isFinite( -1.0) )
def test_isPosInf():
assert( isPosInf(PosInf) )
assert( not isPosInf(NegInf) )
assert( not isPosInf(NaN ) )
assert( not isPosInf( 1.0) )
assert( not isPosInf( -1.0) )
def test_isNegInf():
assert( not isNegInf(PosInf) )
assert( isNegInf(NegInf) )
assert( not isNegInf(NaN ) )
assert( not isNegInf( 1.0) )
assert( not isNegInf( -1.0) )
# overall test
def test():
test_isNaN()
test_isInf()
test_isFinite()
test_isPosInf()
test_isNegInf()
if __name__ == "__main__":
test()
| [
[
8,
0,
0.1067,
0.2079,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.2191,
0.0056,
0,
0.66,
0.0455,
162,
1,
0,
0,
0,
0,
3,
0
],
[
14,
0,
0.2247,
0.0056,
0,
0.66... | [
"\"\"\"Utilities for handling IEEE 754 floating point special values\n\nThis python module implements constants and functions for working with\nIEEE754 double-precision special values. It provides constants for\nNot-a-Number (NaN), Positive Infinity (PosInf), and Negative Infinity\n(NegInf), as well as functions t... |
from sys import stdin, stdout
#fin = stdin
fin = open('input.txt')
fout = stdout
#fout = open('output.txt', 'w')
s = list(fin.read().strip().split())
n = list(map(len, s))
dp = [[0] * (n[1] + 1) for i in range(n[0] + 1)]
fr = [[(0, 0)] * (n[1] + 1) for i in range(n[0] + 1)]
for i in range(n[0]):
for j in range(n[1]):
dp[i][j], fr[i][j] = dp[i - 1][j], (i - 1, j)
if dp[i][j] < dp[i][j - 1]:
dp[i][j], fr[i][j] = dp[i][j - 1], (i, j - 1)
if s[0][i] == s[1][j]:
dp[i][j], fr[i][j] = dp[i - 1][j - 1] + 1, (i - 1, j - 1)
ans = []
i, j = map(lambda x: x - 1, n)
while dp[i][j] != 0:
if s[0][i] == s[1][j]:
ans.append(s[0][i])
i, j = fr[i][j]
print(''.join(ans[::-1]), file=fout)
| [
[
1,
0,
0.0312,
0.0312,
0,
0.66,
0,
509,
0,
2,
0,
0,
509,
0,
0
],
[
14,
0,
0.125,
0.0312,
0,
0.66,
0.0909,
225,
3,
1,
0,
0,
693,
10,
1
],
[
14,
0,
0.1562,
0.0312,
0,
... | [
"from sys import stdin, stdout",
"fin = open('input.txt')",
"fout = stdout",
"s = list(fin.read().strip().split())",
"n = list(map(len, s))",
"dp = [[0] * (n[1] + 1) for i in range(n[0] + 1)]",
"fr = [[(0, 0)] * (n[1] + 1) for i in range(n[0] + 1)]",
"for i in range(n[0]):\n for j in range(n[1]):\n... |
from sys import stdin, stdout
fin = stdin
#fin = open('input.txt')
fout = stdout
#fout = open('output.txt', 'w')
n = int(fin.readline())
ans = [[] for i in range(n)]
for i in range(n):
for j in range(n):
ans[(i + j) % n].append(i * n + j + 1)
for i in range(n):
print(' '.join(map(str, ans[i])), ' , sum = ', sum(ans[i])) | [
[
1,
0,
0.0588,
0.0588,
0,
0.66,
0,
509,
0,
2,
0,
0,
509,
0,
0
],
[
14,
0,
0.1765,
0.0588,
0,
0.66,
0.1667,
225,
2,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.2941,
0.0588,
0,
... | [
"from sys import stdin, stdout",
"fin = stdin",
"fout = stdout",
"n = int(fin.readline())",
"ans = [[] for i in range(n)]",
"for i in range(n):\n\tfor j in range(n):\n\t\tans[(i + j) % n].append(i * n + j + 1)",
"\tfor j in range(n):\n\t\tans[(i + j) % n].append(i * n + j + 1)",
"\t\tans[(i + j) % n]... |
'''
Created on 21-03-2011
@author: maciek
'''
def formatString(format, **kwargs):
'''
'''
if not format: return ''
for arg in kwargs.keys():
format = format.replace("{" + arg + "}", "##" + arg + "##")
format = format.replace ("{", "{{")
format = format.replace("}", "}}")
for arg in kwargs.keys():
format = format.replace("##" + arg + "##", "{" + arg + "}")
res = format.format(**kwargs)
res = res.replace("{{", "{")
res = res.replace("}}", "}")
return res | [
[
8,
0,
0.1304,
0.2174,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
2,
0,
0.6739,
0.6957,
0,
0.66,
1,
798,
0,
2,
1,
0,
0,
0,
9
],
[
8,
1,
0.413,
0.087,
1,
0.76,
0,
... | [
"'''\nCreated on 21-03-2011\n\n@author: maciek\n'''",
"def formatString(format, **kwargs):\n '''\n '''\n if not format: return ''\n \n for arg in kwargs.keys():\n format = format.replace(\"{\" + arg + \"}\", \"##\" + arg + \"##\")\n format = format.replace (\"{\", \"{{\")",
" '''\n ... |
'''
Created on 21-03-2011
@author: maciek
'''
from IndexGenerator import IndexGenerator
from optparse import OptionParser
import os
import tempfile
import shutil
import logging
logging.basicConfig(level = logging.DEBUG)
parser = OptionParser()
parser.add_option('-n', '--app-name', action='store', dest='appName', help='aplication name')
parser.add_option('-u', '--release-urls', action='store', dest='releaseUrls', help='URLs of download files - as coma separated list of entrires')
parser.add_option('-d', '--destination-directory', action='store', dest='otaAppDir', help='Directory where OTA files are created')
parser.add_option('-v', '--version', action='store', dest='version', help='Version of the application')
parser.add_option('-r', '--releases', action='store', dest='releases', help='Release names of the application')
parser.add_option('-R', '--release-notes', action='store', dest='releaseNotes', help='Release notes of the application (in txt2tags format)')
parser.add_option('-D', '--description', action='store', dest='description', help='Description of the application (in txt2tags format)')
(options, args) = parser.parse_args()
if options.appName == None:
parser.error("Please specify the appName.")
elif options.releaseUrls == None:
parser.error("Please specify releaseUrls")
elif options.otaAppDir == None:
parser.error("Please specify destination directory")
elif options.version == None:
parser.error("Please specify version")
elif options.releases == None:
parser.error("Please specify releases")
elif options.releaseNotes == None:
parser.error("Please specify releaseNotes")
elif options.description == None:
parser.error("Please specify description")
appName = options.appName
releaseUrls = options.releaseUrls
otaAppDir = options.otaAppDir
version = options.version
releases = options.releases
releaseNotes = options.releaseNotes
description = options.description
def findIconFilename():
iconPath = "res/drawable-hdpi/icon.png"
if not os.path.exists(iconPath):
iconPath = "res/drawable-mdpi/icon.png"
if not os.path.exists(iconPath):
iconPath = "res/drawable-ldpi/icon.png"
if not os.path.exists(iconPath):
iconPath = "res/drawable/icon.png"
logging.debug("IconPath: "+iconPath)
return iconPath
def createOTApackage():
'''
crates all needed files in tmp dir
'''
releaseNotesContent = open(releaseNotes).read()
descriptionContent = open(description).read()
indexGenerator = IndexGenerator(appName, releaseUrls, releaseNotesContent, descriptionContent, version, releases)
index = indexGenerator.get();
tempIndexFile = tempfile.TemporaryFile()
tempIndexFile.write(index)
tempIndexFile.flush()
tempIndexFile.seek(0)
return tempIndexFile
tempIndexFile = createOTApackage()
if not os.path.isdir(otaAppDir):
logging.debug("creating dir: "+otaAppDir)
os.mkdir(otaAppDir)
else:
logging.warning("dir: "+otaAppDir+" exists")
indexFile = open(os.path.join(otaAppDir,"index.html"),'w')
shutil.copyfileobj(tempIndexFile, indexFile)
srcIconFileName = findIconFilename()
disIconFileName = os.path.join(otaAppDir,"Icon.png")
shutil.copy(srcIconFileName,disIconFileName)
| [
[
8,
0,
0.0341,
0.0568,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0682,
0.0114,
0,
0.66,
0.0303,
933,
0,
1,
0,
0,
933,
0,
0
],
[
1,
0,
0.0795,
0.0114,
0,
0.66... | [
"'''\nCreated on 21-03-2011\n\n@author: maciek\n'''",
"from IndexGenerator import IndexGenerator",
"from optparse import OptionParser",
"import os",
"import tempfile",
"import shutil",
"import logging",
"logging.basicConfig(level = logging.DEBUG)",
"parser = OptionParser()",
"parser.add_option('-n... |
'''
Created on 21-03-2011
@author: maciek
'''
from formater import formatString
import os
class IndexGenerator(object):
'''
Generates Index.html for iOS app OTA distribution
'''
basePath = os.path.dirname(__file__)
templateFile = os.path.join(basePath,"templates/index.tmpl")
releaseUrls = ""
appName = ""
changeLog = ""
description = ""
version = ""
release = ""
def __init__(self,appName, releaseUrls, changeLog, description, version, releases):
'''
Constructor
'''
self.appName = appName
self.releaseUrls = releaseUrls
self.changeLog = changeLog
self.description = description
self.version = version
self.releases = releases
def get(self):
'''
returns index.html source code from template file
'''
urlList = self.releaseUrls.split(",")
releaseList = self.releases.split(",")
generatedHtml=""
count=0;
for release in releaseList:
generatedHtml += " <li>\n"
generatedHtml += " <h3><a href=\"javascript:load('" + urlList[count] + "')\">" + release + "</a></h3>\n"
generatedHtml += " </li>\n"
count += 1
template = open(self.templateFile).read()
index = formatString(template, downloads=generatedHtml,
changeLog=self.changeLog,
appName=self.appName,
description=self.description,
version = self.version);
return index | [
[
8,
0,
0.0526,
0.0877,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.1053,
0.0175,
0,
0.66,
0.3333,
11,
0,
1,
0,
0,
11,
0,
0
],
[
1,
0,
0.1228,
0.0175,
0,
0.66,
... | [
"'''\nCreated on 21-03-2011\n\n@author: maciek\n'''",
"from formater import formatString",
"import os",
"class IndexGenerator(object):\n '''\n Generates Index.html for iOS app OTA distribution\n '''\n basePath = os.path.dirname(__file__)\n templateFile = os.path.join(basePath,\"templates/index.... |
from math import sqrt, sin, cos, tan, pi
from numpy import *
# linear interp between key frames startKey and endKey, returns a list of frames
# includes startKey's time exclude endKey's time
def lerpt(startKey, endKey):
step = 1 / 30.0 #assuming 30 frames a second
#key format is [time, pose] i have the time now, so i just pull out the pose
startTime = startKey[0]
endTime = endKey[0]
startPose = startKey[1]
endPose = endKey[1]
interval = float(endTime) - float(startTime)
time = 0
frames = []
# i don't want a frame where time practically equals the endTime
while time < interval-step:
#frame format is [time, pose information]
#example [time, x, y, z, rotation]
frame = [startTime + time]
for i in range(0, len(startPose)):
frame.append(lerp(startPose[i], endPose[i], time / interval))
frames.append(frame)
time += step
return frames
def lerp(x, y, pct):
pct = min(1.0, max(0.0, pct))
return x * (1-pct) + y * pct
#frames are x, y, z, rot, time
def writeFrames(f, keyframes):
for i in range(0, len(keyframes)-1):
thisKey = keyframes[i]
nextKey = keyframes[i+1]
frames = lerpt(thisKey, nextKey)
for frame in frames:
for i in range(0, len(frame)-1):
f.write(str(frame[i]) + " ")
f.write(str(frame[len(frame)-1]))
f.write("\n")
#assume that characters are squares with sides of length .025
def stopGoCardinal(): #this is the up and down version
f = open("stopGo.patch", 'w')
f.write("4\n") #number of verts in the polygon
f.write("0.0 0.0 0.0\n") #polygon
f.write("1.0 0.0 0.0\n")
f.write("1.0 1.0 0.0\n")
f.write("0.0 1.0 0.0\n")
f.write("2\n") #number of characters
f.write("# 0 1 2 3\n") #entrance, start of interaction, end of interaction, exit
#write frames here
#define some key frames. list of lists [time, [x, y, z, rotation (in degrees)]]
kfsCharacter1 = [[0, [.5, 0, 0, 0]],
[1, [.5, .3, 0, 0]],
[2, [.5, .3, 0, 0]],
[3, [.5, 1, 0, 0,]]]
writeFrames(f, kfsCharacter1)
f.write("# 0 1 2 3\n") #intervals for second character
#write frames here
kfsCharacter2 = [[0, [1, .5, 0, 45]],
[1, [.7, .5, 0, 45]],
[2, [.3, .5, 0, 45]],
[3, [0, .5, 0, 45]]]
writeFrames(f, kfsCharacter2)
def rightTurn():
f = open("rightTurn.patch", 'w')
f.write("5\n") #number of verts in the polygon
f.write("0.0 0.0 0.0\n") #polygon
f.write("1.0 0.0 0.0\n")
f.write("1.0 1.0 0.0\n")
f.write("0.5 1.0 0.0\n")
f.write("0.0 0.5 0.0\n")
f.write("1\n") #number of characters
f.write("# 0 1 2 3\n") #entrance, start of interaction, end of interaction, exit
#write frames here
#define some key frames. list of lists [time, [x, y, z, rotation (in degrees)]]
kfsCharacter1 = [[0, [.5, 0, 0, 0]],
[1, [.5, .5, 0, 0]],
[2, [.5, .5, 0, -90]],
[3, [1, .5, 0, -90,]]]
writeFrames(f, kfsCharacter1)
def leftTurn():
f = open("leftTurn.patch", 'w')
f.write("4\n") #number of verts in the polygon
f.write("0.0 0.0 0.0\n") #polygon
f.write("1.0 0.0 0.0\n")
f.write("1.0 1.0 0.0\n")
f.write("0.0 1.0 0.0\n")
f.write("1\n") #number of characters
f.write("# 0 1 2 3\n") #entrance, start of interaction, end of interaction, exit
#write frames here
#define some key frames. list of lists [time, [x, y, z, rotation (in degrees)]]
kfsCharacter1 = [[0, [.5, 0, 0, 0]],
[1, [.5, .5, 0, 0]],
[2, [.5, .5, 0, 90]],
[3, [0, .5, 0, 90,]]]
writeFrames(f, kfsCharacter1)
def straightAhead():
f = open("straightAhead.patch", 'w')
f.write("4\n") #number of verts in the polygon
f.write(".25 0.0 0.0\n") #polygon
f.write(".75 0.0 0.0\n")
f.write(".75 1.0 0.0\n")
f.write(".25 1.0 0.0\n")
f.write("1\n") #number of characters
f.write("# 0 1.5 1.5 3\n") #entrance, start of interaction, end of interaction, exit
#write frames here
#define some key frames. list of lists [time, [x, y, z, rotation (in degrees)]]
kfsCharacter1 = [[0, [.5, 0, 0, 0]],
[3, [.5, 1, 0, 0]]]
writeFrames(f, kfsCharacter1)
def turnAround():
f = open("turnAround.patch", 'w')
f.write("4\n") #number of verts in the polygon
f.write("0.0 0.0 0.0\n") #polygon
f.write("1.0 0.0 0.0\n")
f.write("1.0 1.0 0.0\n")
f.write("0.0 1.0 0.0\n")
f.write("1\n") #number of characters
f.write("# 0 1 2 3\n") #entrance, start of interaction, end of interaction, exit
#write frames here
#define some key frames. list of lists [time, [x, y, z, rotation (in degrees)]]
kfsCharacter1 = [[0, [.5, 0, 0, 0]],
[1, [.5, .5, 0, 0]],
[2, [.5, .5, 0, -180]],
[3, [.5, 0, 0, -180,]]]
writeFrames(f, kfsCharacter1)
def diamondThing():
f = open("diamondThing.patch", 'w')
f.write("4\n") #number of verts in the polygon
f.write("0.0 0.0 0.0\n") #polygon
f.write("1.0 0.0 0.0\n")
f.write("1.0 1.0 0.0\n")
f.write("0.0 1.0 0.0\n")
f.write("2\n") #number of characters
f.write("# 0 1 2 3\n") #entrance, start of interaction, end of interaction, exit
#write frames here
#define some key frames. list of lists [time, [x, y, z, rotation (in degrees)]]
kfsCharacter1 = [[0, [.5, 0, 0, 0]],
[1, [.5, .4, 0, 0]],
[1.5, [.6, .4, 0, -45]],
[2, [.6, .5, 0, -90]],
[3, [1, .5, 0, -90,]]]
writeFrames(f, kfsCharacter1)
f.write("# 0 1 2 3\n") #entrance, start of interaction, end of interaction, exit
#write frames here
#define some key frames. list of lists [time, [x, y, z, rotation (in degrees)]]
kfsCharacter2 = [[0, [0, .5, 0, -90]],
[1, [.4, .5, 0, -90]],
[1.5, [.4, .6, 0, -45]],
[2, [.5, .6, 0, 0]],
[3, [.5, 1, 0, 0,]]]
writeFrames(f, kfsCharacter2)
def smallTest():
f = open("smalltest.patch", 'w')
f.write("4\n") #number of verts in the polygon
f.write("0.0 0.0 0.0\n") #polygon
f.write("1.0 0.0 0.0\n")
f.write("1.0 1.0 0.0\n")
f.write("0.0 1.0 0.0\n")
f.write("1\n") #number of characters
f.write("# 0 .33 .66 1\n") #entrance, start of interaction, end of interaction, exit
#write frames here
#define some key frames. list of lists [time, [x, y, z, rotation (in degrees)]]
kfsCharacter1 = [[0, [.5, 0, 0, 0]],
[.33, [.5, .3, 0, 0]],
[.66, [.5, .3, 0, 0]],
[1, [.5, 1, 0, 0,]]]
writeFrames(f, kfsCharacter1)
def smallTest2():
f = open("smalltest2.patch", 'w')
f.write("4\n") #number of verts in the polygon
f.write("0.0 0.0 0.0\n") #polygon
f.write("1.0 0.0 0.0\n")
f.write("1.0 1.0 0.0\n")
f.write("0.0 1.0 0.0\n")
f.write("1\n") #number of characters
f.write("# 0 .33 .66 1\n") #entrance, start of interaction, end of interaction, exit
#write frames here
#define some key frames. list of lists [time, [x, y, z, rotation (in degrees)]]
kfsCharacter1 = [[0, [.5, 0, 0, 0]],
[.33, [.5, .5, 0, 0]],
[.66, [.5, .5, 0, 0]],
[1, [1, .5, 0, 0,]]]
writeFrames(f, kfsCharacter1)
#assume that characters are squares with sides of length .025
def stopGoSmall(): #this is the up and down version
f = open("stopGoSmall.patch", 'w')
f.write("4\n") #number of verts in the polygon
f.write("0.0 0.0 0.0\n") #polygon
f.write("1.0 0.0 0.0\n")
f.write("1.0 1.0 0.0\n")
f.write("0.0 1.0 0.0\n")
f.write("2\n") #number of characters
f.write("# 0 .33 .66 1\n") #entrance, start of interaction, end of interaction, exit
#write frames here
#define some key frames. list of lists [time, [x, y, z, rotation (in degrees)]]
kfsCharacter1 = [[0, [.5, 0, 0, 0]],
[.33, [.5, .3, 0, 0]],
[.66, [.5, .3, 0, 0]],
[1, [.5, 1, 0, 0,]]]
writeFrames(f, kfsCharacter1)
f.write("# 0 .33 .66 1\n") #intervals for second character
#write frames here
kfsCharacter2 = [[0, [1, .5, 0, 45]],
[.33, [.7, .5, 0, 45]],
[.66, [.3, .5, 0, 45]],
[1, [0, .5, 0, 45]]]
writeFrames(f, kfsCharacter2)
leftTurn()
#stopGoSmall()
#diamondThing()
#turnAround()
#straightAhead()
#rightTurn()
#stopGoCardinal()
#stopGo()
| [
[
1,
0,
0.0038,
0.0038,
0,
0.66,
0,
526,
0,
5,
0,
0,
526,
0,
0
],
[
1,
0,
0.0077,
0.0038,
0,
0.66,
0.0714,
954,
0,
1,
0,
0,
954,
0,
0
],
[
2,
0,
0.0728,
0.1034,
0,
... | [
"from math import sqrt, sin, cos, tan, pi",
"from numpy import *",
"def lerpt(startKey, endKey):\n step = 1 / 30.0 #assuming 30 frames a second\n\n #key format is [time, pose] i have the time now, so i just pull out the pose\n startTime = startKey[0]\n endTime = endKey[0]\n\n startPose = startKey... |
#!/usr/bin/python
from random import uniform
def find_xIndex(x, array):
i = 0
for value in array:
if x < value:
return i
i += 1
return 0
func = lambda x:x**2.
weight = lambda x:x**1.
x = [.001*(i+1.) for i in range(1000)]
# create the weight prob. boundary and name as norm_w
weightList = [weight(i) for i in x]
sum_w = sum(weightList)
norm_w = []
t_sum = 0.0
for i in weightList:
t_sum += i/sum_w
norm_w.append(t_sum)
result = 0.0
result_w = 0.0
N_iter = 10000
# start calculate average
for i in range(N_iter):
ran = uniform(0,1)
x_index = find_xIndex(ran, norm_w)
result += func(x[x_index])/weight(x[x_index])
result_w += 1/weight(x[x_index])
# normalized
avg = result/float(N_iter)
print avg
| [
[
1,
0,
0.0541,
0.027,
0,
0.66,
0,
715,
0,
1,
0,
0,
715,
0,
0
],
[
2,
0,
0.1622,
0.1892,
0,
0.66,
0.0667,
593,
0,
2,
1,
0,
0,
0,
0
],
[
14,
1,
0.1081,
0.027,
1,
0.9... | [
"from random import uniform",
"def find_xIndex(x, array):\n\ti = 0\n\tfor value in array:\n\t\tif x < value:\n\t\t\treturn i\n\t\ti += 1\n\treturn 0",
"\ti = 0",
"\tfor value in array:\n\t\tif x < value:\n\t\t\treturn i\n\t\ti += 1",
"\t\tif x < value:\n\t\t\treturn i",
"\t\t\treturn i",
"\treturn 0",
... |
#!/usr/bin/python
import time
from random import uniform
# position : [up, down, left, right]
moveList = {
1 : (4, 0, 0, 2),
2 : (5, 0, 1, 3),
3 : (6, 0, 2, 0),
4 : (7, 1, 0, 5),
5 : (8, 2, 4, 6),
6 : (9, 3, 5, 0),
7 : (0, 4, 0, 8),
8 : (0, 5, 7, 9),
9 : (0, 6, 8, 0)
}
Nhits = {
1 : 0,
2 : 0,
3 : 0,
4 : 0,
5 : 0,
6 : 0,
7 : 0,
8 : 0,
9 : 0
}
## create record
#iter_time = []
#record = {}
#tmp = []
#for i in Nhits.keys():
# record[i] = tmp
N = 4000
startPosition = 9
# start simulate
Nhits[startPosition] += 1
position = startPosition
#iter_time.append(1)
#for i in Nhits.keys():
# record[i].append(Nhits[i])
for i in range(1, N):
direction = int(uniform(0, 4)) # 0:up, 1:down, 2:left, 3:right
nextPosition = moveList[position][direction]
if nextPosition != 0:
position = nextPosition
Nhits[position] += 1
# iter_time.append(i)
# for j in Nhits.keys():
# record[j].append(Nhits[j])
print position, Nhits.values()
#time.sleep(.005)
# end
# convert array format for gnuplot
plotarray = []
for i in moveList.keys():
print i, '\t', float(Nhits[i])/float(N)
plotarray.append([i, float(Nhits[i])/float(N)])
import Gnuplot as gp
plotItem = gp.PlotItems.Data(plotarray, with_='boxes')
g = gp.Gnuplot()
g('set yrange [0:1]')
g.xlabel('Position')
g.ylabel('Probability')
g.plot(plotItem)
#site_hist = []
#for site in Nhits.keys():
# tmp = []
# for i in range(len(record[site])):
# tmp.append([i, record[site][i]])
# site_hist.append(tmp)
#
#
#plotItems = []
#for i in Nhits.keys():
# plotItems.append(gp.PlotItems.Data(site_hist[i-1]))
#
#g_iter = gp.Gnuplot()
#g_iter.xlabel('Move times')
#g_iter.ylabel('Count')
#for i in plotItems:
# g_iter.replot(i)
raw_input()
| [
[
1,
0,
0.0206,
0.0103,
0,
0.66,
0,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.0309,
0.0103,
0,
0.66,
0.0588,
715,
0,
1,
0,
0,
715,
0,
0
],
[
14,
0,
0.1031,
0.1134,
0,
... | [
"import time",
"from random import uniform",
"moveList = {\n1 : (4, 0, 0, 2),\n2 : (5, 0, 1, 3),\n3 : (6, 0, 2, 0),\n4 : (7, 1, 0, 5),\n5 : (8, 2, 4, 6),\n6 : (9, 3, 5, 0),\n7 : (0, 4, 0, 8),",
"Nhits = {\n1 : 0,\n2 : 0,\n3 : 0,\n4 : 0,\n5 : 0,\n6 : 0,\n7 : 0,",
"N = 4000",
"startPosition = 9",
"positio... |
#!/usr/bin/python
import Gnuplot
import math
from random import uniform
def markov_throw(delta, x, y):
return x + uniform(-delta, delta), y + uniform(-delta, delta)
def direct_throw(start, end):
return uniform(start, end), uniform(start, end)
def IsInSquare(x, y):
if abs(x) < 1. and abs(y) < 1.:
return True
else:
return False
def IsInCircle(x, y):
if x**2 + y**2 < 1.:
return True
else:
return False
def CalErr(summ, sqrSum, N):
avg = summ/float(N)
st_err = (sqrSum/float(N)-avg**2)**1/2
return st_err
def markov_pi(delta, N):
x, y = 1., 1. # initial position
N_hits = 0
record = []
sumPi = 0.0 #sum(f)
sumSqrPi = 0.0 #sum(f^2)
for i in range(1,N):
tx, ty = markov_throw(delta, x, y)
if IsInSquare(tx, ty):
x, y = tx, ty
if IsInCircle(x, y):
N_hits += 1
c_pi = N_hits * 4. / float(i)
sumPi += c_pi
sumSqrPi += c_pi**2
if i > 2:
err = CalErr(sumPi, sumSqrPi, i)
else:
err = 0.0
if i % 100 == 1:
record.append([i, c_pi, err])
return record
if __name__== "__main__":
#data = markov_pi(.3, 10001)
#item_pi = Gnuplot.PlotItems.Data(data, cols=(0,1), title = 'pi', with_='lp pt 6')
#item_err = Gnuplot.PlotItems.Data(data, cols=(0,2), title = 'err', with_='lp pt 6')
#g1 = Gnuplot.Gnuplot()
#g1('set multiplot title "Markov_pi" layout 2,1')
#g1.plot(item_pi)
#g1.plot(item_err)
N = 10001 # iter times
delta_value = [0.01, 0.1, 0.3, 0.5, 0.99]
pi_items = []
err_items = []
for i in delta_value:
calc = markov_pi(i, N)
pi_items.append(Gnuplot.PlotItems.Data(calc, cols=(0, 1), title='pi '+str(i)))
err_items.append(Gnuplot.PlotItems.Data(calc, cols=(0, 2), title='err '+str(i)))
g_pi = Gnuplot.Gnuplot()
g_pi.xlabel('N')
g_pi.ylabel('pi')
g_pi('set yrange [0:4.7]')
g_err = Gnuplot.Gnuplot()
g_err('set logscale xy')
g_err.xlabel('N')
g_err.ylabel('err')
for i in range(len(delta_value)):
g_pi.replot(pi_items[i])
g_err.replot(err_items[i])
raw_input('Press Enter to exit')
| [
[
1,
0,
0.0202,
0.0101,
0,
0.66,
0,
580,
0,
1,
0,
0,
580,
0,
0
],
[
1,
0,
0.0303,
0.0101,
0,
0.66,
0.1111,
526,
0,
1,
0,
0,
526,
0,
0
],
[
1,
0,
0.0404,
0.0101,
0,
... | [
"import Gnuplot",
"import math",
"from random import uniform",
"def markov_throw(delta, x, y):\n\treturn\tx + uniform(-delta, delta), y + uniform(-delta, delta)",
"\treturn\tx + uniform(-delta, delta), y + uniform(-delta, delta)",
"def direct_throw(start, end):\n\treturn uniform(start, end), uniform(start... |
from random import uniform
def cont_integral(a, b, N):
	"""Monte-Carlo estimate of the weighted integral via importance sampling.

	Drawing u uniform on [0, 1) and setting x = u**(1/(a+1)) samples x from
	the normalized density (a+1)*x**a on [0, 1]; averaging the observable
	x**(b-a) then estimates E[x^(b-a)] = (a+1)/(b+1).

	a -- exponent of the sampling density
	b -- exponent combined with the weight (observable is x**(b-a))
	N -- number of samples
	"""
	summ = 0.0
	# (the original also kept an accumulator `summ_n` that was never used)
	for i in range(N):
		x = uniform(0, 1)**(1./(a+1.))
		area = x**(b-a)
		summ += area
	return summ/float(N)
# Demo: for a = 2, b = 3 the estimate should approach (a+1)/(b+1) = 0.75.
a = 2.
b = 3.
N = 1000
print(cont_integral(a, b, N))
| [
[
1,
0,
0.0625,
0.0625,
0,
0.66,
0,
715,
0,
1,
0,
0,
715,
0,
0
],
[
2,
0,
0.4062,
0.5,
0,
0.66,
0.2,
893,
0,
3,
1,
0,
0,
0,
3
],
[
14,
1,
0.25,
0.0625,
1,
0.05,
... | [
"from random import uniform",
"def cont_integral(a, b, N):\n\tsumm = 0.0\n\tsumm_n = 0.0\n\tfor i in range(N):\n\t\tx = uniform(0, 1)**(1./(a+1.))\n\t\tarea = x**(b-a)\n\t\tsumm += area\n\treturn summ/float(N)",
"\tsumm = 0.0",
"\tsumm_n = 0.0",
"\tfor i in range(N):\n\t\tx = uniform(0, 1)**(1./(a+1.))\n\t\... |
from random import uniform
class ThrowMethod:
	"""Base class for point-throwing strategies; Throw() is the hook to override."""

	def Throw(self):
		# Degenerate default: a zero "throw".
		return 0.
class DirectPyThrow(ThrowMethod):
	"""Direct sampling: each Throw() is an independent uniform point in [start, end]^2."""

	def __init__(self, start = 0., end = 1.):
		self.start = start
		self.end = end

	def Throw(self):
		lo, hi = self.start, self.end
		return uniform(lo, hi), uniform(lo, hi)
class DirectNaiveThrow(ThrowMethod):
	"""Direct sampling driven by a hand-rolled linear congruential generator.

	Deterministic for a given seed; each Throw() returns a point in
	[start, start+lens)^2.
	"""

	def __init__(self, seed = 43289, start = 0., end = 1.):
		self.start = start
		self.lens = end - start
		self.idum = seed	# LCG state
		self.m = 134456		# LCG modulus
		self.n = 8121		# LCG multiplier
		self.k = 28411		# LCG increment

	def _NextUniform(self):
		# One LCG step, mapped into [start, start + lens).
		self.idum = (self.idum*self.n + self.k) % self.m
		return self.start + self.lens*float(self.idum)/float(self.m)

	def Throw(self):
		# BUG FIX: the original advanced the generator once and returned the
		# same number for both coordinates, so every sample fell on the
		# diagonal x == y.  Draw the coordinates from two successive steps.
		return self._NextUniform(), self._NextUniform()
class MarkovThrow(ThrowMethod):
	"""Markov-chain proposal: step of size <= delta, rejected outside (-1, 1)^2."""

	def __init__(self, delta = .1, position = (1., 1.)):
		self.delta = delta
		self.x, self.y = position[0], position[1]

	def Throw(self):
		step = self.delta
		cand_x = self.x + uniform(-step, step)
		cand_y = self.y + uniform(-step, step)
		# Move only if the candidate stays strictly inside the square;
		# otherwise the walker stays put (the old position is returned again).
		if (abs(cand_x) < 1.) and (abs(cand_y) < 1.):
			self.x = cand_x
			self.y = cand_y
		return self.x, self.y
##############################################################################
class Record:
	"""Append-only history of hit/miss flags (or any sampled values)."""

	def __init__(self):
		self.hist = []

	def Clear(self):
		# Rebind (rather than mutate) so previously handed-out lists survive.
		self.hist = []

	def Add(self, value):
		self.hist.append(value)

	def GetRecord(self):
		return self.hist
##############################################################################
class MonteCarlo_pi:
	"""Estimate pi by counting unit-circle hits among thrown points.

	Collaborators are injected: a throw strategy exposing Throw() -> (x, y)
	and a record object exposing Clear()/Add()/GetRecord() for the 1/0
	hit-miss history.
	"""

	def __init__(self):
		self.hits = 0
		self.rejects = 0
		self.x = 0.0
		self.y = 0.0

	def SetThrowMethod(self, throwMethod):
		self.throwMethod = throwMethod

	def SetRecord(self, record):
		self.record = record

	def GetRecord(self):
		return self.record.GetRecord()

	def Throw(self):
		self.x, self.y = self.throwMethod.Throw()

	def IsInCircleRange(self):
		# Strictly inside the unit circle?
		return self.x**2. + self.y**2. < 1.

	def HitNotify(self):
		self.record.Add(1)

	def RejectNotify(self):
		self.record.Add(0)

	def Simulate(self, N):
		"""Throw N points, recording 1 for each circle hit and 0 otherwise."""
		self.record.Clear()
		for _ in range(N):
			self.Throw()
			if self.IsInCircleRange():
				self.HitNotify()
			else:
				self.RejectNotify()
class MonteCarlo_integral:
	"""Importance-sampled integral: records x**(obser - prob) with x ~ u**(1/(prob+1))."""

	def __init__(self):
		self.prob = 0.		# exponent of the sampling density
		self.obser = 0.		# exponent of the observable
		self.diff = 0.		# obser - prob, the recorded power

	def SetFunction(self, prob, obser):
		self.prob = prob
		self.obser = obser
		self.diff = self.obser - self.prob

	def SetRecord(self, record):
		self.record = record

	def GetRecord(self):
		return self.record.GetRecord()

	def Throw(self):
		# Inverse-CDF sampling of the density ~ x**prob on [0, 1].
		self.x = uniform(0, 1)**(1./(self.prob + 1.))

	def Simulate(self, N):
		"""Clear the record, then add N sampled observable values."""
		self.record.Clear()
		for _ in range(N):
			self.Throw()
			self.record.Add(self.x**self.diff)
##############################################################################
def Binning(base = 2, record = []):
	"""Coarse-grain `record`: average each run of `base` consecutive samples.

	Returns a new, shorter list.  The input is only iterated, never mutated,
	so the mutable default argument is harmless here.  Raises
	ZeroDivisionError for an empty `record` (count is still 0 at the final
	flush below).
	"""
	#binning 2N array to N array
	tmp_sum = 0.0
	tmp_record = []
	count = 0
	for i in record:
		if count < base:
			# Still filling the current bin.
			tmp_sum += i
		else:
			# Bin full: flush its mean and start a new bin with this sample.
			tmp_record.append(tmp_sum/float(count))
			count = 0
			tmp_sum = i
		count += 1
	# Flush the final (possibly shorter) bin.
	tmp_record.append(tmp_sum/float(count))
	return tmp_record
def CalAvgStderr(record):
	"""Standard error of the mean of `record`: sqrt((<x^2> - <x>^2) / n)."""
	n = 0
	total = 0.0
	total_sq = 0.0
	for value in record:
		n += 1
		total += value
		total_sq += value**2.
	mean = total/float(n)
	return ((total_sq/float(n) - mean**2.)/float(n))**(1./2.)
def CalStderrWithBinning(base, record):
	"""Repeatedly bin `record` by `base`, collecting (length, stderr) per level.

	Returns two parallel lists: the sample counts and the standard errors,
	starting with the unbinned data.  Both are empty when len(record) <= base.
	"""
	sizes = []
	errors = []
	n = len(record)
	if n > base:
		sizes.append(n)
		errors.append(CalAvgStderr(record))
	while n > base:
		record = Binning(base, record)
		errors.append(CalAvgStderr(record))
		n = len(record)
		sizes.append(n)
	return sizes, errors
if __name__== '__main__':
baseNum = 2
N = baseNum**15
delta = .3
startPosition = (1., 1.)
p = MonteCarlo_pi()
#throwMethod = DirectPyThrow(0., 1.)
#throwMethod = DirectNaiveThrow(12345, 0., 1.)
throwMethod = MarkovThrow(delta, startPosition)
p.SetThrowMethod(throwMethod)
p.SetRecord(Record())
p.Simulate(N)
numBinning, errBinning = CalStderrWithBinning(baseNum, p.GetRecord())
for i in range(len(numBinning)):
print numBinning[i], errBinning[i]
import Gnuplot
g = Gnuplot.Gnuplot()
g.plot(errBinning)
raw_input()
############################################################################
#N = 1000
#prob = 2.
#obser = 3.
#c = MonteCarlo_integral()
#c.SetRecord(Record())
#c.SetFunction(prob, obser)
#c.Simulate(N)
#print sum(c.GetRecord())/float(N)
| [
[
1,
0,
0.0049,
0.0049,
0,
0.66,
0,
715,
0,
1,
0,
0,
715,
0,
0
],
[
3,
0,
0.0196,
0.0147,
0,
0.66,
0.0909,
337,
0,
1,
0,
0,
0,
0,
0
],
[
2,
1,
0.0221,
0.0098,
1,
0.... | [
"from random import uniform",
"class ThrowMethod:\n\tdef Throw(self):\n\t\treturn 0.",
"\tdef Throw(self):\n\t\treturn 0.",
"\t\treturn 0.",
"class DirectPyThrow(ThrowMethod):\n\tdef __init__(self, start = 0., end = 1.):\n\t\tself.start = start\n\t\tself.end = end\n\n\tdef Throw(self):\n\t\treturn uniform(s... |
import numpy as np
from random import uniform
class record(object):
	"""Running accumulator for a sampled quantity f: sum(f), sum(f^2) and n."""

	def __init__(self):
		self.summ = 0.		# running sum(f)
		self.sumSquare = 0.	# running sum(f^2)
		self.count = 0		# number of samples

	def Clear(self):
		# Reset all three accumulators.
		self.__init__()

	def Add(self, value):
		self.count += 1
		self.summ += value
		self.sumSquare += value*value

	def GetAvg(self):
		# <f>
		return self.summ/float(self.count)

	def GetAvgSquare(self):
		# <f^2>
		return self.sumSquare/float(self.count)
# end of record
SPINUP = 1. # define Spin up = 1.
SPINDOWN = -1. # define Spin down = -1.
class twoD_IsingModel(object):
	"""2-D Ising model on an nx*ny lattice sampled with Metropolis updates.

	CalcEnergy()/CalcSumSpins() establish the totals from scratch; after
	that LocalUpdate() maintains them incrementally from single-spin moves.
	Observables are reported per spin (divided by self.area) from two
	`record` accumulators.  Python 2 code (uses xrange).
	"""

	def __init__(self, dim = (1, 1), k = 1., J = 1.):
		self.energy = 0.	# total energy of the lattice
		self.sumSpins = 0.	# total magnetization (sum of all spins)
		self.k = k		# Boltzmann constant
		self.J = J		# spin-spin coupling energy
		self.nx = dim[0]
		self.ny = dim[1]
		self.lattice = np.zeros((self.nx, self.ny))	# spins; filled by Thermalize()
		self.area = float(self.nx * self.ny)		# site count, for per-spin averages
		self.E_Record = record()		# history of total energy
		self.sumSpins_Record = record()		# history of total magnetization

	def SetTemperatureField(self, J = 1., T = 10., H = 0.):
		# NOTE(review): the J argument is accepted but never used here; the
		# coupling fixed in __init__ stays in effect.
		self.H = H		# external magnetic field
		self.T = T		# temperature
		self.beta = 1./self.T/self.k

	def Thermalize(self):
		# Random hot start: each site independently up or down.
		for i in xrange(self.nx):
			for j in xrange(self.ny):
				self.lattice[i,j] = SPINUP if int(uniform(0, 2))==1 else SPINDOWN

	def CalcEnergy(self):
		# Total energy from scratch.  The j-1 / i-1 indices rely on numpy's
		# negative-index wrap-around, giving periodic boundaries while
		# counting each bond exactly once.
		tmpE = 0.
		for i in xrange(self.nx):
			for j in xrange(self.ny):
				tmpE += -self.J \
						* ( self.lattice[i,j]*self.lattice[i,j-1] \
						+ self.lattice[i,j]*self.lattice[i-1,j] ) \
						- self.H*self.lattice[i,j]
		self.energy = tmpE

	def CalcSumSpins(self):
		# Total magnetization from scratch.
		self.sumSpins = self.lattice.sum()

	def FlipSpin(self, ix, iy):
		self.lattice[ix,iy] = SPINDOWN if self.lattice[ix,iy] == SPINUP else SPINUP

	def GetNeighborEnergy(self, ix, iy):
		"""Bond energy of site (ix, iy) with its four neighbors, plus its field term."""
		f_ix = (ix+1) % self.nx		# right neighbor, wrapped explicitly
		b_ix = ix-1			# left neighbor (negative index wraps)
		f_iy = (iy+1) % self.ny
		b_iy = iy-1
		E = -( self.lattice[f_ix,iy] + self.lattice[b_ix,iy] \
			+ self.lattice[ix,f_iy] + self.lattice[ix,b_iy] ) \
			* self.lattice[ix,iy] * self.J \
			- self.H * self.lattice[ix,iy]
		return E

	def Prob(self, deltaE):
		# Metropolis acceptance: accept with probability exp(-beta * deltaE).
		if np.e**(-deltaE*self.beta) > uniform(0, 1):
			return True
		else:
			return False

	def LocalUpdate(self, x, y):
		"""One Metropolis step: tentatively flip (x, y); keep it or revert."""
		oldE = self.GetNeighborEnergy(x, y)
		oldSpin = self.lattice[x,y]
		self.FlipSpin(x, y)
		newE = self.GetNeighborEnergy(x, y)
		newSpin = self.lattice[x,y]
		deltaE = newE - oldE
		deltaSpin = newSpin - oldSpin
		if (deltaE <= 0) or self.Prob(deltaE):
			# Accepted: update the running totals incrementally.
			self.energy += deltaE
			self.sumSpins += deltaSpin
		else:
			# Rejected: undo the flip.
			self.FlipSpin(x, y)

	def Start(self, N, thermalStep):
		"""Run N single-site updates; record observables once i > thermalStep."""
		self.E_Record.Clear()
		self.sumSpins_Record.Clear()
		self.CalcEnergy()
		self.CalcSumSpins()
		for i in xrange(N):
			# Pick a random lattice site.
			x = int(uniform(0, self.nx))
			y = int(uniform(0, self.ny))
			self.LocalUpdate(x, y)
			if i > thermalStep:
				self.sumSpins_Record.Add(self.GetSumSpins())
				self.E_Record.Add(self.GetEnergy())

	def GetEnergy(self):
		return self.energy

	def GetSumSpins(self):
		return self.sumSpins

	def GetAvgEnergy(self):
		# <E> per spin
		return self.E_Record.GetAvg()/self.area

	def GetAvgMagnetism(self):
		# <M> per spin
		return self.sumSpins_Record.GetAvg()/self.area

	def GetAvgSquarMagnetism(self):
		# <M^2> per spin squared
		return self.sumSpins_Record.GetAvgSquare()/self.area**2.

	def GetSpecificHeat(self):
		# Specific heat from energy fluctuations: (<E^2> - <E>^2) * beta / T.
		return ( self.E_Record.GetAvgSquare()/self.area**2. \
			- (self.E_Record.GetAvg()/self.area)**2. ) \
			* self.beta/self.T

	def GetMagneticSusceptibility(self):
		# Susceptibility from magnetization fluctuations: (<M^2> - <M>^2) * beta.
		return ( self.sumSpins_Record.GetAvgSquare()/self.area**2. \
			- (self.sumSpins_Record.GetAvg()/self.area)**2. ) \
			* self.beta
# end of twoD_IsingModel
def SaveData(savePath="", data=[], labels=""):
	"""Write `labels`, then one tab-separated line per row of `data`, to savePath.

	Every cell is str()-ed and followed by a tab (including the last one),
	matching the original output format exactly.
	"""
	with open(savePath, 'w') as out:
		out.write(labels + '\n')
		for row in data:
			for item in row:
				out.write(str(item)+'\t')
			out.write('\n')
# end of SaveData
def LoopT_IsingModel( model, TList, iterN=None, thermalStep=None ):
	"""Sweep `model` over the temperatures in TList and collect observables.

	model       -- twoD_IsingModel-like object (Thermalize / SetTemperatureField /
	               Start / Get* interface)
	TList       -- iterable of temperatures, simulated in order
	iterN       -- Monte-Carlo steps per temperature; defaults to the
	               module-level global `iterN` for backward compatibility
	thermalStep -- steps to discard before recording; same global fallback

	Returns (label_string, [TList, <M^2> list, susceptibility list,
	specific-heat list]).
	"""
	# BUG FIX: the original read iterN/thermalStep implicitly from globals set
	# by the __main__ block and returned the global `temperatureList` instead
	# of the TList argument, so any other temperature list silently came back
	# with the wrong temperature axis.  The global fallback below keeps the
	# existing caller working unchanged.
	if iterN is None:
		iterN = globals()['iterN']
	if thermalStep is None:
		thermalStep = globals()['thermalStep']
	avgE_List = []		# <E>
	avgM_List = []		# <M>
	avgSquarM_List = []	# <M^2>
	MS_List = []		# magnetic susceptibility
	SH_List = []		# specific heat
	model.Thermalize()
	for T in TList:
		print('Temperature %s' % T)
		model.SetTemperatureField( T = T, H = 0. )
		model.Start( iterN, thermalStep )
		avgE_List.append( model.GetAvgEnergy() )
		avgM_List.append( model.GetAvgMagnetism() )
		avgSquarM_List.append( model.GetAvgSquarMagnetism() )
		MS_List.append( model.GetMagneticSusceptibility() )
		SH_List.append( model.GetSpecificHeat() )
	dataLabel = 'temperature\tM_Square\tSuscep\tSpecificHeat\t'
	return dataLabel, [ TList, avgSquarM_List, MS_List, SH_List ]
if __name__ == '__main__':
import time
L = 20 # lattice size
dim = ( L, L ) # dim of lattice is L*L
iterNumPerSite = 10
iterN = L**2 * iterNumPerSite # iter times
thermalStep = int( iterN * 0.3 ) # reach thermalize after thermalStep
temperatureList = np.linspace( 5., 1., 41 ) # cooling down
tic = time.time()
model = twoD_IsingModel( dim )
dataLabel, tmpData = LoopT_IsingModel( model, temperatureList )
toc = time.time()
print "Time ", toc-tic
allData = np.array( tmpData ).transpose()
savePath = "2D_IsingModelResult.txt"
SaveData( savePath = savePath, data = allData, labels = dataLabel )
## reshape data for gnuplot
#avgEList = []
#avgMList = []
#avgSquarMList = []
#SHList = []
#MSList = []
#for i in range(len(temperatureList)):
# avgEList.append([temperatureList[i], avgE_List[i]])
# avgMList.append([temperatureList[i], avgM_List[i]])
# avgSquarMList.append([temperatureList[i], avgSquarM_List[i]])
# SHList.append([temperatureList[i], SH_List[i]])
# MSList.append([temperatureList[i], MS_List[i]])
#import Gnuplot
#g = Gnuplot.Gnuplot()
#g.xlabel("Temperature (K)")
#g.ylabel("<E>")
#g.plot(avgEList)
#g2 = Gnuplot.Gnuplot()
#g2.xlabel("Temperature (K)")
#g2.ylabel("<M>")
#g2.plot(avgMList)
#g3 = Gnuplot.Gnuplot()
#g3.xlabel("Temperature (K)")
#g3.ylabel("Cv")
#g3.plot(SHList)
#g4 = Gnuplot.Gnuplot()
#g4.xlabel("Temperature (K)")
#g4.ylabel("X")
#g4.plot(MSList)
#raw_input()
| [
[
1,
0,
0.0041,
0.0041,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.0082,
0.0041,
0,
0.66,
0.125,
715,
0,
1,
0,
0,
715,
0,
0
],
[
3,
0,
0.0615,
0.0861,
0,
0... | [
"import numpy as np",
"from random import uniform",
"class record(object):\n\tdef __init__(self):\n\t\tself.summ = 0.\t\t# summation f\n\t\tself.sumSquare = 0.\t# summation f^2\n\t\tself.count = 0\n\n\tdef Clear(self):\n\t\tself.summ = 0.",
"\tdef __init__(self):\n\t\tself.summ = 0.\t\t# summation f\n\t\tself... |
import csv
import numpy as np

# Load a tab-separated table, skip the first row, drop the last column of
# every remaining row, and print the cells as a float matrix.
with open('test.txt', 'r') as fp:
    reader = csv.reader(fp, delimiter='\t')
    # Skip the first ROW (the original comment said "column"); next(reader)
    # replaces the Python-2-only reader.next().
    next(reader)
    data = [row[:-1] for row in reader]
print(np.array(data, dtype='float'))
| [
[
1,
0,
0.1,
0.1,
0,
0.66,
0,
312,
0,
1,
0,
0,
312,
0,
0
],
[
1,
0,
0.2,
0.1,
0,
0.66,
0.1667,
954,
0,
1,
0,
0,
954,
0,
0
],
[
14,
0,
0.4,
0.1,
0,
0.66,
0.3333,... | [
"import csv",
"import numpy as np",
"a = csv.reader(open('test.txt', 'r'), delimiter='\\t')",
"a.next()\t# skip a column",
"data = []",
"for row in a:\n\tdata.append(row[:-1])",
"\tdata.append(row[:-1])",
"print(np.array(data, dtype='float'))"
] |
import numpy as np
from scipy.constants import codata
from random import uniform
class OneD_Ising:
	"""1-D Ising chain with periodic boundaries, sampled by Metropolis moves.

	beta is taken as 1/T with no explicit Boltzmann-constant factor.
	NOTE(review): the default T = 0. makes __init__ divide by zero, so
	callers must pass a positive temperature.  Python 2 code (print
	statements in Simulate).
	"""

	def __init__(self, n = 0, J = 1., T = 0., B = 0.):
		self.up = 1.			# spin-up value
		self.down = -1.			# spin-down value
		self.J = J			# coupling constant
		self.n = n			# number of spins
		self.B = B			# external field
		self.chain = np.zeros(self.n)	# the spin chain; filled by Thermalize()
		self.currH = 0.			# energy of the current configuration
		self.sumH = 0.			# running sum of energies over all steps
		self.beta = 1/float(T)

	def Thermalize(self):
		# Random hot start, then cache the starting energy.
		for i in range(self.n):
			self.chain[i] = self.up if int(round(uniform(0, 1))) else self.down
		self.currH = self.GetEnergy()

	def GetEnergy(self):
		"""Total energy; chain[i-1] at i == 0 wraps around (periodic)."""
		H = 0.
		for i in range(self.n):
			# periodic boundary conditions
			H += -self.J*self.chain[i]*self.chain[i-1] - self.B*self.chain[i]
		return H

	def FlipSpin(self, n):
		self.chain[n] = self.up if self.chain[n] == self.down else self.down

	def LocalUpdate(self):
		"""One Metropolis step: flip a random spin, revert it if rejected."""
		tmpH = 0.
		n = int(round(uniform(0, 1)*(self.n-1)))	# random site index
		self.FlipSpin(n)
		tmpH = self.GetEnergy()		# full O(n) recomputation every step
		if tmpH > self.currH:
			# Uphill move: accept with probability exp(-beta * dH).
			if uniform(0, 1) > np.e**((self.currH-tmpH)*self.beta):
				self.FlipSpin(n)	#undo
			else:
				self.currH = tmpH
		else:
			self.currH = tmpH
		# Accumulate the (possibly unchanged) current energy.
		self.sumH += self.currH

	def Simulate(self, N):
		"""Thermalize, run N update steps, then print chain / energy / <H>."""
		self.currH = 0.
		self.sumH = 0.
		self.Thermalize()
		for i in range(N):
			self.LocalUpdate()
		print self.chain
		print self.currH
		print self.sumH/float(N)
# Sweep temperatures 10, 20, ..., 100 and record the mean energy at each.
T = [float(i+1)*10. for i in range(10)]
N = 10000
Ts = []
Hs = []
for temperature in T:
	chain = OneD_Ising(20, 1., temperature, 1.)
	chain.Simulate(N)
	Ts.append(temperature)
	Hs.append(chain.sumH/float(N))

import Gnuplot
g = Gnuplot.Gnuplot()
g.plot(Hs)
raw_input()
| [
[
1,
0,
0.0145,
0.0145,
0,
0.66,
0,
954,
0,
1,
0,
0,
954,
0,
0
],
[
1,
0,
0.029,
0.0145,
0,
0.66,
0.0833,
19,
0,
1,
0,
0,
19,
0,
0
],
[
1,
0,
0.0435,
0.0145,
0,
0.6... | [
"import numpy as np",
"from scipy.constants import codata",
"from random import uniform",
"class OneD_Ising:\n\tdef __init__(self, n = 0, J = 1., T = 0., B = 0.):\n\t\tself.up = 1.\n\t\tself.down = -1.\n\t\tself.J = J\n\t\tself.n = n\n\t\tself.B = B\n\t\tself.chain = np.zeros(self.n)",
"\tdef __init__(self,... |
from matplotlib.pylab import *
import numpy as np
t = np.arange(0, 10, 1)
y1 = np.e**(-t/2.)
y2 = np.e**(-t/5.)
ion()
#title("This is test title")
#xlabel("Hello", fontsize=28)
#ylabel("$\Delta$R/R", fontsize=28, weight='bold')
#grid(True)
"""
marker
marker +, o, <, >, ^, ., s, v, x ...etc.
ms markersize
mew markeredgewidth
mec markeredgecolor
mfc markerfillercolor
line
c color
ls linestyle -, --, -., :
lw linewidth
"""
"""
b blue
g green
r red
w white
k black
y yellow
c cyan
m magenta
(0.18, 0.31, 0.31) an RGB tuple
"""
#plot(t, y1, 'b-',marker='o', ms=10, mew=3, mfc='w', mec='b', lw=3, ls='--')
X = 2*rand(20, 20)
#imshow(X, vmax = 3, vmin = 0)
anorm = normalize(1, 2)
imshow(X, anorm)
xlabel("Time delay (ps)")
ylabel("$\Delta$ R/R")
jet()
cbar = colorbar(orientation='horizontal') # vertical
cbar.set_label("Signals")
#gray()
show()
raw_input()
| [
[
1,
0,
0.02,
0.02,
0,
0.66,
0,
607,
0,
1,
0,
0,
607,
0,
0
],
[
1,
0,
0.04,
0.02,
0,
0.66,
0.0588,
954,
0,
1,
0,
0,
954,
0,
0
],
[
14,
0,
0.08,
0.02,
0,
0.66,
0... | [
"from matplotlib.pylab import *",
"import numpy as np",
"t = np.arange(0, 10, 1)",
"y1 = np.e**(-t/2.)",
"y2 = np.e**(-t/5.)",
"ion()",
"\"\"\"\nmarker\n\tmarker\t+, o, <, >, ^, ., s, v, x ...etc.\nms\tmarkersize\nmew\tmarkeredgewidth\nmec\tmarkeredgecolor\nmfc\tmarkerfillercolor",
"\"\"\"\nb\tblue\ng... |
from pylab import *
from scipy import optimize
from numpy import *
class Parameter:
    """Mutable scalar wrapper: call the instance to read, set() to write.

    fit() pushes each leastsq trial value into these via set(), so the
    model function can read its current parameters by calling them.
    """

    def __init__(self, value):
        self.value = value

    def set(self, value):
        self.value = value

    def __call__(self):
        return self.value
def fit(function, parameters, y, x = None):
    """Least-squares fit of `function` to `y` by varying `parameters`.

    function   -- callable of x that reads its parameters by calling the
                  Parameter objects in `parameters`
    parameters -- list of Parameter-like objects (set() / __call__)
    y          -- target data array
    x          -- abscissa; defaults to arange(len(y))

    Returns optimize.leastsq's (solution, status) pair; the Parameter
    objects are left holding the values of the last evaluation.
    """
    def f(params):
        # BUG FIX: the original ignored `params`, so leastsq varied values
        # that never reached `function` and the "fit" returned the initial
        # guess.  Push each trial value into its Parameter first (this is
        # the standard scipy-cookbook fitting recipe).
        for i, p in enumerate(parameters):
            p.set(params[i])
        return y - function(x)

    if x is None: x = arange(y.shape[0])
    p = [param() for param in parameters]
    return optimize.leastsq(f, p)
# giving initial parameters (deliberately off from the true values so the
# fit has something to do)
mu = Parameter(7)
sigma = Parameter(3)
height = Parameter(5)
p = [mu, sigma, height]

# define your function: a Gaussian whose parameters are read live from the
# Parameter objects above
def f(x): return height() * exp(-((x-mu())/sigma())**2)

# create test data (noise-free, evaluated at the initial parameters)
x0 = linspace(0., 20., 100)
data = f(x0)

# fit! (given that data is an array with the data to fit)
# `sucess` [sic] is leastsq's integer status flag.
fitparas, sucess = fit(f, [mu, sigma, height], data, x0)

# Rebind the Parameters to the fitted values so f(x0) below plots the
# fitted curve rather than whatever the last internal evaluation used.
mu = Parameter(fitparas[0])
sigma = Parameter(fitparas[1])
height = Parameter(fitparas[2])

plot(x0, data, "bo", x0, f(x0), "r-")
show()
| [
[
1,
0,
0.0238,
0.0238,
0,
0.66,
0,
735,
0,
1,
0,
0,
735,
0,
0
],
[
1,
0,
0.0476,
0.0238,
0,
0.66,
0.0588,
265,
0,
1,
0,
0,
265,
0,
0
],
[
1,
0,
0.0714,
0.0238,
0,
... | [
"from pylab import *",
"from scipy import optimize",
"from numpy import *",
"class Parameter:\n def __init__(self, value):\n self.value = value\n\n def set(self, value):\n self.value = value\n\n def __call__(self):",
" def __init__(self, value):\n self.value = va... |
from pylab import *
from scipy import *
from scipy import optimize
# if you experience problem "optimize not found", try to uncomment the following line. The problem is present at least at Ubuntu Lucid python scipy package
# from scipy import optimize

# Generate data points with noise
num_points = 500
Tx = linspace(0., 200., num_points)
Ty = Tx

# Synthetic pump-probe signal: a ~29 ps cosine oscillation plus random noise.
tX = 1.6*cos(2*pi*(Tx/29.-1.32/360.)) +1.4*((0.5-rand(num_points))*exp(2*rand(num_points)**2))

# Fit the first set.  Parameter vector p = [amplitude, period, phase in degrees].
fitfunc = lambda p, x: p[0]*cos(2*pi*(x/p[1]+p[2]/360.)) # Target function
errfunc = lambda p, x, y: fitfunc(p, x) - y # Distance to the target function
p0 = [1.6, 31., -0.] # Initial guess for the parameters
p1, success = optimize.leastsq(errfunc, p0[:], args=(Tx, tX))

# NOTE(review): this rebinds the name `time`, shadowing any later
# `import time` in the same session.
time = linspace(Tx.min(), Tx.max(), 500)
plot(Tx, tX, "ro", time, fitfunc(p1, time), "r-") # Plot of the data and the fit

# Legend the plot
title("Pump-probe oscillation fitting")
xlabel("Time delay (ps)")
ylabel("$\Delta R/R$")
legend(('x position', 'x fit'))

ax = axes()
# Annotate the fitted period, positioned in axes coordinates.
text(0.8, 0.07,
     'Periodic : %5.3f ps.' % (p1[1]),
     fontsize=16,
     horizontalalignment='center',
     verticalalignment='center',
     transform=ax.transAxes)

show()
| [
[
1,
0,
0.025,
0.025,
0,
0.66,
0,
735,
0,
1,
0,
0,
735,
0,
0
],
[
1,
0,
0.05,
0.025,
0,
0.66,
0.0526,
265,
0,
1,
0,
0,
265,
0,
0
],
[
1,
0,
0.075,
0.025,
0,
0.66,
... | [
"from pylab import *",
"from scipy import *",
"from scipy import optimize",
"num_points = 500",
"Tx = linspace(0., 200., num_points)",
"Ty = Tx",
"tX = 1.6*cos(2*pi*(Tx/29.-1.32/360.)) +1.4*((0.5-rand(num_points))*exp(2*rand(num_points)**2))",
"fitfunc = lambda p, x: p[0]*cos(2*pi*(x/p[1]+p[2]/360.)) ... |
#!/usr/bin/python2.4
#
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Run all tests defined in the DSPL Tools code."""
__author__ = 'Benjamin Yolken <yolken@google.com>'
import unittest
# Dotted names of every test module main() loads and runs; the dspllib.*
# entries live in the corresponding subpackages of this tree.
_TEST_MODULE_NAMES = [
    'dsplcheck_test',
    'dsplgen_test',
    'dspllib.data_sources.csv_data_source_test',
    'dspllib.data_sources.csv_data_source_sqlite_test',
    'dspllib.data_sources.data_source_test',
    'dspllib.data_sources.data_source_to_dspl_test',
    'dspllib.model.dspl_model_loader_test',
    'dspllib.model.dspl_model_test',
    'dspllib.validation.dspl_validation_test',
    'dspllib.validation.xml_validation_test']
def main():
  """Run all DSPL Tools tests and print the results to stderr."""
  loader = unittest.defaultTestLoader
  suite = unittest.TestSuite()
  for module_name in _TEST_MODULE_NAMES:
    suite.addTests(loader.loadTestsFromName(module_name))
  unittest.TextTestRunner().run(suite)


if __name__ == '__main__':
  main()
| [
[
8,
0,
0.5,
0.0156,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.5469,
0.0156,
0,
0.66,
0.2,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.5781,
0.0156,
0,
0.66,
0... | [
"\"\"\"Run all tests defined in the DSPL Tools code.\"\"\"",
"__author__ = 'Benjamin Yolken <yolken@google.com>'",
"import unittest",
"_TEST_MODULE_NAMES = [\n 'dsplcheck_test',\n 'dsplgen_test',\n 'dspllib.data_sources.csv_data_source_test',\n 'dspllib.data_sources.csv_data_source_sqlite_test',\n... |
#!/usr/bin/python2.4
#
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests of dsplcheck module."""
__author__ = 'Benjamin Yolken <yolken@google.com>'
import os
import os.path
import re
import shutil
import StringIO
import sys
import tempfile
import unittest
import zipfile
import dsplcheck
_DSPL_CONTENT = (
"""<?xml version="1.0" encoding="UTF-8"?>
<dspl xmlns="http://schemas.google.com/dspl/2010"
xmlns:time="http://www.google.com/publicdata/dataset/google/time">
<import namespace="http://www.google.com/publicdata/dataset/google/time"/>
<info>
<name>
<value>Dataset Name</value>
</name>
</info>
<provider>
<name>
<value>Provider Name</value>
</name>
</provider>
</dspl>""")
_DSPL_CONTENT_BAD_CSV_PATH = (
"""<?xml version="1.0" encoding="UTF-8"?>
<dspl xmlns="http://schemas.google.com/dspl/2010"
xmlns:time="http://www.google.com/publicdata/dataset/google/time">
<import namespace="http://www.google.com/publicdata/dataset/google/time"/>
<info>
<name>
<value>Dataset Name</value>
</name>
</info>
<provider>
<name>
<value>Provider Name</value>
</name>
</provider>
<tables>
<table id="my_table">
<column id="col1" type="string"/>
<column id="col2" type="string"/>
<data>
<file format="csv" encoding="utf-8">non_existent_file.csv</file>
</data>
</table>
</tables>
</dspl>""")
class DSPLCheckTests(unittest.TestCase):
  """Test case for dsplcheck module."""
  # NOTE(review): uses the Python-2-only StringIO module for stdout capture;
  # a Python 3 port would switch to io.StringIO.

  def setUp(self):
    # Fresh scratch directory holding one known-good dataset file per test.
    self.input_dir = tempfile.mkdtemp()

    self.valid_dspl_file_path = (
        os.path.join(self.input_dir, 'valid_dataset.xml'))

    self.valid_dspl_file = open(
        self.valid_dspl_file_path, 'w')
    self.valid_dspl_file.write(_DSPL_CONTENT)
    self.valid_dspl_file.close()

  def tearDown(self):
    shutil.rmtree(self.input_dir)

  def testValidDataset(self):
    """Test basic case of dataset that validates and parses correctly."""
    self._StdoutTestHelper(
        dsplcheck.main, [self.valid_dspl_file_path],
        'validates successfully.*Parsing completed.*'
        'Checking DSPL model and data.*Completed')

  def testBadXMLFilePath(self):
    """Test case where bad XML file path is passed in."""
    self._StdoutTestHelper(
        dsplcheck.main, ['nonexistent_input_file.xml'],
        'Error opening XML file', expect_exit=True)

  def testBadCSVFilePath(self):
    """Test case where DSPL file has bad CSV reference."""
    bad_csv_dspl_file_path = (
        os.path.join(self.input_dir, 'invalid_csv_dataset.xml'))

    bad_csv_dspl_file = open(bad_csv_dspl_file_path, 'w')
    bad_csv_dspl_file.write(_DSPL_CONTENT_BAD_CSV_PATH)
    bad_csv_dspl_file.close()

    self._StdoutTestHelper(
        dsplcheck.main, [bad_csv_dspl_file_path],
        'Error while trying to parse', expect_exit=True)

  def testSchemaOnlyOption(self):
    """Test that 'schema only' checking level option works correctly."""
    self._StdoutTestHelper(
        dsplcheck.main, [self.valid_dspl_file_path, '-l', 'schema_only'],
        'validates successfully\W*Completed')

  def testSchemaAndModelOption(self):
    """Test that 'schema and model' checking level option works correctly."""
    self._StdoutTestHelper(
        dsplcheck.main, [self.valid_dspl_file_path, '-l', 'schema_and_model'],
        'Checking DSPL model(?! and data)')

  def testZipInput(self):
    """Test that module properly handles zipped input."""
    zip_path = os.path.join(self.input_dir, 'dataset.zip')

    zip_file = zipfile.ZipFile(zip_path, 'w')
    zip_file.write(self.valid_dspl_file_path)
    zip_file.close()

    self._StdoutTestHelper(
        dsplcheck.main, [zip_path],
        'validates successfully.*Parsing completed.*'
        'Checking DSPL model and data.*Completed')

  def testZipMissingXML(self):
    """Test that zip file without an XML file produces error."""
    zip_path = os.path.join(self.input_dir, 'dataset.zip')

    zip_file = zipfile.ZipFile(zip_path, 'w')
    zip_file.writestr('test.txt', 'Text')
    zip_file.close()

    self._StdoutTestHelper(
        dsplcheck.main, [zip_path],
        'does not have any XML', expect_exit=True)

  def testZipMultipleXMLFiles(self):
    """Test that zip file with multiple XML files produces error."""
    zip_path = os.path.join(self.input_dir, 'dataset.zip')

    zip_file = zipfile.ZipFile(zip_path, 'w')
    zip_file.writestr('test.xml', 'Text')
    zip_file.writestr('test2.xml', 'Text')
    zip_file.close()

    self._StdoutTestHelper(
        dsplcheck.main, [zip_path],
        'multiple XML files', expect_exit=True)

  def _StdoutTestHelper(self, function, args,
                        expected_output, expect_exit=False):
    """Check the stdout output of a function against its expected value.

    Args:
      function: A function to execute
      args: The arguments to pass to the function
      expected_output: A regular expression expected to match the stdout output
      expect_exit: Boolean indicating whether the function execution should
                   trigger a system exit
    """
    # Temporarily swap stdout for an in-memory buffer so everything the
    # checked function prints can be regex-matched afterwards.
    saved_stdout = sys.stdout

    redirected_output = StringIO.StringIO()
    sys.stdout = redirected_output

    if expect_exit:
      self.assertRaises(SystemExit, function, args)
    else:
      function(args)

    self.assertTrue(
        re.search(expected_output, redirected_output.getvalue(), re.DOTALL))

    redirected_output.close()
    sys.stdout = saved_stdout


if __name__ == '__main__':
  unittest.main()
| [
[
8,
0,
0.1481,
0.0046,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.162,
0.0046,
0,
0.66,
0.0667,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.1713,
0.0046,
0,
0.66,
... | [
"\"\"\"Tests of dsplcheck module.\"\"\"",
"__author__ = 'Benjamin Yolken <yolken@google.com>'",
"import os",
"import os.path",
"import re",
"import shutil",
"import StringIO",
"import sys",
"import tempfile",
"import unittest",
"import zipfile",
"import dsplcheck",
"_DSPL_CONTENT = (\n\"\"\"... |
#!/usr/bin/python2.4
#
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Check a DSPL dataset for likely import errors."""
__author__ = 'Benjamin Yolken <yolken@google.com>'
import optparse
import os
import shutil
import sys
import tempfile
import time
import zipfile
from dspllib.model import dspl_model_loader
from dspllib.validation import dspl_validation
from dspllib.validation import xml_validation
def LoadOptionsFromFlags(argv):
  """Parse command-line arguments.

  Args:
    argv: The program argument vector (excluding the script name)

  Returns:
    A dictionary with key-value pairs for each of the options
  """
  parser = optparse.OptionParser(
      usage='python dsplcheck.py [options] [DSPL XML file or zip archive]')
  parser.set_defaults(verbose=True)
  parser.add_option('-q', '--quiet',
                    action='store_false', dest='verbose',
                    help='Quiet mode')
  parser.add_option(
      '-l', '--checking_level', dest='checking_level', type='choice',
      choices=['schema_only', 'schema_and_model', 'full'], default='full',
      help='Level of checking to do (default: full)')

  options, args = parser.parse_args(args=argv)

  # Exactly one positional argument (the dataset) is required.
  if len(args) != 1:
    parser.error('An XML file or DSPL zip archive is required')

  return {'verbose': options.verbose,
          'checking_level': options.checking_level,
          'file_path': args[0]}
def GetInputFilePath(input_file_path):
  """Parse the input file path, extracting a zip file if necessary.

  Args:
    input_file_path: String path to dsplcheck input file

  Returns:
    Dictionary containing final XML file path (post-extraction) and directory
    into which zip was extracted (or '' if input was not a zip).

  Exits the process (status 2) when a zip contains zero or multiple XML files.
  """
  if zipfile.is_zipfile(input_file_path):
    # Extract files to temporary directory and search for dataset XML
    zip_dir = tempfile.mkdtemp()

    zip_file = zipfile.ZipFile(input_file_path, 'r')
    zip_file.extractall(zip_dir)

    xml_file_paths = []

    for (dirpath, unused_dirnames, filenames) in os.walk(zip_dir):
      for file_name in filenames:
        if file_name[-4:] == '.xml':
          xml_file_paths.append(os.path.join(dirpath, file_name))

    if not xml_file_paths:
      # print() keeps this function usable under both Python 2 and 3; the
      # original Python-2-only print statements broke Python 3 imports.
      print('Error: zip does not have any XML files')
      sys.exit(2)
    elif len(xml_file_paths) > 1:
      print('Error: zip contains multiple XML files')
      sys.exit(2)
    else:
      xml_file_path = xml_file_paths[0]

    zip_file.close()
  else:
    xml_file_path = input_file_path
    zip_dir = ''

  return {'xml_file_path': xml_file_path,
          'zip_dir': zip_dir}
def main(argv):
  """Parse command-line flags and run XML validator.

  Exits with status 2 on any I/O or validation failure.

  Args:
    argv: The program argument vector (excluding the script name)
  """
  start_time = time.time()
  options = LoadOptionsFromFlags(argv)
  file_paths = GetInputFilePath(options['file_path'])
  try:
    xml_file = open(file_paths['xml_file_path'], 'r')
  except IOError as io_error:
    # print() calls replace the Python-2-only print statements so the
    # tool also runs under Python 3.
    print('Error opening XML file\n\n%s' % io_error)
    sys.exit(2)
  if options['verbose']:
    print('==== Checking XML file against DSPL schema....')
  result = xml_validation.RunValidation(
      xml_file,
      verbose=options['verbose'])
  print(result)
  if 'validates successfully' not in result:
    # Stop if XML validation not successful
    sys.exit(2)
  if options['checking_level'] != 'schema_only':
    if options['verbose']:
      print('\n==== Parsing DSPL dataset....')
    # 'full' additionally loads and checks the CSV data behind the model.
    full_data_check = options['checking_level'] == 'full'
    try:
      dataset = dspl_model_loader.LoadDSPLFromFiles(
          file_paths['xml_file_path'], load_all_data=full_data_check)
    except dspl_model_loader.DSPLModelLoaderError as loader_error:
      print('Error while trying to parse DSPL dataset\n\n%s' % loader_error)
      sys.exit(2)
    if options['verbose']:
      print('Parsing completed.')
      if full_data_check:
        print('\n==== Checking DSPL model and data....')
      else:
        print('\n==== Checking DSPL model....')
    dspl_validator = dspl_validation.DSPLDatasetValidator(
        dataset, full_data_check=full_data_check)
    print(dspl_validator.RunValidation(options['verbose']))
  xml_file.close()
  if file_paths['zip_dir']:
    # Clean up the temporary directory created for zip extraction.
    shutil.rmtree(file_paths['zip_dir'])
  if options['verbose']:
    print('\nCompleted in %0.2f seconds' % (time.time() - start_time))


if __name__ == '__main__':
  main(sys.argv[1:])
| [
[
8,
0,
0.1641,
0.0051,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.1795,
0.0051,
0,
0.66,
0.0667,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.1897,
0.0051,
0,
0.66,... | [
"\"\"\"Check a DSPL dataset for likely import errors.\"\"\"",
"__author__ = 'Benjamin Yolken <yolken@google.com>'",
"import optparse",
"import os",
"import shutil",
"import sys",
"import tempfile",
"import time",
"import zipfile",
"from dspllib.model import dspl_model_loader",
"from dspllib.vali... |
#!/usr/bin/python2.4
#
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests of dsplgen module."""
__author__ = 'Benjamin Yolken <yolken@google.com>'
import os
import os.path
import re
import shutil
import StringIO
import sys
import tempfile
import unittest
import dsplcheck
import dsplgen
# Sample CSV fixture exercising dsplgen's column-annotation syntax:
# bracketed key=value hints on each header column (type/format, concept
# binding with rollup, and metric roles).
_TEST_CSV_CONTENT = (
"""date[type=date;format=yyyy-MM-dd],category1,category2[concept=geo:us_state;rollup=true],metric1[extends=quantity:ratio;slice_role=metric],metric2,metric3
1980-01-01,red,california,89,321,71.21
1981-01-01,red,california,99,231,391.2
1982-01-01,blue,maine's,293,32,2.31
1983-01-01,blue,california,293,12,10.3
1984-01-01,red,maine's,932,48,10.78""")
class DSPLGenTests(unittest.TestCase):
  """Test cases for dsplgen module."""
  def setUp(self):
    # Write the shared CSV fixture into a fresh temp directory; dsplgen
    # reads its input from disk, so an in-memory string is not enough.
    self.input_dir = tempfile.mkdtemp()
    input_file = open(os.path.join(self.input_dir, 'input.csv'), 'w')
    input_file.write(_TEST_CSV_CONTENT)
    input_file.close()
    # Separate temp directory receives the generated DSPL output.
    self.output_dir = tempfile.mkdtemp()
  def tearDown(self):
    # Remove both temp directories created in setUp.
    shutil.rmtree(self.input_dir)
    shutil.rmtree(self.output_dir)
  def testDSPLGenEndToEnd(self):
    """A simple end-to-end test of the dsplgen application."""
    # Run dsplgen quietly (-q) against the fixture CSV.
    dsplgen.main(['-o', self.output_dir, '-q',
                  os.path.join(self.input_dir, 'input.csv')])
    # Expect a dataset descriptor plus one CSV per concept/slice table.
    self.assertTrue(
        os.path.isfile(os.path.join(self.output_dir, 'dataset.xml')))
    self.assertTrue(
        os.path.isfile(os.path.join(self.output_dir, 'category1_table.csv')))
    self.assertTrue(
        os.path.isfile(os.path.join(self.output_dir, 'slice_0_table.csv')))
    self.assertTrue(
        os.path.isfile(os.path.join(self.output_dir, 'slice_1_table.csv')))
    # Test that output validates against dsplcheck
    # dsplcheck prints its report to stdout, so capture it for matching.
    saved_stdout = sys.stdout
    redirected_output = StringIO.StringIO()
    sys.stdout = redirected_output
    dsplcheck.main([os.path.join(self.output_dir, 'dataset.xml')])
    self.assertTrue(
        re.search(
            'validates successfully.*Parsing completed.*'
            'No issues found.*Completed',
            redirected_output.getvalue(), re.DOTALL))
    redirected_output.close()
    sys.stdout = saved_stdout
  def testCSVNotFound(self):
    """Test case in which CSV can't be opened."""
    # NOTE(review): this initial successful run looks copied from the
    # end-to-end test; only the failing run below is asserted on.
    dsplgen.main(['-o', self.output_dir, '-q',
                  os.path.join(self.input_dir, 'input.csv')])
    saved_stdout = sys.stdout
    redirected_output = StringIO.StringIO()
    sys.stdout = redirected_output
    # dsplgen exits via SystemExit after printing the error message.
    self.assertRaises(SystemExit,
                      dsplgen.main, ['-q', 'non_existent_input_file.csv'])
    self.assertTrue('Error opening CSV file' in redirected_output.getvalue())
    redirected_output.close()
    sys.stdout = saved_stdout
if __name__ == '__main__':
  # Allow running this test module directly from the command line.
  unittest.main()
| [
[
8,
0,
0.256,
0.008,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.28,
0.008,
0,
0.66,
0.0714,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.296,
0.008,
0,
0.66,
0.... | [
"\"\"\"Tests of dsplgen module.\"\"\"",
"__author__ = 'Benjamin Yolken <yolken@google.com>'",
"import os",
"import os.path",
"import re",
"import shutil",
"import StringIO",
"import sys",
"import tempfile",
"import unittest",
"import dsplcheck",
"import dsplgen",
"_TEST_CSV_CONTENT = (\n\"\"... |
#!/usr/bin/python2.4
#
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Generate a DSPL dataset from a tabular data source via the command-line."""
__author__ = 'Benjamin Yolken <yolken@google.com>'
import optparse
import sys
import time
from dspllib.data_sources import csv_data_source
from dspllib.data_sources import csv_data_source_sqlite
from dspllib.data_sources import data_source_to_dspl
def LoadOptionsFromFlags(argv):
  """Parse command-line arguments.

  Args:
    argv: The program argument vector (excluding the script name)

  Returns:
    A dictionary with key-value pairs for each of the options
  """
  flag_parser = optparse.OptionParser(
      usage='python dsplgen.py [options] [csv file]')
  # Verbose output unless explicitly silenced with -q.
  flag_parser.set_defaults(verbose=True)
  flag_parser.add_option('-o', '--output_path', dest='output_path', default='',
                         help=('Path to a output directory '
                               '(default: current directory)'))
  flag_parser.add_option('-q', '--quiet',
                         action='store_false', dest='verbose',
                         help='Quiet mode')
  flag_parser.add_option('-t', '--data_type', dest='data_type', type='choice',
                         choices=['csv', 'csv_sqlite'], default='csv',
                         help='Type of data source to use (default: csv)')
  parsed_flags, leftover_args = flag_parser.parse_args(args=argv)
  # Exactly one positional argument (the data source) is required.
  if len(leftover_args) != 1:
    flag_parser.error('A data source (e.g., path to CSV file) is required')
  return {'data_type': parsed_flags.data_type,
          'data_source': leftover_args[0],
          'output_path': parsed_flags.output_path,
          'verbose': parsed_flags.verbose}
def main(argv):
  """Parse command-line flags and run data source to DSPL conversion process.

  Exits with status 2 when the data source cannot be opened or the
  data type is unknown.

  Args:
    argv: The program argument vector (excluding the script name)
  """
  start_time = time.time()
  options = LoadOptionsFromFlags(argv)
  # Connect to data source
  if options['data_type'] in ['csv', 'csv_sqlite']:
    try:
      csv_file = open(options['data_source'], 'r')
    except IOError as io_error:
      # print() calls replace the Python-2-only print statements so the
      # tool also runs under Python 3.
      print('Error opening CSV file\n\n%s' % io_error)
      sys.exit(2)
    if options['data_type'] == 'csv':
      data_source_obj = csv_data_source.CSVDataSource(
          csv_file, options['verbose'])
    else:
      data_source_obj = csv_data_source_sqlite.CSVDataSourceSqlite(
          csv_file, options['verbose'])
  else:
    print('Error: Unknown data type: %s' % (options['data_type']))
    sys.exit(2)
  # Create DSPL dataset from data source
  dataset = data_source_to_dspl.PopulateDataset(
      data_source_obj, options['verbose'])
  data_source_obj.Close()
  if options['verbose']:
    print('Materializing dataset:')
    print(str(dataset))
  # Write DSPL dataset to disk
  dataset.Materialize(options['output_path'])
  if options['verbose']:
    print('Completed in %0.2f seconds' % (time.time() - start_time))


if __name__ == '__main__':
  main(sys.argv[1:])
| [
[
8,
0,
0.2581,
0.0081,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.2823,
0.0081,
0,
0.66,
0.1,
777,
1,
0,
0,
0,
0,
3,
0
],
[
1,
0,
0.2984,
0.0081,
0,
0.66,
... | [
"\"\"\"Generate a DSPL dataset from a tabular data source via the command-line.\"\"\"",
"__author__ = 'Benjamin Yolken <yolken@google.com>'",
"import optparse",
"import sys",
"import time",
"from dspllib.data_sources import csv_data_source",
"from dspllib.data_sources import csv_data_source_sqlite",
"... |
#!/usr/bin/python2.4
#
# Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Setup script for the DSPLtools suite."""
from distutils.core import setup
# Register the dspltools packages, the bundled minixsv/genxmlif
# third-party packages, their schema/test data files, and the
# command-line entry scripts.
setup(name='dspltools',
      version='0.4.3',
      description='Suite of command-line tools for generating DSPL datasets',
      author='Benjamin Yolken',
      author_email='yolken@google.com',
      url='http://code.google.com/apis/publicdata/docs/dspltools.html',
      packages=['dspllib', 'dspllib.data_sources',
                'dspllib.model', 'dspllib.validation', 'genxmlif',
                'minixsv'],
      package_dir={'dspllib': 'packages/dspllib',
                   'genxmlif': 'packages/third_party/minixsv/genxmlif',
                   'minixsv': 'packages/third_party/minixsv/minixsv'},
      package_data={'dspllib.validation': ['schemas/*.xsd',
                                          'test_dataset/*.csv',
                                          'test_dataset/*.xml'],
                    'minixsv': ['*.xsd', 'minixsv']},
      scripts=['scripts/dsplcheck.py', 'scripts/dsplgen.py',
               'scripts/run_all_tests.py'],)
| [
[
8,
0,
0.5926,
0.0185,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.6296,
0.0185,
0,
0.66,
0.5,
152,
0,
1,
0,
0,
152,
0,
0
],
[
8,
0,
0.8426,
0.3333,
0,
0.66,
... | [
"\"\"\"Setup script for the DSPLtools suite.\"\"\"",
"from distutils.core import setup",
"setup(name='dspltools',\n version='0.4.3',\n description='Suite of command-line tools for generating DSPL datasets',\n author='Benjamin Yolken',\n author_email='yolken@google.com',\n url='http://co... |
import os
import sys
from distutils.core import setup
from distutils.command.install import INSTALL_SCHEMES
# Install data files into the same location as the *.py modules
# (purelib), rather than distutils' default data directory.
for scheme in INSTALL_SCHEMES.values():
    scheme['data'] = scheme['purelib']
def fullsplit(path, result=None):
    """
    Split a pathname into components (the opposite of os.path.join) in a
    platform-neutral way.
    """
    # Iterative equivalent of the usual recursive formulation: peel the
    # last component off with os.path.split until nothing is left.
    parts = [] if result is None else result
    while True:
        remainder, component = os.path.split(path)
        if remainder == '':
            # Relative path fully consumed; 'component' is the first part.
            return [component] + parts
        if remainder == path:
            # Reached a root such as '/' that os.path.split cannot reduce.
            return parts
        parts = [component] + parts
        path = remainder
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir != '':
    os.chdir(root_dir)
# Walk the 'bots' tree: directories containing __init__.py become
# packages; every non-.py/.pyc file is collected as a data file.
for dirpath, dirnames, filenames in os.walk('bots'):
    # Ignore dirnames that start with '.'
    #~ for i, dirname in enumerate(dirnames):
        #~ if dirname.startswith('.'): del dirnames[i]
    if '__init__.py' in filenames:
        packages.append('.'.join(fullsplit(dirpath)))
        if len(filenames) > 1:
            data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames if not f.endswith('.pyc') and not f.endswith('.py')]])
    elif filenames:
        data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames if not f.endswith('.pyc') and not f.endswith('.py')]])
# Register the discovered packages, their data files and the bots
# command-line scripts.
setup(
    name="bots",
    version="2.1.0",
    author = "hjebbers",
    author_email = "hjebbers@gmail.com",
    url = "http://bots.sourceforge.net/",
    description="Bots open source edi translator",
    long_description = "Bots is complete software for edi (Electronic Data Interchange): translate and communicate. All major edi data formats are supported: edifact, x12, tradacoms, xml",
    platforms="OS Independent (Written in an interpreted language)",
    license="GNU General Public License (GPL)",
    classifiers = [
          'Development Status :: 5 - Production/Stable',
          'Operating System :: OS Independent',
          'Programming Language :: Python',
          'License :: OSI Approved :: GNU General Public License (GPL)',
          'Topic :: Office/Business',
          'Topic :: Office/Business :: Financial',
          'Topic :: Other/Nonlisted Topic',
          'Topic :: Communications',
          'Environment :: Console',
          'Environment :: Web Environment',
          ],
    scripts = [ 'bots-webserver.py',
                'bots-engine.py',
                'bots-grammarcheck.py',
                'bots-xml2botsgrammar.py',
                #~ 'bots/bots-updatedb.py',
                ],
    packages = packages,
    data_files = data_files,
    )
| [
[
1,
0,
0.0122,
0.0122,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0244,
0.0122,
0,
0.66,
0.1,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0366,
0.0122,
0,
0.6... | [
"import os",
"import sys",
"from distutils.core import setup",
"from distutils.command.install import INSTALL_SCHEMES",
"for scheme in INSTALL_SCHEMES.values():\n scheme['data'] = scheme['purelib']",
" scheme['data'] = scheme['purelib']",
"def fullsplit(path, result=None):\n \"\"\"\n Split a... |
import os
import sys
from distutils.core import setup
from distutils.command.install import INSTALL_SCHEMES
# Install data files into the same location as the *.py modules
# (purelib), rather than distutils' default data directory.
for scheme in INSTALL_SCHEMES.values():
    scheme['data'] = scheme['purelib']
def fullsplit(path, result=None):
    """
    Split a pathname into components (the opposite of os.path.join) in a
    platform-neutral way.
    """
    # Iterative equivalent of the usual recursive formulation: peel the
    # last component off with os.path.split until nothing is left.
    parts = [] if result is None else result
    while True:
        remainder, component = os.path.split(path)
        if remainder == '':
            # Relative path fully consumed; 'component' is the first part.
            return [component] + parts
        if remainder == path:
            # Reached a root such as '/' that os.path.split cannot reduce.
            return parts
        parts = [component] + parts
        path = remainder
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir != '':
    os.chdir(root_dir)
# Walk the 'bots' tree: directories containing __init__.py become
# packages; every non-.py/.pyc file is collected as a data file.
for dirpath, dirnames, filenames in os.walk('bots'):
    # Ignore dirnames that start with '.'
    #~ for i, dirname in enumerate(dirnames):
        #~ if dirname.startswith('.'): del dirnames[i]
    if '__init__.py' in filenames:
        packages.append('.'.join(fullsplit(dirpath)))
        if len(filenames) > 1:
            data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames if not f.endswith('.pyc') and not f.endswith('.py')]])
    elif filenames:
        data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames if not f.endswith('.pyc') and not f.endswith('.py')]])
# Register the discovered packages, their data files and the bots
# command-line scripts.
setup(
    name="bots",
    version="2.1.0",
    author = "hjebbers",
    author_email = "hjebbers@gmail.com",
    url = "http://bots.sourceforge.net/",
    description="Bots open source edi translator",
    long_description = "Bots is complete software for edi (Electronic Data Interchange): translate and communicate. All major edi data formats are supported: edifact, x12, tradacoms, xml",
    platforms="OS Independent (Written in an interpreted language)",
    license="GNU General Public License (GPL)",
    classifiers = [
          'Development Status :: 5 - Production/Stable',
          'Operating System :: OS Independent',
          'Programming Language :: Python',
          'License :: OSI Approved :: GNU General Public License (GPL)',
          'Topic :: Office/Business',
          'Topic :: Office/Business :: Financial',
          'Topic :: Other/Nonlisted Topic',
          'Topic :: Communications',
          'Environment :: Console',
          'Environment :: Web Environment',
          ],
    scripts = [ 'bots-webserver.py',
                'bots-engine.py',
                'bots-grammarcheck.py',
                'bots-xml2botsgrammar.py',
                #~ 'bots/bots-updatedb.py',
                ],
    packages = packages,
    data_files = data_files,
    )
| [
[
1,
0,
0.0122,
0.0122,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0244,
0.0122,
0,
0.66,
0.1,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0366,
0.0122,
0,
0.6... | [
"import os",
"import sys",
"from distutils.core import setup",
"from distutils.command.install import INSTALL_SCHEMES",
"for scheme in INSTALL_SCHEMES.values():\n scheme['data'] = scheme['purelib']",
" scheme['data'] = scheme['purelib']",
"def fullsplit(path, result=None):\n \"\"\"\n Split a... |
#!/usr/bin/env python
from bots import webserver
if __name__ == '__main__':
    # Script entry point: hand off to the bots web server.
    webserver.start()
| [
[
1,
0,
0.4,
0.2,
0,
0.66,
0,
261,
0,
1,
0,
0,
261,
0,
0
],
[
4,
0,
0.9,
0.4,
0,
0.66,
1,
0,
0,
0,
0,
0,
0,
0,
1
],
[
8,
1,
1,
0.2,
1,
0.14,
0,
511,
3,
... | [
"from bots import webserver",
"if __name__ == '__main__':\n webserver.start()",
" webserver.start()"
] |
import unittest
import bots.botsglobal as botsglobal
import bots.inmessage as inmessage
import bots.botslib as botslib
import bots.transform as transform
import pickle
import bots.botsinit as botsinit
import utilsunit
'''plugin unittranslateutils.zip '''
#as the max length is
class TestTranslate(unittest.TestCase):
    """Unit tests for helper functions in bots.transform.

    Covers value persistence (persist_*), code-conversion lookups,
    unique-counter generation and EAN/UPC check-digit handling.
    """
    def setUp(self):
        pass
    def testpersist(self):
        """Round-trip a persisted value: add, lookup, update, delete."""
        #~ inn = inmessage.edifromfile(editype='edifact',messagetype='orderswithenvelope',filename='botssys/infile/tests/inisout02.edi')
        domein=u'test'
        botskey=u'test'
        value= u'xxxxxxxxxxxxxxxxx'
        value2= u'IEFJUKAHE*FMhrt4hr f.wch shjeriw'
        value3= u'1'*3024   # only used by the commented-out length checks below
        transform.persist_delete(domein,botskey)
        #~ self.assertRaises(botslib.PersistError,transform.persist_add,domein,botskey,value3) #content is too long
        transform.persist_add(domein,botskey,value)
        self.assertRaises(botslib.PersistError,transform.persist_add,domein,botskey,value) #is already present
        self.assertEqual(value,transform.persist_lookup(domein,botskey),'basis')
        transform.persist_update(domein,botskey,value2)
        self.assertEqual(value2,transform.persist_lookup(domein,botskey),'basis')
        #~ self.assertRaises(botslib.PersistError,transform.persist_update,domein,botskey,value3) #content is too long
        transform.persist_delete(domein,botskey)
        self.assertEqual(None,transform.persist_lookup(domein,botskey),'basis')
        transform.persist_update(domein,botskey,value) #test-tet is not there. gives no error...
    def testpersistunicode(self):
        """Same persistence round-trip, with non-ASCII key and value content."""
        domein=u'test'
        botskey=u'1235:\ufb52\ufb66\ufedb'
        value= u'xxxxxxxxxxxxxxxxx'
        value2= u'IEFJUKAHE*FMhr\u0302\u0267t4hr f.wch shjeriw'
        value3= u'1'*1024   # only used by the commented-out length checks below
        transform.persist_delete(domein,botskey)
        #~ self.assertRaises(botslib.PersistError,transform.persist_add,domein,botskey,value3) #content is too long
        transform.persist_add(domein,botskey,value)
        self.assertRaises(botslib.PersistError,transform.persist_add,domein,botskey,value) #is already present
        self.assertEqual(value,transform.persist_lookup(domein,botskey),'basis')
        transform.persist_update(domein,botskey,value2)
        self.assertEqual(value2,transform.persist_lookup(domein,botskey),'basis')
        #~ self.assertRaises(botslib.PersistError,transform.persist_update,domein,botskey,value3) #content is too long
        transform.persist_delete(domein,botskey)
        self.assertEqual(None,transform.persist_lookup(domein,botskey),'basis')
        transform.persist_update(domein,botskey,value) #is not there. gives no error...
    def testcodeconversion(self):
        """Forward and reverse code conversion, plus attribute lookup."""
        self.assertEqual('TESTOUT',transform.codeconversion('aperakrff2qualifer','TESTIN'),'basis')
        self.assertRaises(botslib.CodeConversionError,transform.codeconversion,'aperakrff2qualifer','TESTINNOT')
        self.assertEqual('TESTIN',transform.rcodeconversion('aperakrff2qualifer','TESTOUT'),'basis')
        self.assertRaises(botslib.CodeConversionError,transform.rcodeconversion,'aperakrff2qualifer','TESTINNOT')
        #need article in ccodelist:
        self.assertEqual('TESTOUT',transform.codetconversion('artikel','TESTIN'),'basis')
        self.assertRaises(botslib.CodeConversionError,transform.codetconversion,'artikel','TESTINNOT')
        self.assertEqual('TESTIN',transform.rcodetconversion('artikel','TESTOUT'),'basis')
        self.assertRaises(botslib.CodeConversionError,transform.rcodetconversion,'artikel','TESTINNOT')
        self.assertEqual('TESTATTR1',transform.codetconversion('artikel','TESTIN','attr1'),'basis')
    def testunique(self):
        """unique() starts a new domain at 1 and increments per call."""
        newdomain = 'test' + transform.unique('test')
        self.assertEqual('1',transform.unique(newdomain),'init new domain')
        self.assertEqual('2',transform.unique(newdomain),'next one')
    def testcheckunique(self):
        """checkunique() accepts only the exact next sequence number.

        Renamed from a second ``testunique`` definition that silently
        shadowed the method above, so both tests now actually run.
        """
        newdomain = 'test' + transform.unique('test')
        self.assertEqual(True,transform.checkunique(newdomain,1),'init new domain')
        self.assertEqual(False,transform.checkunique(newdomain,1),'seq should be 2')
        self.assertEqual(False,transform.checkunique(newdomain,3),'seq should be 2')
        self.assertEqual(True,transform.checkunique(newdomain,2),'next one')
    def testean(self):
        """Check-digit calculation/validation for UPC, EAN8/13/14 and SSCC."""
        # (An exact duplicate of the first five UPC assertions was removed.)
        self.assertEqual('123456789012',transform.addeancheckdigit('12345678901'),'UPC')
        self.assertEqual('2',transform.calceancheckdigit('12345678901'),'UPC')
        self.assertEqual(True,transform.checkean('123456789012'),'UPC')
        self.assertEqual(False,transform.checkean('123456789011'),'UPC')
        self.assertEqual(False,transform.checkean('123456789013'),'UPC')
        self.assertEqual('12345670',transform.addeancheckdigit('1234567'),'EAN8')
        self.assertEqual('0',transform.calceancheckdigit('1234567'),'EAN8')
        self.assertEqual(True,transform.checkean('12345670'),'EAN8')
        self.assertEqual(False,transform.checkean('12345679'),'EAN8')
        self.assertEqual(False,transform.checkean('12345671'),'EAN8')
        self.assertEqual('1234567890128',transform.addeancheckdigit('123456789012'),'EAN13')
        self.assertEqual('8',transform.calceancheckdigit('123456789012'),'EAN13')
        self.assertEqual(True,transform.checkean('1234567890128'),'EAN13')
        self.assertEqual(False,transform.checkean('1234567890125'),'EAN13')
        self.assertEqual(False,transform.checkean('1234567890120'),'EAN13')
        self.assertEqual('12345678901231',transform.addeancheckdigit('1234567890123'),'EAN14')
        self.assertEqual('1',transform.calceancheckdigit('1234567890123'),'EAN14')
        self.assertEqual(True,transform.checkean('12345678901231'),'EAN14')
        self.assertEqual(False,transform.checkean('12345678901230'),'EAN14')
        self.assertEqual(False,transform.checkean('12345678901236'),'EAN14')
        self.assertEqual('123456789012345675',transform.addeancheckdigit('12345678901234567'),'UPC')
        self.assertEqual('5',transform.calceancheckdigit('12345678901234567'),'UPC')
        self.assertEqual(True,transform.checkean('123456789012345675'),'UPC')
        self.assertEqual(False,transform.checkean('123456789012345670'),'UPC')
        self.assertEqual(False,transform.checkean('123456789012345677'),'UPC')
if __name__ == '__main__':
    # Initialise the bots configuration, logging and database before the
    # test run, and always release the database connection afterwards.
    botsinit.generalinit('config')
    botsinit.initenginelogging()
    botsinit.connect()
    try:
        unittest.main()
    except SystemExit:
        # unittest.main() reports its result by raising SystemExit; the
        # original bare 'except: pass' also hid real errors.
        pass
    finally:
        # Guaranteed cleanup, even if the test run fails unexpectedly.
        botsglobal.db.close()
| [
[
1,
0,
0.0081,
0.0081,
0,
0.66,
0,
88,
0,
1,
0,
0,
88,
0,
0
],
[
1,
0,
0.0161,
0.0081,
0,
0.66,
0.1,
662,
0,
1,
0,
0,
662,
0,
0
],
[
1,
0,
0.0242,
0.0081,
0,
0.66,... | [
"import unittest",
"import bots.botsglobal as botsglobal",
"import bots.inmessage as inmessage",
"import bots.botslib as botslib",
"import bots.transform as transform",
"import pickle",
"import bots.botsinit as botsinit",
"import utilsunit",
"'''plugin unittranslateutils.zip '''",
"class TestTrans... |
import unittest
import filecmp
import glob
import shutil
import os
import subprocess
import logging
import utilsunit
import bots.botslib as botslib
import bots.botsinit as botsinit
import bots.botsglobal as botsglobal
from bots.botsconfig import *
'''
plugin unitconfirm.zip
before each run: clear transactions!
'''
# Root of the bots system directory the assertions below read files from.
botssys = 'bots/botssys'
class TestMain(unittest.TestCase):
def testroutetestmdn(self):
lijst = utilsunit.getdir(os.path.join(botssys,'outfile/confirm/mdn/*'))
self.failUnless(len(lijst)==0)
nr_rows = 0
for row in botslib.query(u'''SELECT idta,confirmed,confirmidta
FROM ta
WHERE status=%(status)s
AND statust=%(statust)s
AND idroute=%(idroute)s
AND confirmtype=%(confirmtype)s
AND confirmasked=%(confirmasked)s
ORDER BY idta DESC
''',
{'status':210,'statust':DONE,'idroute':'testmdn','confirmtype':'send-email-MDN','confirmasked':True}):
nr_rows += 1
self.failUnless(row[1])
self.failUnless(row[2]!=0)
else:
self.failUnless(nr_rows==1)
nr_rows = 0
for row in botslib.query(u'''SELECT idta,confirmed,confirmidta
FROM ta
WHERE status=%(status)s
AND statust=%(statust)s
AND idroute=%(idroute)s
AND confirmtype=%(confirmtype)s
AND confirmasked=%(confirmasked)s
ORDER BY idta DESC
''',
{'status':510,'statust':DONE,'idroute':'testmdn','confirmtype':'ask-email-MDN','confirmasked':True}):
nr_rows += 1
self.failUnless(row[1])
self.failUnless(row[2]!=0)
else:
self.failUnless(nr_rows==1)
def testroutetestmdn2(self):
lijst = utilsunit.getdir(os.path.join(botssys,'outfile/confirm/mdn2/*'))
self.failUnless(len(lijst)==0)
nr_rows = 0
for row in botslib.query(u'''SELECT idta,confirmed,confirmidta
FROM ta
WHERE status=%(status)s
AND statust=%(statust)s
AND idroute=%(idroute)s
AND confirmtype=%(confirmtype)s
AND confirmasked=%(confirmasked)s
ORDER BY idta DESC
''',
{'status':510,'statust':DONE,'idroute':'testmdn2','confirmtype':'ask-email-MDN','confirmasked':True}):
nr_rows += 1
self.failUnless(not row[1])
self.failUnless(row[2]==0)
else:
self.failUnless(nr_rows==1)
def testrouteotherx12(self):
lijst = utilsunit.getdir(os.path.join(botssys,'outfile/confirm/otherx12/*'))
self.failUnless(len(lijst)==15)
def testroutetest997(self):
'''
test997 1: pickup 850*1 ask confirm 850*2 gen & send 850*2
send confirm 850*1 gen & send 997*1
test997 2: receive 997*1 confirm 850*1 gen xml*1
receive 850*2 ask confirm 850*3 gen 850*3
send confirm 850*2 gen & send 997*2
test997 3: receive 997*2 confirm 850*2 gen & send xml (to trash)
send 850*3 (to trash)
send xml (to trash)
'''
lijst = utilsunit.getdir(os.path.join(botssys,'outfile/confirm/x12/*'))
self.failUnless(len(lijst)==0)
lijst = utilsunit.getdir(os.path.join(botssys,'outfile/confirm/trash/*'))
self.failUnless(len(lijst)==6)
counter=0
for row in botslib.query(u'''SELECT idta,confirmed,confirmidta
FROM ta
WHERE status=%(status)s
AND statust=%(statust)s
AND idroute=%(idroute)s
AND confirmtype=%(confirmtype)s
AND confirmasked=%(confirmasked)s
ORDER BY idta DESC
''',
{'status':400,'statust':DONE,'idroute':'test997','confirmtype':'ask-x12-997','confirmasked':True}):
counter += 1
if counter == 1:
self.failUnless(not row[1])
self.failUnless(row[2]==0)
elif counter == 2:
self.failUnless(row[1])
self.failUnless(row[2]!=0)
else:
break
else:
self.failUnless(counter!=0)
for row in botslib.query(u'''SELECT idta,confirmed,confirmidta
FROM ta
WHERE status=%(status)s
AND statust=%(statust)s
AND idroute=%(idroute)s
AND confirmtype=%(confirmtype)s
AND confirmasked=%(confirmasked)s
ORDER BY idta DESC
''',
{'status':310,'statust':DONE,'idroute':'test997','confirmtype':'send-x12-997','confirmasked':True}):
counter += 1
if counter <= 2:
self.failUnless(row[1])
self.failUnless(row[2]!=0)
else:
break
else:
self.failUnless(counter!=0)
def testroutetestcontrl(self):
'''
test997 1: pickup ORDERS*1 ask confirm ORDERS*2 gen & send ORDERS*2
send confirm ORDERS*1 gen & send CONTRL*1
test997 2: receive CONTRL*1 confirm ORDERS*1 gen xml*1
receive ORDERS*2 ask confirm ORDERS*3 gen ORDERS*3
send confirm ORDERS*2 gen & send CONTRL*2
test997 3: receive CONTRL*2 confirm ORDERS*2 gen & send xml (to trash)
send ORDERS*3 (to trash)
send xml (to trash)
'''
lijst = utilsunit.getdir(os.path.join(botssys,'outfile/confirm/edifact/*'))
self.failUnless(len(lijst)==0)
lijst = utilsunit.getdir(os.path.join(botssys,'outfile/confirm/trash/*'))
self.failUnless(len(lijst)==6)
counter=0
for row in botslib.query(u'''SELECT idta,confirmed,confirmidta
FROM ta
WHERE status=%(status)s
AND statust=%(statust)s
AND idroute=%(idroute)s
AND confirmtype=%(confirmtype)s
AND confirmasked=%(confirmasked)s
ORDER BY idta DESC
''',
{'status':400,'statust':DONE,'idroute':'testcontrl','confirmtype':'ask-edifact-CONTRL','confirmasked':True}):
counter += 1
if counter == 1:
self.failUnless(not row[1])
self.failUnless(row[2]==0)
elif counter == 2:
self.failUnless(row[1])
self.failUnless(row[2]!=0)
else:
break
else:
self.failUnless(counter!=0)
for row in botslib.query(u'''SELECT idta,confirmed,confirmidta
FROM ta
WHERE status=%(status)s
AND statust=%(statust)s
AND idroute=%(idroute)s
AND confirmtype=%(confirmtype)s
AND confirmasked=%(confirmasked)s
ORDER BY idta DESC
''',
{'status':310,'statust':DONE,'idroute':'testcontrl','confirmtype':'send-edifact-CONTRL','confirmasked':True}):
counter += 1
if counter <= 2:
self.failUnless(row[1])
self.failUnless(row[2]!=0)
else:
break
else:
self.failUnless(counter!=0)
if __name__ == '__main__':
    #functional test driver (Python 2): run bots-engine.py once in a child
    #process, then let the unittests above inspect the produced files and the
    #ta-table of that run.
    pythoninterpreter = 'python'
    newcommand = [pythoninterpreter,'bots-engine.py',]
    shutil.rmtree(os.path.join(botssys, 'outfile'),ignore_errors=True)    #remove whole output directory
    subprocess.call(newcommand)
    #engine run finished; initialise this process for config/logging/db access
    botsinit.generalinit('config')
    botsinit.initenginelogging()
    botsinit.connect()
    #expected totals for the engine run above (informational, for the tester)
    print '''expect:
    21 files received/processed in run.
    17 files without errors,
    4 files with errors,
    30 files send in run.
    '''
    unittest.main()
    logging.shutdown()
    botsglobal.db.close()
| [
[
1,
0,
0.0052,
0.0052,
0,
0.66,
0,
88,
0,
1,
0,
0,
88,
0,
0
],
[
1,
0,
0.0105,
0.0052,
0,
0.66,
0.0833,
891,
0,
1,
0,
0,
891,
0,
0
],
[
1,
0,
0.0157,
0.0052,
0,
0.... | [
"import unittest",
"import filecmp",
"import glob",
"import shutil",
"import os",
"import subprocess",
"import logging",
"import utilsunit",
"import bots.botslib as botslib",
"import bots.botsinit as botsinit",
"import bots.botsglobal as botsglobal",
"from bots.botsconfig import *",
"class T... |
#!/usr/bin/env python
#command-line wrapper: delegates straight to bots' grammarcheck.start()
from bots import grammarcheck
if __name__=='__main__':
    grammarcheck.start()
| [
[
1,
0,
0.4,
0.2,
0,
0.66,
0,
261,
0,
1,
0,
0,
261,
0,
0
],
[
4,
0,
0.9,
0.4,
0,
0.66,
1,
0,
0,
0,
0,
0,
0,
0,
1
],
[
8,
1,
1,
0.2,
1,
0.38,
0,
511,
3,
... | [
"from bots import grammarcheck",
"if __name__=='__main__':\n grammarcheck.start()",
" grammarcheck.start()"
] |
import os
import unittest
import shutil
import bots.inmessage as inmessage
import bots.outmessage as outmessage
import filecmp
try:
import json as simplejson
except ImportError:
import simplejson
import bots.botslib as botslib
import bots.botsinit as botsinit
import utilsunit
''' pluging unitinisout.zip'''
class Testinisoutedifact(unittest.TestCase):
    ''' edifact round-trips: read a file, write the parsed tree out again and
        compare the result with the original (must be identical). '''
    def _roundtrip(self, messagetype, infile, outfile):
        #helper: parse infile, serialize the node tree to outfile, compare files
        inn = inmessage.edifromfile(editype='edifact',messagetype=messagetype,filename=infile)
        out = outmessage.outmessage_init(editype='edifact',messagetype=messagetype,filename=outfile,divtext='',topartner='')    #make outmessage object
        out.root = inn.root
        out.writeall()
        self.failUnless(filecmp.cmp('bots/' + outfile,'bots/' + infile))
    def testedifact02(self):
        self._roundtrip('orderswithenvelope',
                        'botssys/infile/unitinisout/org/inisout02.edi',
                        'botssys/infile/unitinisout/output/inisout02.edi')
    def testedifact03(self):
        #~ #takes quite long
        self._roundtrip('invoicwithenvelope',
                        'botssys/infile/unitinisout/org/inisout03.edi',
                        'botssys/infile/unitinisout/output/inisout03.edi')
    def testedifact04(self):
        #eight equivalent representations of the same interchange (040601-040608)
        #must all serialize to identical output; loops replace the previous
        #eight copy-pasted readwrite calls and seven copy-pasted comparisons.
        for nr in range(1, 9):
            utilsunit.readwrite(editype='edifact',
                                messagetype='orderswithenvelope',
                                filenamein='botssys/infile/unitinisout/0406edifact/04060%s.edi' % nr,
                                filenameout='botssys/infile/unitinisout/output/04060%s.edi' % nr)
        for nr in range(2, 9):
            self.failUnless(filecmp.cmp('bots/botssys/infile/unitinisout/output/040601.edi',
                                        'bots/botssys/infile/unitinisout/output/04060%s.edi' % nr))
class Testinisoutinh(unittest.TestCase):
    ''' read/write round-trips for fixed-format and idoc messages; the
        rewritten file must be identical to the original input. '''
    def testinh01(self):
        source = 'botssys/infile/unitinisout/org/inisout01.inh'
        target = 'botssys/infile/unitinisout/output/inisout01.inh'
        #parse the fixed-format invoic, then serialize the parsed tree again
        reader = inmessage.edifromfile(editype='fixed',messagetype='invoicfixed',filename=source)
        writer = outmessage.outmessage_init(editype='fixed',messagetype='invoicfixed',filename=target,divtext='',topartner='')    #make outmessage object
        writer.root = reader.root
        writer.writeall()
        self.failUnless(filecmp.cmp('bots/' + target,'bots/' + source))
    def testidoc01(self):
        source = 'botssys/infile/unitinisout/org/inisout01.idoc'
        target = 'botssys/infile/unitinisout/output/inisout01.idoc'
        #same round-trip for an idoc (WP_PLU02) message
        reader = inmessage.edifromfile(editype='idoc',messagetype='WP_PLU02',filename=source)
        writer = outmessage.outmessage_init(editype='idoc',messagetype='WP_PLU02',filename=target,divtext='',topartner='')    #make outmessage object
        writer.root = reader.root
        writer.writeall()
        self.failUnless(filecmp.cmp('bots/' + target,'bots/' + source))
class Testinisoutx12(unittest.TestCase):
    #x12 round-trips: read an 850, write it again, compare with the original.
    def testx12_01(self):
        filenamein='botssys/infile/unitinisout/org/inisout01.x12'
        filenameout='botssys/infile/unitinisout/output/inisout01.x12'
        inn = inmessage.edifromfile(editype='x12',messagetype='850withenvelope',filename=filenamein)
        out = outmessage.outmessage_init(editype='x12',messagetype='850withenvelope',filename=filenameout,divtext='',topartner='') #make outmessage object
        out.root = inn.root
        out.writeall()
        linesfile1 = utilsunit.readfilelines('bots/' + filenamein)
        linesfile2 = utilsunit.readfilelines('bots/' + filenameout)
        #only the first 103 chars of the ISA line are compared: the tail of the
        #fixed-width ISA is regenerated on writing (presumably date/time or
        #control values differ - TODO confirm); all other lines must match.
        self.assertEqual(linesfile1[0][:103],linesfile2[0][:103],'first part of ISA')
        for line1,line2 in zip(linesfile1[1:],linesfile2[1:]):
            self.assertEqual(line1,line2,'Cmplines')
    def testx12_02(self):
        filenamein='botssys/infile/unitinisout/org/inisout02.x12'
        filenameout='botssys/infile/unitinisout/output/inisout02.x12'
        inn = inmessage.edifromfile(editype='x12',messagetype='850withenvelope',filename=filenamein)
        #output is written without CR/LF after each record (add_crlfafterrecord_sep='')
        out = outmessage.outmessage_init(add_crlfafterrecord_sep='',editype='x12',messagetype='850withenvelope',filename=filenameout,divtext='',topartner='') #make outmessage object
        out.root = inn.root
        out.writeall()
        linesfile1 = utilsunit.readfile('bots/' + filenamein)
        linesfile2 = utilsunit.readfile('bots/' + filenameout)
        self.assertEqual(linesfile1[:103],linesfile2[:103],'first part of ISA')
        #the input apparently has 2 extra chars after the ISA (a line ending)
        #that the output lacks, hence 105 vs 103 - presumably CR+LF, TODO confirm
        self.assertEqual(linesfile1[105:],linesfile2[103:],'rest of message')
class Testinisoutcsv(unittest.TestCase):
    #csv round-trips via utilsunit.readwrite; output file must equal the input.
    def testcsv001(self):
        filenamein='botssys/infile/unitinisout/org/inisout01.csv'
        filenameout='botssys/infile/unitinisout/output/inisout01.csv'
        utilsunit.readwrite(editype='csv',messagetype='invoic',filenamein=filenamein,filenameout=filenameout)
        self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamein))
    def testcsv003(self):
        #utf-charset
        filenamein='botssys/infile/unitinisout/org/inisout03.csv'
        filenameout='botssys/infile/unitinisout/output/inisout03.csv'
        utilsunit.readwrite(editype='csv',messagetype='invoic',filenamein=filenamein,filenameout=filenameout)
        self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamein))
        #~ #utf-charset with BOM **error. BOM is not removed by python.
        #~ #utilsunit.readwrite(editype='csv',
        #~ #                   messagetype='invoic',
        #~ #                   filenamein='botssys/infile/unitinisout/inisout04.csv',
        #~ #                   filenameout='botssys/infile/unitinisout/output/inisout04.csv')
        #~ #self.failUnless(filecmp.cmp('botssys/infile/unitinisout/output/inisout04.csv','botssys/infile/unitinisout/inisout04.csv'))
if __name__ == '__main__':
    #initialise bots configuration and engine logging for a stand-alone run
    botsinit.generalinit('config')
    #~ botslib.initbotscharsets()
    botsinit.initenginelogging()
    #start from an empty output directory; the tests (re)create its files
    shutil.rmtree('bots/botssys/infile/unitinisout/output',ignore_errors=True)    #remove whole output directory
    os.mkdir('bots/botssys/infile/unitinisout/output')
    unittest.main()
| [
[
1,
0,
0.0067,
0.0067,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0133,
0.0067,
0,
0.66,
0.0667,
88,
0,
1,
0,
0,
88,
0,
0
],
[
1,
0,
0.02,
0.0067,
0,
0.66... | [
"import os",
"import unittest",
"import shutil",
"import bots.inmessage as inmessage",
"import bots.outmessage as outmessage",
"import filecmp",
"try:\n import json as simplejson\nexcept ImportError:\n import simplejson",
" import json as simplejson",
" import simplejson",
"import bots... |
import unittest
import bots.botslib as botslib
import bots.botsinit as botsinit
import bots.inmessage as inmessage
import bots.outmessage as outmessage
from bots.botsconfig import *
import utilsunit
''' plugin unitformats '''
#python 2.6 treats -0 different. in outmessage this is adapted, for inmessage: python 2.6 does this correct
#minimal stand-in for a grammar record definition, passed to _formatfield;
#MPATH is presumably only used in error reporting - TODO confirm
testdummy={MPATH:'dummy for tests'}
class TestFormatFieldVariableOutmessage(unittest.TestCase):
    ''' outmessage._formatfield() for variable-length (edifact) syntax.
        One test method per format code; the expected values below document
        the formatting contract. tfield layout: see the inline
        'length decimals minlength format' header comments.
    '''
    def setUp(self):
        #fresh edifact outmessage per test; _formatfield reads its ta_info
        self.edi = outmessage.outmessage_init(messagetype='edifact',editype='edifact')
    def test_out_formatfield_var_R(self):
        ''' format R: numeric, decimals written as received (not rescaled). '''
        self.edi.ta_info['lengthnumericbare']=True
        self.edi.ta_info['decimaal']='.'
        tfield1 = ['TEST1','M',3,'R',True,0, 0, 'R']
        #                length    decimals  minlength  format
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '','empty string')
        self.assertEqual(self.edi._formatfield('1',tfield1,testdummy), '1', 'basic')
        self.assertEqual(self.edi._formatfield(' 1',tfield1,testdummy), '1', 'basic')
        self.assertEqual(self.edi._formatfield('1 ',tfield1,testdummy), '1', 'basic')
        self.assertEqual(self.edi._formatfield('0',tfield1,testdummy), '0','zero stays zero')
        self.assertEqual(self.edi._formatfield('-0',tfield1,testdummy), '-0','neg.zero stays neg.zero')
        self.assertEqual(self.edi._formatfield('-0.00',tfield1,testdummy), '-0.00','')
        self.assertEqual(self.edi._formatfield('-.12',tfield1,testdummy), '-0.12','no zero before dec,sign is OK')
        self.assertEqual(self.edi._formatfield('123',tfield1,testdummy), '123','numeric field at max')
        self.assertEqual(self.edi._formatfield('001',tfield1,testdummy), '1','leading zeroes are removed')
        self.assertEqual(self.edi._formatfield('0.10',tfield1,testdummy), '0.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('-1.23',tfield1,testdummy), '-1.23','numeric field at max with minus and decimal sign')
        self.assertEqual(self.edi._formatfield('0001',tfield1,testdummy), '1','strips leading zeroes if possobel')
        self.assertEqual(self.edi._formatfield('+123',tfield1,testdummy), '123','strips leading zeroes if possobel')
        #decimal sign is written according to ta_info['decimaal']
        self.edi.ta_info['decimaal']=','
        self.assertEqual(self.edi._formatfield('1.23',tfield1,testdummy), '1,23','other dec.sig, replace')
        self.edi.ta_info['decimaal']='.'
        #invalid values must raise OutMessageError
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-1.234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1<3',tfield1,testdummy) #wrong char
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1-3',tfield1,testdummy) #'-' in middel of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123-',tfield1,testdummy) #'-' at end of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1,3',tfield1,testdummy) #',', where ',' is not traid sep.
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1+3',tfield1,testdummy) #'+' in middle of number (no exp)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1E3',tfield1,testdummy) #'+' in middle of number (no exp)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'13+',tfield1,testdummy) #'+' at end
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'0.100',tfield1,testdummy) #field too big
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'.',tfield1,testdummy) #no num
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-',tfield1,testdummy) #no num
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'.001',tfield1,testdummy) #bots adds 0 before dec, thus too big
        #~ #test filling up to min length
        tfield2 = ['TEST1', 'M', 8, 'R', True, 0, 5,'R']
        self.assertEqual(self.edi._formatfield('12345',tfield2,testdummy), '12345','just large enough')
        self.assertEqual(self.edi._formatfield('0.1000',tfield2,testdummy), '0.1000','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('00001',tfield2,testdummy), '00001','keep leading zeroes')
        self.assertEqual(self.edi._formatfield('123',tfield2,testdummy), '00123','add leading zeroes')
        self.assertEqual(self.edi._formatfield('.1',tfield2,testdummy), '0000.1','add leading zeroes')
        #~ #test exp
        #exponent notation is accepted and converted to plain notation
        self.assertEqual(self.edi._formatfield('12E+3',tfield2,testdummy), '12000','Exponent notation is possible')
        self.assertEqual(self.edi._formatfield('12E3',tfield2,testdummy), '12000','Exponent notation is possible->to std notation')
        self.assertEqual(self.edi._formatfield('12e+3',tfield2,testdummy), '12000','Exponent notation is possible; e->E')
        self.assertEqual(self.edi._formatfield('12e3',tfield2,testdummy), '12000','Exponent notation is possible; e->E')
        self.assertEqual(self.edi._formatfield('12345E+3',tfield2,testdummy), '12345000','do not count + and E')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'12345678E+3',tfield2,testdummy) #too big with exp
        tfield3 = ['TEST1', 'M', 8, 'R', True, 3, 5,'R']
        #negative exponents: kept commented out (original notes, not verified)
        #~ print '\n>>>',self.edi._formatfield('12E-3',tfield3,testdummy)
        #~ self.assertEqual(self.edi._formatfield('12E-3',tfield3,testdummy), '00.012','Exponent notation is possible')
        #~ self.assertEqual(self.edi._formatfield('12e-3',tfield2,testdummy), '00.012','Exponent notation is possible; e->E')
        #~ self.assertEqual(self.edi._formatfield('12345678E-3',tfield2,testdummy), '12345.678','do not count + and E')
        #~ self.assertEqual(self.edi._formatfield('12345678E-7',tfield2,testdummy), '1.2345678','do not count + and E')
        #~ self.assertEqual(self.edi._formatfield('123456E-7',tfield2,testdummy), '0.0123456','do not count + and E')
        #~ self.assertEqual(self.edi._formatfield('1234567E-7',tfield2,testdummy), '0.1234567','do not count + and E')
        #~ self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'12345678E-8',tfield2,testdummy) #gets 0.12345678, is too big
        #~ self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'12345678E+3',tfield2,testdummy) #too big with exp
        tfield4 = ['TEST1', 'M', 80, 'R', True, 3, 0,'R']
        self.assertEqual(self.edi._formatfield('12345678901234560',tfield4,testdummy), '12345678901234560','lot of digits')
        #with lengthnumericbare False, sign and decimal sign count towards length
        self.edi.ta_info['lengthnumericbare']=False
        self.assertEqual(self.edi._formatfield('-1.45',tfield2,testdummy), '-1.45','just large enough')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-12345678',tfield2,testdummy) #field too large
    def test_out_formatfield_var_N(self):
        ''' format N: numeric with a fixed number of decimals (pad/round). '''
        self.edi.ta_info['decimaal']='.'
        self.edi.ta_info['lengthnumericbare']=True
        tfield1 = ['TEST1','M',5,'N',True,2, 0, 'N']
        #                length    decimals  minlength  format
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '','empty string')
        self.assertEqual(self.edi._formatfield('1',tfield1,testdummy), '1.00', 'basic')
        self.assertEqual(self.edi._formatfield(' 1',tfield1,testdummy), '1.00', 'basic')
        self.assertEqual(self.edi._formatfield('1 ',tfield1,testdummy), '1.00', 'basic')
        self.assertEqual(self.edi._formatfield('0',tfield1,testdummy), '0.00','zero stays zero')
        self.assertEqual(self.edi._formatfield('-0',tfield1,testdummy), '-0.00','neg.zero stays neg.zero')
        self.assertEqual(self.edi._formatfield('-0.00',tfield1,testdummy), '-0.00','')
        self.assertEqual(self.edi._formatfield('-0.001',tfield1,testdummy), '-0.00','')
        self.assertEqual(self.edi._formatfield('-.12',tfield1,testdummy), '-0.12','no zero before dec,sign is OK')
        self.assertEqual(self.edi._formatfield('123',tfield1,testdummy), '123.00','numeric field at max')
        self.assertEqual(self.edi._formatfield('001',tfield1,testdummy), '1.00','leading zeroes are removed')
        self.assertEqual(self.edi._formatfield('0.10',tfield1,testdummy), '0.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('123.1049',tfield1,testdummy), '123.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('-1.23',tfield1,testdummy), '-1.23','numeric field at max with minus and decimal sign')
        self.assertEqual(self.edi._formatfield('0001',tfield1,testdummy), '1.00','strips leading zeroes if possobel')
        self.assertEqual(self.edi._formatfield('+123',tfield1,testdummy), '123.00','strips leading zeroes if possobel')
        self.edi.ta_info['decimaal']=','
        self.assertEqual(self.edi._formatfield('1.23',tfield1,testdummy), '1,23','other dec.sig, replace')
        self.edi.ta_info['decimaal']='.'
        #invalid values must raise OutMessageError
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-1234.56',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1<3',tfield1,testdummy) #wrong char
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1-3',tfield1,testdummy) #'-' in middel of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123-',tfield1,testdummy) #'-' at end of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1,3',tfield1,testdummy) #',', where ',' is not traid sep.
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1+3',tfield1,testdummy) #'+' in middle of number (no exp)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1E3',tfield1,testdummy) #'+' in middle of number (no exp)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'13+',tfield1,testdummy) #'+' at end
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234.100',tfield1,testdummy) #field too big
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'.',tfield1,testdummy) #no num
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-',tfield1,testdummy) #no num
        # #test filling up to min length
        tfield23 = ['TEST1', 'M', 8, 'N', True, 0, 5,'N']
        #~ print self.edi._formatfield('12345.5',tfield23,testdummy)
        #0 decimals: value is rounded to an integer
        self.assertEqual(self.edi._formatfield('12345.5',tfield23,testdummy), '12346','just large enough')
        tfield2 = ['TEST1', 'M', 8, 'N', True, 2, 5,'N']
        self.assertEqual(self.edi._formatfield('123.45',tfield2,testdummy), '123.45','just large enough')
        self.assertEqual(self.edi._formatfield('123.4549',tfield2,testdummy), '123.45','just large enough')
        #~ print self.edi._formatfield('123.455',tfield2,testdummy)
        self.assertEqual(self.edi._formatfield('123.455',tfield2,testdummy), '123.46','just large enough')
        self.assertEqual(self.edi._formatfield('0.1000',tfield2,testdummy), '000.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('00001',tfield2,testdummy), '001.00','keep leading zeroes')
        self.assertEqual(self.edi._formatfield('12',tfield2,testdummy), '012.00','add leading zeroes')
        self.assertEqual(self.edi._formatfield('.1',tfield2,testdummy), '000.10','add leading zeroes')
        #test exp; bots tries to convert to normal
        self.assertEqual(self.edi._formatfield('178E+3',tfield2,testdummy), '178000.00','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-178E+3',tfield2,testdummy), '-178000.00','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-178e-3',tfield2,testdummy), '-000.18','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-178e-5',tfield2,testdummy), '-000.00','add leading zeroes')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'178E+4',tfield2,testdummy) #too big with exp
        tfield4 = ['TEST1', 'M', 80, 'N', True, 3, 0,'N']
        self.assertEqual(self.edi._formatfield('12345678901234560',tfield4,testdummy), '12345678901234560.000','lot of digits')
        self.assertEqual(self.edi._formatfield('1234567890123456789012345',tfield4,testdummy), '1234567890123456789012345.000','lot of digits')
    def test_out_formatfield_var_I(self):
        ''' format I: implied decimal - the decimal point is removed, value
            scaled by the number of decimals ('1' with 2 decimals -> '100'). '''
        self.edi.ta_info['lengthnumericbare']=True
        self.edi.ta_info['decimaal']='.'
        tfield1 = ['TEST1','M',5,'I',True,2, 0, 'I']
        #                length    decimals  minlength  format
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '','empty string')
        self.assertEqual(self.edi._formatfield('1',tfield1,testdummy), '100', 'basic')
        self.assertEqual(self.edi._formatfield(' 1',tfield1,testdummy), '100', 'basic')
        self.assertEqual(self.edi._formatfield('1 ',tfield1,testdummy), '100', 'basic')
        self.assertEqual(self.edi._formatfield('0',tfield1,testdummy), '0','zero stays zero')
        self.assertEqual(self.edi._formatfield('-0',tfield1,testdummy), '-0','neg.zero stays neg.zero')
        self.assertEqual(self.edi._formatfield('-0.00',tfield1,testdummy), '-0','')
        self.assertEqual(self.edi._formatfield('-0.001',tfield1,testdummy), '-0','')
        self.assertEqual(self.edi._formatfield('-.12',tfield1,testdummy), '-12','no zero before dec,sign is OK') #TODO: puts ) in front
        self.assertEqual(self.edi._formatfield('123',tfield1,testdummy), '12300','numeric field at max')
        self.assertEqual(self.edi._formatfield('001',tfield1,testdummy), '100','leading zeroes are removed')
        self.assertEqual(self.edi._formatfield('0.10',tfield1,testdummy), '10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('123.1049',tfield1,testdummy), '12310','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('-1.23',tfield1,testdummy), '-123','numeric field at max with minus and decimal sign')
        self.assertEqual(self.edi._formatfield('0001',tfield1,testdummy), '100','strips leading zeroes if possobel')
        self.assertEqual(self.edi._formatfield('+123',tfield1,testdummy), '12300','strips leading zeroes if possobel')
        self.edi.ta_info['decimaal']=','
        self.assertEqual(self.edi._formatfield('1.23',tfield1,testdummy), '123','other dec.sig, replace')
        self.edi.ta_info['decimaal']='.'
        #invalid values must raise OutMessageError
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-1234.56',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1<3',tfield1,testdummy) #wrong char
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1-3',tfield1,testdummy) #'-' in middel of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123-',tfield1,testdummy) #'-' at end of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1,3',tfield1,testdummy) #',', where ',' is not traid sep.
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1+3',tfield1,testdummy) #'+' in middle of number (no exp)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1E3',tfield1,testdummy) #'+' in middle of number (no exp)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'13+',tfield1,testdummy) #'+' at end
        self.assertEqual(self.edi._formatfield('+13',tfield1,testdummy), '1300','other dec.sig, replace')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1+3',tfield1,testdummy) #'+' in middle of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234.100',tfield1,testdummy) #field too big
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'.',tfield1,testdummy) #no num
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-',tfield1,testdummy) #no num
        #~ #test filling up to min length
        tfield2 = ['TEST1', 'M', 8, 'I', True, 2, 5,'I']
        self.assertEqual(self.edi._formatfield('123.45',tfield2,testdummy), '12345','just large enough')
        self.assertEqual(self.edi._formatfield('123.4549',tfield2,testdummy), '12345','just large enough')
        self.assertEqual(self.edi._formatfield('123.455',tfield2,testdummy), '12346','just large enough')
        self.assertEqual(self.edi._formatfield('0.1000',tfield2,testdummy), '00010','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('00001',tfield2,testdummy), '00100','keep leading zeroes')
        self.assertEqual(self.edi._formatfield('12',tfield2,testdummy), '01200','add leading zeroes')
        self.assertEqual(self.edi._formatfield('.1',tfield2,testdummy), '00010','add leading zeroes')
        #test exp; bots tries to convert to normal
        self.assertEqual(self.edi._formatfield('178E+3',tfield2,testdummy), '17800000','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-178E+3',tfield2,testdummy), '-17800000','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-178e-3',tfield2,testdummy), '-00018','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-178e-5',tfield2,testdummy), '-00000','add leading zeroes')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'178E+4',tfield2,testdummy) #too big with exp
        tfield4 = ['TEST1', 'M', 80, 'I', True, 3, 0,'I']
        self.assertEqual(self.edi._formatfield('123456789012340',tfield4,testdummy), '123456789012340000','lot of digits')
    def test_out_formatfield_var_D(self):
        ''' format D: date; value must be a valid (CC)YYMMDD date. '''
        tfield1 = ['TEST1', 'M', 20, 'D', True, 0, 0,'D']
        #                 length    decimals  minlength
        self.assertEqual(self.edi._formatfield('20071001',tfield1,testdummy), '20071001','basic')
        self.assertEqual(self.edi._formatfield('071001',tfield1,testdummy), '071001','basic')
        self.assertEqual(self.edi._formatfield('99991001',tfield1,testdummy), '99991001','max year')
        self.assertEqual(self.edi._formatfield('00011001',tfield1,testdummy), '00011001','min year')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'2007093112',tfield1,testdummy) #too long
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'20070931',tfield1,testdummy) #no valid date
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-0070931',tfield1,testdummy) #no valid date
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'70931',tfield1,testdummy) #too short
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'0931',tfield1,testdummy) #too short
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'0931BC',tfield1,testdummy) #alfanum
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'OOOOBC',tfield1,testdummy) #alfanum
    def test_out_formatfield_var_T(self):
        ''' format T: time; HHMM or HHMMSS, hour/minute/second validated. '''
        tfield1 = ['TEST1', 'M', 10, 'T', True, 0, 0,'T']
        #                 length    decimals  minlength
        self.assertEqual(self.edi._formatfield('2359',tfield1,testdummy), '2359','basic')
        self.assertEqual(self.edi._formatfield('0000',tfield1,testdummy), '0000','basic')
        self.assertEqual(self.edi._formatfield('000000',tfield1,testdummy), '000000','basic')
        self.assertEqual(self.edi._formatfield('230000',tfield1,testdummy), '230000','basic')
        self.assertEqual(self.edi._formatfield('235959',tfield1,testdummy), '235959','basic')
        self.assertEqual(self.edi._formatfield('123456',tfield1,testdummy), '123456','basic')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'12345678',tfield1,testdummy) #no valid time
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'240001',tfield1,testdummy) #no valid time
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'126101',tfield1,testdummy) #no valid time
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'120062',tfield1,testdummy) #no valid time - python allows 61 secnds?
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'240000',tfield1,testdummy) #no valid time
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'250001',tfield1,testdummy) #no valid time
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-12000',tfield1,testdummy) #no valid time
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'120',tfield1,testdummy) #no valid time
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'0931233',tfield1,testdummy) #too short
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'11PM',tfield1,testdummy) #alfanum
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'TIME',tfield1,testdummy) #alfanum
        tfield2 = ['TEST1', 'M', 4, 'T', True, 0, 4,'T']
        #                 length    decimals  minlength
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'230001',tfield2,testdummy) #time too long
        tfield3 = ['TEST1', 'M', 6, 'T', True, 0, 6,'T']
        #                 length    decimals  minlength
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'2300',tfield3,testdummy) #time too short
    def test_out_formatfield_var_A(self):
        ''' format A: alfanumeric; only min/max length are checked. '''
        tfield1 = ['TEST1', 'M', 5, 'A', True, 0, 0,'A']
        #                 length    decimals  minlength
        self.assertEqual(self.edi._formatfield('abcde',tfield1,testdummy), 'abcde','basic')
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '','basic')
        self.assertEqual(self.edi._formatfield('a b',tfield1,testdummy), 'a b','basic')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'abcdef',tfield1,testdummy) #no valid date
        tfield1 = ['TEST1', 'M', 5, 'A', True, 0, 2,'A']
        #                 length    decimals  minlength
        self.assertEqual(self.edi._formatfield('abcde',tfield1,testdummy), 'abcde','basic')
        self.assertEqual(self.edi._formatfield('a b',tfield1,testdummy), 'a b','basic')
        self.assertEqual(self.edi._formatfield('aa',tfield1,testdummy), 'aa','basic')
        self.assertEqual(self.edi._formatfield('aaa',tfield1,testdummy), 'aaa','basic')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'a',tfield1,testdummy) #field too small
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,' ',tfield1,testdummy) #field too small
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'',tfield1,testdummy) #field too small
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'abcdef',tfield1,testdummy) #no valid date
class TestFormatFieldFixedOutmessage(unittest.TestCase):
def setUp(self):
self.edi = outmessage.outmessage_init(editype='fixed',messagetype='ordersfixed')
    def test_out_formatfield_fixedR(self):
        """Fixed edi, format 'R': numbers are left-padded with zeroes up to minlength; exponent notation is expanded to plain notation."""
        self.edi.ta_info['lengthnumericbare']=False     # sign and decimal separator count toward field length (see '-1.23' -> '-0001.23' below)
        self.edi.ta_info['decimaal']='.'
        tfield1 = ['TEST1','M',3,'R',True,0, 3, 'R']
        #                  length    decimals  minlength  format
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '000','empty string')
        self.assertEqual(self.edi._formatfield('1',tfield1,testdummy), '001', 'basic')
        self.assertEqual(self.edi._formatfield(' 1',tfield1,testdummy), '001', 'basic')
        self.assertEqual(self.edi._formatfield('1 ',tfield1,testdummy), '001', 'basic')
        self.assertEqual(self.edi._formatfield('0',tfield1,testdummy), '000','zero stays zero')
        self.assertEqual(self.edi._formatfield('-0',tfield1,testdummy), '-00','neg.zero stays neg.zero')
        tfield3 = ['TEST1','M',5,'R',True,2, 3, 'R']
        self.assertEqual(self.edi._formatfield('-0.00',tfield3,testdummy), '-0.00','')
        self.assertEqual(self.edi._formatfield('0.10',tfield3,testdummy), '0.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('123',tfield1,testdummy), '123','numeric field at max')
        self.assertEqual(self.edi._formatfield('001',tfield1,testdummy), '001','leading zeroes are removed')
        self.assertEqual(self.edi._formatfield('0001',tfield1,testdummy), '001','strips leading zeroes if possobel')
        self.assertEqual(self.edi._formatfield('+123',tfield1,testdummy), '123','strips leading zeroes if possobel')
        self.edi.ta_info['decimaal']=','
        self.assertEqual(self.edi._formatfield('1.2',tfield1,testdummy), '1,2','other dec.sig, replace')
        self.edi.ta_info['decimaal']='.'
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'.12',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-.12',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-1.234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1<3',tfield1,testdummy) #wrong char
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1-3',tfield1,testdummy) #'-' in middle of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123-',tfield1,testdummy) #'-' at end of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1,3',tfield1,testdummy) #',' is not a valid separator here (decimal sign is '.')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1+3',tfield1,testdummy) #'+' in middle of number (no exp)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1E3',tfield1,testdummy) #exp notation expands to 1000: exceeds max length (exp itself is accepted, see tfield2 tests below)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'13+',tfield1,testdummy) #'+' at end
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1+3',tfield1,testdummy) #'+' in middle of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'0.100',tfield1,testdummy) #field too big
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'.',tfield1,testdummy) #no num
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-',tfield1,testdummy) #no num
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'.001',tfield1,testdummy) #bots adds 0 before dec, thus too big
        # #test filling up to min length
        tfield2 = ['TEST1', 'M', 8, 'R', True, 0, 8,'R']
        self.assertEqual(self.edi._formatfield('12345',tfield2,testdummy), '00012345','just large enough')
        self.assertEqual(self.edi._formatfield('0.1000',tfield2,testdummy), '000.1000','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('00001',tfield2,testdummy), '00000001','keep leading zeroes')
        self.assertEqual(self.edi._formatfield('123',tfield2,testdummy), '00000123','add leading zeroes')
        self.assertEqual(self.edi._formatfield('.1',tfield2,testdummy), '000000.1','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-1.23',tfield2,testdummy), '-0001.23','numeric field at max with minus and decimal sign')
        #test exp
        self.assertEqual(self.edi._formatfield('12E+3',tfield2,testdummy), '00012000','Exponent notation is possible')
        self.assertEqual(self.edi._formatfield('12E3',tfield2,testdummy), '00012000','Exponent notation is possible->to std notation')
        self.assertEqual(self.edi._formatfield('12e+3',tfield2,testdummy), '00012000','Exponent notation is possible; e->E')
        self.assertEqual(self.edi._formatfield('12e3',tfield2,testdummy), '00012000','Exponent notation is possible; e->E')
        self.assertEqual(self.edi._formatfield('4567E+3',tfield2,testdummy), '04567000','do not count + and E')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'12345678E+3',tfield2,testdummy) #too big with exp
        # NOTE(review): negative-exponent tests below are disabled in the original suite; kept for reference.
        #~ #print '>>',self.edi._formatfield('12E-3',tfield2,testdummy)
        #~ self.assertEqual(self.edi._formatfield('12E-3',tfield2,testdummy), '0000.012','Exponent notation is possible')
        #~ self.assertEqual(self.edi._formatfield('12e-3',tfield2,testdummy), '0000.012','Exponent notation is possible; e->E')
        #~ self.assertEqual(self.edi._formatfield('1234567E-3',tfield2,testdummy), '1234.567','do not count + and E')
        #~ self.assertEqual(self.edi._formatfield('1234567E-6',tfield2,testdummy), '1.234567','do not count + and E')
        #~ self.assertEqual(self.edi._formatfield('123456E-6',tfield2,testdummy), '0.123456','do not count + and E')
        #~ self.assertEqual(self.edi._formatfield('-12345E-5',tfield2,testdummy), '-0.12345','do not count + and E')
        #~ self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'12345678E-8',tfield2,testdummy) #gets 0.12345678, is too big
        #~ self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'12345678E+3',tfield2,testdummy) #too big with exp
        tfield4 = ['TEST1', 'M', 30, 'R', True, 3, 30,'R']
        self.assertEqual(self.edi._formatfield('12345678901234560',tfield4,testdummy), '000000000000012345678901234560','lot of digits')
        tfield5 = ['TEST1','M',4,'R',True,2, 4, 'R']
        self.assertEqual(self.edi._formatfield('0.00',tfield5,testdummy), '0.00','lot of digits')
        tfield6 = ['TEST1','M',5,'R',True,2, 5, 'R']
        self.assertEqual(self.edi._formatfield('12.45',tfield6,testdummy), '12.45','lot of digits')
    def test_out_formatfield_fixedRL(self):
        """Fixed edi, format 'RL': like 'R' but left-aligned — value is right-padded with spaces up to minlength."""
        self.edi.ta_info['lengthnumericbare']=False     # sign and decimal separator count toward field length
        self.edi.ta_info['decimaal']='.'
        tfield1 = ['TEST1','M',3,'RL',True,0, 3, 'R']
        #                  length    decimals  minlength  format
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '0  ','empty string')
        self.assertEqual(self.edi._formatfield('1',tfield1,testdummy), '1  ', 'basic')
        self.assertEqual(self.edi._formatfield(' 1',tfield1,testdummy), '1  ', 'basic')
        self.assertEqual(self.edi._formatfield('1 ',tfield1,testdummy), '1  ', 'basic')
        self.assertEqual(self.edi._formatfield('0',tfield1,testdummy), '0  ','zero stays zero')
        self.assertEqual(self.edi._formatfield('-0',tfield1,testdummy), '-0 ','neg.zero stays neg.zero')
        tfield3 = ['TEST1','M',5,'RL',True,2, 3, 'R']
        self.assertEqual(self.edi._formatfield('-0.00',tfield3,testdummy), '-0.00','')
        self.assertEqual(self.edi._formatfield('0.10',tfield3,testdummy), '0.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('123',tfield1,testdummy), '123','numeric field at max')
        self.assertEqual(self.edi._formatfield('001',tfield1,testdummy), '1  ','leading zeroes are removed')
        self.assertEqual(self.edi._formatfield('0001',tfield1,testdummy), '1  ','strips leading zeroes if possobel')
        self.assertEqual(self.edi._formatfield('+123',tfield1,testdummy), '123','strips leading zeroes if possobel')
        self.edi.ta_info['decimaal']=','
        self.assertEqual(self.edi._formatfield('1.2',tfield1,testdummy), '1,2','other dec.sig, replace')
        self.edi.ta_info['decimaal']='.'
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'.12',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-.12',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-1.234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1<3',tfield1,testdummy) #wrong char
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1-3',tfield1,testdummy) #'-' in middle of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123-',tfield1,testdummy) #'-' at end of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1,3',tfield1,testdummy) #',' is not a valid separator here (decimal sign is '.')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1+3',tfield1,testdummy) #'+' in middle of number (no exp)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1E3',tfield1,testdummy) #exp notation expands to 1000: exceeds max length (exp itself is accepted, see tfield2 tests below)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'13+',tfield1,testdummy) #'+' at end
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1+3',tfield1,testdummy) #'+' in middle of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'0.100',tfield1,testdummy) #field too big
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'.',tfield1,testdummy) #no num
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-',tfield1,testdummy) #no num
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'.001',tfield1,testdummy) #bots adds 0 before dec, thus too big
        # #test filling up to min length
        tfield2 = ['TEST1', 'M', 8, 'RL', True, 0, 8,'R']
        self.assertEqual(self.edi._formatfield('12345',tfield2,testdummy), '12345   ','just large enough')
        self.assertEqual(self.edi._formatfield('0.1000',tfield2,testdummy), '0.1000  ','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('00001',tfield2,testdummy), '1       ','keep leading zeroes')
        self.assertEqual(self.edi._formatfield('123',tfield2,testdummy), '123     ','add leading zeroes')
        self.assertEqual(self.edi._formatfield('.1',tfield2,testdummy), '0.1     ','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-1.23',tfield2,testdummy), '-1.23   ','numeric field at max with minus and decimal sign')
        #test exp
        self.assertEqual(self.edi._formatfield('12E+3',tfield2,testdummy), '12000   ','Exponent notation is possible')
        self.assertEqual(self.edi._formatfield('12E3',tfield2,testdummy), '12000   ','Exponent notation is possible->to std notation')
        self.assertEqual(self.edi._formatfield('12e+3',tfield2,testdummy), '12000   ','Exponent notation is possible; e->E')
        self.assertEqual(self.edi._formatfield('12e3',tfield2,testdummy), '12000   ','Exponent notation is possible; e->E')
        self.assertEqual(self.edi._formatfield('4567E+3',tfield2,testdummy), '4567000 ','do not count + and E')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'12345678E+3',tfield2,testdummy) #too big with exp
        # NOTE(review): negative-exponent tests below are disabled in the original suite; kept for reference.
        #~ #print '>>',self.edi._formatfield('12E-3',tfield2,testdummy)
        #~ self.assertEqual(self.edi._formatfield('12E-3',tfield2,testdummy), '0000.012','Exponent notation is possible')
        #~ self.assertEqual(self.edi._formatfield('12e-3',tfield2,testdummy), '0000.012','Exponent notation is possible; e->E')
        #~ self.assertEqual(self.edi._formatfield('1234567E-3',tfield2,testdummy), '1234.567','do not count + and E')
        #~ self.assertEqual(self.edi._formatfield('1234567E-6',tfield2,testdummy), '1.234567','do not count + and E')
        #~ self.assertEqual(self.edi._formatfield('123456E-6',tfield2,testdummy), '0.123456','do not count + and E')
        #~ self.assertEqual(self.edi._formatfield('-12345E-5',tfield2,testdummy), '-0.12345','do not count + and E')
        #~ self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'12345678E-8',tfield2,testdummy) #gets 0.12345678, is too big
        #~ self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'12345678E+3',tfield2,testdummy) #too big with exp
        tfield4 = ['TEST1', 'M', 30, 'RL', True, 3, 30,'R']
        self.assertEqual(self.edi._formatfield('12345678901234560',tfield4,testdummy), '12345678901234560             ','lot of digits')
        tfield5 = ['TEST1','M',4,'RL',True,2, 4, 'N']
        self.assertEqual(self.edi._formatfield('0.00',tfield5,testdummy), '0.00','lot of digits')
        tfield6 = ['TEST1','M',5,'RL',True,2, 5, 'N']
        self.assertEqual(self.edi._formatfield('12.45',tfield6,testdummy), '12.45','lot of digits')
    def test_out_formatfield_fixedRR(self):
        """Fixed edi, format 'RR': like 'R' but right-aligned — value is left-padded with spaces up to minlength."""
        self.edi.ta_info['lengthnumericbare']=False     # sign and decimal separator count toward field length
        self.edi.ta_info['decimaal']='.'
        tfield1 = ['TEST1','M',3,'RR',True,0, 3, 'R']
        #                  length    decimals  minlength  format
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '  0','empty string')
        self.assertEqual(self.edi._formatfield('1',tfield1,testdummy), '  1', 'basic')
        self.assertEqual(self.edi._formatfield(' 1',tfield1,testdummy), '  1', 'basic')
        self.assertEqual(self.edi._formatfield('1 ',tfield1,testdummy), '  1', 'basic')
        self.assertEqual(self.edi._formatfield('0',tfield1,testdummy), '  0','zero stays zero')
        self.assertEqual(self.edi._formatfield('-0',tfield1,testdummy), ' -0','neg.zero stays neg.zero')
        tfield3 = ['TEST1','M',5,'RR',True,2, 3, 'R']
        self.assertEqual(self.edi._formatfield('-0.00',tfield3,testdummy), '-0.00','')
        self.assertEqual(self.edi._formatfield('0.10',tfield3,testdummy), '0.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('123',tfield1,testdummy), '123','numeric field at max')
        self.assertEqual(self.edi._formatfield('001',tfield1,testdummy), '  1','leading zeroes are removed')
        self.assertEqual(self.edi._formatfield('0001',tfield1,testdummy), '  1','strips leading zeroes if possobel')
        self.assertEqual(self.edi._formatfield('+123',tfield1,testdummy), '123','strips leading zeroes if possobel')
        self.edi.ta_info['decimaal']=','
        self.assertEqual(self.edi._formatfield('1.2',tfield1,testdummy), '1,2','other dec.sig, replace')
        self.edi.ta_info['decimaal']='.'
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '.12',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '-.12',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '1234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '-1.234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '1<3',tfield1,testdummy) #wrong char
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '1-3',tfield1,testdummy) #'-' in middle of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '123-',tfield1,testdummy) #'-' at end of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '1,3',tfield1,testdummy) #',' is not a valid separator here (decimal sign is '.')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '1+3',tfield1,testdummy) #'+' in middle of number (no exp)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '1E3',tfield1,testdummy) #exp notation expands to 1000: exceeds max length (exp itself is accepted, see tfield2 tests below)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '13+',tfield1,testdummy) #'+' at end
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '1+3',tfield1,testdummy) #'+' in middle of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '0.100',tfield1,testdummy) #field too big
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '.',tfield1,testdummy) #no num
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '-',tfield1,testdummy) #no num
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield, '.001',tfield1,testdummy) #bots adds 0 before dec, thus too big
        # #test filling up to min length
        tfield2 = ['TEST1', 'M', 8, 'RR', True, 0, 8,'R']
        self.assertEqual(self.edi._formatfield('12345',tfield2,testdummy), '   12345','just large enough')
        self.assertEqual(self.edi._formatfield('0.1000',tfield2,testdummy), '  0.1000','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('00001',tfield2,testdummy), '       1','keep leading zeroes')
        self.assertEqual(self.edi._formatfield('123',tfield2,testdummy), '     123','add leading zeroes')
        self.assertEqual(self.edi._formatfield('.1',tfield2,testdummy), '     0.1','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-1.23',tfield2,testdummy), '   -1.23','numeric field at max with minus and decimal sign')
        #test exp
        self.assertEqual(self.edi._formatfield('12E+3',tfield2,testdummy), '   12000','Exponent notation is possible')
        self.assertEqual(self.edi._formatfield('12E3',tfield2,testdummy), '   12000','Exponent notation is possible->to std notation')
        self.assertEqual(self.edi._formatfield('12e+3',tfield2,testdummy), '   12000','Exponent notation is possible; e->E')
        self.assertEqual(self.edi._formatfield('12e3',tfield2,testdummy), '   12000','Exponent notation is possible; e->E')
        self.assertEqual(self.edi._formatfield('4567E+3',tfield2,testdummy), ' 4567000','do not count + and E')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'12345678E+3',tfield2,testdummy) #too big with exp
        # NOTE(review): negative-exponent tests below are disabled in the original suite; kept for reference.
        #~ #print '>>',self.edi._formatfield('12E-3',tfield2,testdummy)
        #~ self.assertEqual(self.edi._formatfield('12E-3',tfield2,testdummy), '0000.012','Exponent notation is possible')
        #~ self.assertEqual(self.edi._formatfield('12e-3',tfield2,testdummy), '0000.012','Exponent notation is possible; e->E')
        #~ self.assertEqual(self.edi._formatfield('1234567E-3',tfield2,testdummy), '1234.567','do not count + and E')
        #~ self.assertEqual(self.edi._formatfield('1234567E-6',tfield2,testdummy), '1.234567','do not count + and E')
        #~ self.assertEqual(self.edi._formatfield('123456E-6',tfield2,testdummy), '0.123456','do not count + and E')
        #~ self.assertEqual(self.edi._formatfield('-12345E-5',tfield2,testdummy), '-0.12345','do not count + and E')
        #~ self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'12345678E-8',tfield2,testdummy) #gets 0.12345678, is too big
        #~ self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'12345678E+3',tfield2,testdummy) #too big with exp
        tfield4 = ['TEST1', 'M', 30, 'RR', True, 3, 30,'R']
        self.assertEqual(self.edi._formatfield('12345678901234560',tfield4,testdummy), '             12345678901234560','lot of digits')
        tfield5 = ['TEST1','M',4,'RR',True,2, 4, 'N']
        self.assertEqual(self.edi._formatfield('0.00',tfield5,testdummy), '0.00','lot of digits')
        tfield6 = ['TEST1','M',5,'RR',True,2, 5, 'N']
        self.assertEqual(self.edi._formatfield('12.45',tfield6,testdummy), '12.45','lot of digits')
    def test_out_formatfield_fixedN(self):
        """Fixed edi, format 'N': value is rounded/filled to a fixed number of decimals and left-padded with zeroes up to minlength."""
        self.edi.ta_info['decimaal']='.'
        self.edi.ta_info['lengthnumericbare']=False     # sign and decimal separator count toward field length
        tfield1 = ['TEST1','M',5,'N',True,2, 5, 'N']
        #                  length    decimals  minlength  format
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '00.00','empty string')
        self.assertEqual(self.edi._formatfield('1',tfield1,testdummy), '01.00', 'basic')
        self.assertEqual(self.edi._formatfield(' 1',tfield1,testdummy), '01.00', 'basic')
        self.assertEqual(self.edi._formatfield('1 ',tfield1,testdummy), '01.00', 'basic')
        self.assertEqual(self.edi._formatfield('0',tfield1,testdummy), '00.00','zero stays zero')
        self.assertEqual(self.edi._formatfield('-0',tfield1,testdummy), '-0.00','neg.zero stays neg.zero')
        self.assertEqual(self.edi._formatfield('-0.00',tfield1,testdummy), '-0.00','')
        self.assertEqual(self.edi._formatfield('-0.001',tfield1,testdummy), '-0.00','')
        self.assertEqual(self.edi._formatfield('-.12',tfield1,testdummy), '-0.12','no zero before dec,sign is OK')
        self.assertEqual(self.edi._formatfield('001',tfield1,testdummy), '01.00','leading zeroes are removed')
        self.assertEqual(self.edi._formatfield('0.10',tfield1,testdummy), '00.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('12.1049',tfield1,testdummy), '12.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('-1.23',tfield1,testdummy), '-1.23','numeric field at max with minus and decimal sign')
        self.assertEqual(self.edi._formatfield('0001',tfield1,testdummy), '01.00','strips leading zeroes if possobel')
        self.assertEqual(self.edi._formatfield('+13',tfield1,testdummy), '13.00','strips leading zeroes if possobel')
        self.edi.ta_info['decimaal']=','
        self.assertEqual(self.edi._formatfield('1.23',tfield1,testdummy), '01,23','other dec.sig, replace')
        self.edi.ta_info['decimaal']='.'
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123.1049',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-1234.56',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1<3',tfield1,testdummy) #wrong char
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1-3',tfield1,testdummy) #'-' in middle of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123-',tfield1,testdummy) #'-' at end of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1,3',tfield1,testdummy) #',' is not a valid separator here (decimal sign is '.')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1+3',tfield1,testdummy) #'+' in middle of number (no exp)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1E3',tfield1,testdummy) #exp notation expands to 1000.00: exceeds max length (exp itself is accepted, see tfield2 tests below)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'13+',tfield1,testdummy) #'+' at end
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234.100',tfield1,testdummy) #field too big
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'.',tfield1,testdummy) #no num
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-',tfield1,testdummy) #no num
        # #test filling up to min length
        tfield2 = ['TEST1', 'M', 8, 'N', True, 2, 8,'N']
        self.assertEqual(self.edi._formatfield('123.45',tfield2,testdummy), '00123.45','just large enough')
        self.assertEqual(self.edi._formatfield('123.4549',tfield2,testdummy), '00123.45','just large enough')
        self.assertEqual(self.edi._formatfield('123.455',tfield2,testdummy), '00123.46','just large enough')
        self.assertEqual(self.edi._formatfield('0.1000',tfield2,testdummy), '00000.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('00001',tfield2,testdummy), '00001.00','keep leading zeroes')
        self.assertEqual(self.edi._formatfield('12',tfield2,testdummy), '00012.00','add leading zeroes')
        self.assertEqual(self.edi._formatfield('.1',tfield2,testdummy), '00000.10','add leading zeroes')
        #test exp; bots tries to convert to normal
        self.assertEqual(self.edi._formatfield('78E+3',tfield2,testdummy), '78000.00','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-8E+3',tfield2,testdummy), '-8000.00','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-178e-3',tfield2,testdummy), '-0000.18','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-178e-5',tfield2,testdummy), '-0000.00','add leading zeroes')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'178E+4',tfield2,testdummy) #too big with exp
        tfield4 = ['TEST1', 'M', 30, 'N', True, 3, 30,'N']
        self.assertEqual(self.edi._formatfield('1234567890123456',tfield4,testdummy), '00000000001234567890123456.000','lot of digits')
        #test N format, zero decimals
        tfield7 = ['TEST1', 'M', 5, 'N', True, 0, 5, 'N']
        #                   length    decimals  minlength  format
        self.assertEqual(self.edi._formatfield('12345',tfield7,testdummy), '12345','')
        self.assertEqual(self.edi._formatfield('1.234',tfield7,testdummy), '00001','')
        self.assertEqual(self.edi._formatfield('123.4',tfield7,testdummy), '00123','')
        self.assertEqual(self.edi._formatfield('0.0',tfield7,testdummy), '00000','')
    def test_out_formatfield_fixedNL(self):
        """Fixed edi, format 'NL': like 'N' but left-aligned — fixed decimals, right-padded with spaces up to minlength."""
        self.edi.ta_info['decimaal']='.'
        self.edi.ta_info['lengthnumericbare']=False     # sign and decimal separator count toward field length
        tfield1 = ['TEST1','M',5,'NL',True,2, 5, 'N']
        #                  length    decimals  minlength  format
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '0.00 ','empty string')
        self.assertEqual(self.edi._formatfield('1',tfield1,testdummy), '1.00 ', 'basic')
        self.assertEqual(self.edi._formatfield(' 1',tfield1,testdummy), '1.00 ', 'basic')
        self.assertEqual(self.edi._formatfield('1 ',tfield1,testdummy), '1.00 ', 'basic')
        self.assertEqual(self.edi._formatfield('0',tfield1,testdummy), '0.00 ','zero stays zero')
        self.assertEqual(self.edi._formatfield('-0',tfield1,testdummy), '-0.00','neg.zero stays neg.zero')
        self.assertEqual(self.edi._formatfield('-0.00',tfield1,testdummy), '-0.00','')
        self.assertEqual(self.edi._formatfield('-0.001',tfield1,testdummy), '-0.00','')
        self.assertEqual(self.edi._formatfield('-.12',tfield1,testdummy), '-0.12','no zero before dec,sign is OK')
        self.assertEqual(self.edi._formatfield('001',tfield1,testdummy), '1.00 ','leading zeroes are removed')
        self.assertEqual(self.edi._formatfield('0.10',tfield1,testdummy), '0.10 ','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('12.1049',tfield1,testdummy), '12.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('-1.23',tfield1,testdummy), '-1.23','numeric field at max with minus and decimal sign')
        self.assertEqual(self.edi._formatfield('0001',tfield1,testdummy), '1.00 ','strips leading zeroes if possobel')
        self.assertEqual(self.edi._formatfield('+13',tfield1,testdummy), '13.00','strips leading zeroes if possobel')
        self.edi.ta_info['decimaal']=','
        self.assertEqual(self.edi._formatfield('1.23',tfield1,testdummy), '1,23 ','other dec.sig, replace')
        self.edi.ta_info['decimaal']='.'
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123.1049',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-1234.56',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1<3',tfield1,testdummy) #wrong char
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1-3',tfield1,testdummy) #'-' in middle of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123-',tfield1,testdummy) #'-' at end of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1,3',tfield1,testdummy) #',' is not a valid separator here (decimal sign is '.')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1+3',tfield1,testdummy) #'+' in middle of number (no exp)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1E3',tfield1,testdummy) #exp notation expands to 1000.00: exceeds max length (exp itself is accepted, see tfield2 tests below)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'13+',tfield1,testdummy) #'+' at end
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234.100',tfield1,testdummy) #field too big
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'.',tfield1,testdummy) #no num
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-',tfield1,testdummy) #no num
        # #test filling up to min length
        tfield2 = ['TEST1', 'M', 8, 'NL', True, 2, 8,'N']
        self.assertEqual(self.edi._formatfield('123.45',tfield2,testdummy), '123.45  ','just large enough')
        self.assertEqual(self.edi._formatfield('123.4549',tfield2,testdummy), '123.45  ','just large enough')
        self.assertEqual(self.edi._formatfield('123.455',tfield2,testdummy), '123.46  ','just large enough')
        self.assertEqual(self.edi._formatfield('0.1000',tfield2,testdummy), '0.10    ','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('00001',tfield2,testdummy), '1.00    ','keep leading zeroes')
        self.assertEqual(self.edi._formatfield('12',tfield2,testdummy), '12.00   ','add leading zeroes')
        self.assertEqual(self.edi._formatfield('.1',tfield2,testdummy), '0.10    ','add leading zeroes')
        #test exp; bots tries to convert to normal
        self.assertEqual(self.edi._formatfield('78E+3',tfield2,testdummy), '78000.00','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-8E+3',tfield2,testdummy), '-8000.00','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-178e-3',tfield2,testdummy), '-0.18   ','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-178e-5',tfield2,testdummy), '-0.00   ','add leading zeroes')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'178E+4',tfield2,testdummy) #too big with exp
        tfield4 = ['TEST1', 'M', 30, 'NL', True, 3, 30,'N']
        self.assertEqual(self.edi._formatfield('1234567890123456',tfield4,testdummy), '1234567890123456.000          ','lot of digits')
        #test N format, zero decimals
        tfield7 = ['TEST1', 'M', 5, 'NL', True, 0, 5, 'N']
        #                   length    decimals  minlength  format
        self.assertEqual(self.edi._formatfield('12345',tfield7,testdummy), '12345','')
        self.assertEqual(self.edi._formatfield('1.234',tfield7,testdummy), '1    ','')
        self.assertEqual(self.edi._formatfield('123.4',tfield7,testdummy), '123  ','')
        self.assertEqual(self.edi._formatfield('0.0',tfield7,testdummy), '0    ','')
    def test_out_formatfield_fixedNR(self):
        """Fixed edi, format 'NR': like 'N' but right-aligned — fixed decimals, left-padded with spaces up to minlength."""
        self.edi.ta_info['decimaal']='.'
        self.edi.ta_info['lengthnumericbare']=False     # sign and decimal separator count toward field length
        tfield1 = ['TEST1','M',5,'NR',True,2, 5, 'N']
        #                  length    decimals  minlength  format
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), ' 0.00','empty string')
        self.assertEqual(self.edi._formatfield('1',tfield1,testdummy), ' 1.00', 'basic')
        self.assertEqual(self.edi._formatfield(' 1',tfield1,testdummy), ' 1.00', 'basic')
        self.assertEqual(self.edi._formatfield('1 ',tfield1,testdummy), ' 1.00', 'basic')
        self.assertEqual(self.edi._formatfield('0',tfield1,testdummy), ' 0.00','zero stays zero')
        self.assertEqual(self.edi._formatfield('-0',tfield1,testdummy), '-0.00','neg.zero stays neg.zero')
        self.assertEqual(self.edi._formatfield('-0.00',tfield1,testdummy), '-0.00','')
        self.assertEqual(self.edi._formatfield('-0.001',tfield1,testdummy), '-0.00','')
        self.assertEqual(self.edi._formatfield('-.12',tfield1,testdummy), '-0.12','no zero before dec,sign is OK')
        self.assertEqual(self.edi._formatfield('001',tfield1,testdummy), ' 1.00','leading zeroes are removed')
        self.assertEqual(self.edi._formatfield('0.10',tfield1,testdummy), ' 0.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('12.1049',tfield1,testdummy), '12.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('-1.23',tfield1,testdummy), '-1.23','numeric field at max with minus and decimal sign')
        self.assertEqual(self.edi._formatfield('0001',tfield1,testdummy), ' 1.00','strips leading zeroes if possobel')
        self.assertEqual(self.edi._formatfield('+13',tfield1,testdummy), '13.00','strips leading zeroes if possobel')
        self.edi.ta_info['decimaal']=','
        self.assertEqual(self.edi._formatfield('1.23',tfield1,testdummy), ' 1,23','other dec.sig, replace')
        self.edi.ta_info['decimaal']='.'
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123.1049',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-1234.56',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1<3',tfield1,testdummy) #wrong char
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1-3',tfield1,testdummy) #'-' in middle of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123-',tfield1,testdummy) #'-' at end of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1,3',tfield1,testdummy) #',' is not a valid separator here (decimal sign is '.')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1+3',tfield1,testdummy) #'+' in middle of number (no exp)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1E3',tfield1,testdummy) #exp notation expands to 1000.00: exceeds max length (exp itself is accepted, see tfield2 tests below)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'13+',tfield1,testdummy) #'+' at end
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234.100',tfield1,testdummy) #field too big
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'.',tfield1,testdummy) #no num
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-',tfield1,testdummy) #no num
        # #test filling up to min length
        tfield2 = ['TEST1', 'M', 8, 'NR', True, 2, 8,'N']
        self.assertEqual(self.edi._formatfield('123.45',tfield2,testdummy), '  123.45','just large enough')
        self.assertEqual(self.edi._formatfield('123.4549',tfield2,testdummy), '  123.45','just large enough')
        self.assertEqual(self.edi._formatfield('123.455',tfield2,testdummy), '  123.46','just large enough')
        self.assertEqual(self.edi._formatfield('0.1000',tfield2,testdummy), '    0.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('00001',tfield2,testdummy), '    1.00','keep leading zeroes')
        self.assertEqual(self.edi._formatfield('12',tfield2,testdummy), '   12.00','add leading zeroes')
        self.assertEqual(self.edi._formatfield('.1',tfield2,testdummy), '    0.10','add leading zeroes')
        #test exp; bots tries to convert to normal
        self.assertEqual(self.edi._formatfield('78E+3',tfield2,testdummy), '78000.00','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-8E+3',tfield2,testdummy), '-8000.00','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-178e-3',tfield2,testdummy), '   -0.18','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-178e-5',tfield2,testdummy), '   -0.00','add leading zeroes')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'178E+4',tfield2,testdummy) #too big with exp
        tfield4 = ['TEST1', 'M', 30, 'NR', True, 3, 30,'N']
        self.assertEqual(self.edi._formatfield('1234567890123456',tfield4,testdummy), '          1234567890123456.000','lot of digits')
        #test N format, zero decimals
        tfield7 = ['TEST1', 'M', 5, 'NR', True, 0, 5, 'N']
        #                   length    decimals  minlength  format
        self.assertEqual(self.edi._formatfield('12345',tfield7,testdummy), '12345','')
        self.assertEqual(self.edi._formatfield('1.234',tfield7,testdummy), '    1','')
        self.assertEqual(self.edi._formatfield('123.4',tfield7,testdummy), '  123','')
        self.assertEqual(self.edi._formatfield('0.0',tfield7,testdummy), '    0','')
    def test_out_formatfield_fixedI(self):
        """Outgoing fixed-width 'I' (implied-decimal) fields: the decimal point is not
        written; the value is scaled to the implied number of decimals and zero-padded
        to the full field length. Invalid values raise botslib.OutMessageError.
        """
        self.edi.ta_info['lengthnumericbare']=False
        self.edi.ta_info['decimaal']='.'
        tfield1 = ['TEST1','M',5,'I',True,2, 5, 'I']
        # field layout by position: length=5, implied decimals=2, minlength=5, format='I'
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '00000','empty string is initialised as 00000')
        self.assertEqual(self.edi._formatfield('1',tfield1,testdummy), '00100', 'basic')
        self.assertEqual(self.edi._formatfield(' 1',tfield1,testdummy), '00100', 'basic')
        self.assertEqual(self.edi._formatfield('1 ',tfield1,testdummy), '00100', 'basic')
        self.assertEqual(self.edi._formatfield('0',tfield1,testdummy), '00000','zero stays zero')
        self.assertEqual(self.edi._formatfield('-0',tfield1,testdummy), '-0000','neg.zero stays neg.zero')
        self.assertEqual(self.edi._formatfield('-0.00',tfield1,testdummy), '-0000','')
        self.assertEqual(self.edi._formatfield('-0.001',tfield1,testdummy), '-0000','')
        self.assertEqual(self.edi._formatfield('-.12',tfield1,testdummy), '-0012','no zero before dec,sign is OK') #TODO: puts ) in front
        self.assertEqual(self.edi._formatfield('123',tfield1,testdummy), '12300','numeric field at max')
        self.assertEqual(self.edi._formatfield('001',tfield1,testdummy), '00100','leading zeroes are removed')
        self.assertEqual(self.edi._formatfield('0.10',tfield1,testdummy), '00010','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('123.1049',tfield1,testdummy), '12310','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('-1.23',tfield1,testdummy), '-0123','numeric field at max with minus and decimal sign')
        self.assertEqual(self.edi._formatfield('0001',tfield1,testdummy), '00100','strips leading zeroes if possobel')
        self.assertEqual(self.edi._formatfield('+123',tfield1,testdummy), '12300','strips leading zeroes if possobel')
        # with another decimal separator configured, input using it is still normalised
        self.edi.ta_info['decimaal']=','
        self.assertEqual(self.edi._formatfield('1.23',tfield1,testdummy), '00123','other dec.sig, replace')
        self.edi.ta_info['decimaal']='.'
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-1234.56',tfield1,testdummy) #field too large
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1<3',tfield1,testdummy) #wrong char
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1-3',tfield1,testdummy) #'-' in middle of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'123-',tfield1,testdummy) #'-' at end of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1,3',tfield1,testdummy) #',' while ',' is not the configured decimal separator
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1+3',tfield1,testdummy) #'+' in middle of number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1E3',tfield1,testdummy) #exponent not allowed here (would not fit)
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'13+',tfield1,testdummy) #'+' at end
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1234.100',tfield1,testdummy) #field too big
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'.',tfield1,testdummy) #no number
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-',tfield1,testdummy) #no number
        #test filling up to min length
        tfield2 = ['TEST1', 'M', 8, 'I', True, 2, 8,'I']
        self.assertEqual(self.edi._formatfield('123.45',tfield2,testdummy), '00012345','just large enough')
        self.assertEqual(self.edi._formatfield('123.4549',tfield2,testdummy), '00012345','just large enough')
        self.assertEqual(self.edi._formatfield('123.455',tfield2,testdummy), '00012346','just large enough')
        self.assertEqual(self.edi._formatfield('0.1000',tfield2,testdummy), '00000010','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('00001',tfield2,testdummy), '00000100','keep leading zeroes')
        self.assertEqual(self.edi._formatfield('12',tfield2,testdummy), '00001200','add leading zeroes')
        self.assertEqual(self.edi._formatfield('.1',tfield2,testdummy), '00000010','add leading zeroes')
        #test exp; bots tries to convert to normal
        self.assertEqual(self.edi._formatfield('178E+3',tfield2,testdummy), '17800000','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-17E+3',tfield2,testdummy), '-1700000','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-178e-3',tfield2,testdummy), '-0000018','add leading zeroes')
        self.assertEqual(self.edi._formatfield('-178e-5',tfield2,testdummy), '-0000000','add leading zeroes')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'178E+4',tfield2,testdummy) #too big with exp
        tfield4 = ['TEST1', 'M', 80, 'I', True, 3, 0,'I']
        self.assertEqual(self.edi._formatfield('123456789012340',tfield4,testdummy), '123456789012340000','lot of digits')
def test_out_formatfield_fixedD(self):
tfield1 = ['TEST1', 'M', 8, 'D', True, 0, 8,'D']
# length decimals minlength
self.assertEqual(self.edi._formatfield('20071001',tfield1,testdummy), '20071001','basic')
self.assertEqual(self.edi._formatfield('99991001',tfield1,testdummy), '99991001','max year')
self.assertEqual(self.edi._formatfield('00011001',tfield1,testdummy), '00011001','min year')
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'2007093112',tfield1,testdummy) #too long
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'20070931',tfield1,testdummy) #no valid date
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-0070931',tfield1,testdummy) #no valid date
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'70931',tfield1,testdummy) #too short
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'0931',tfield1,testdummy) #too short
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'0931BC',tfield1,testdummy) #alfanum
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'OOOOBC',tfield1,testdummy) #alfanum
tfield2 = ['TEST1', 'M', 6, 'D', True, 0, 6,'D']
# length decimals minlength
self.assertEqual(self.edi._formatfield('071001',tfield2,testdummy), '071001','basic')
def test_out_formatfield_fixedT(self):
tfield1 = ['TEST1', 'M', 4, 'T', True, 0, 4,'T']
# length decimals minlength
self.assertEqual(self.edi._formatfield('2359',tfield1,testdummy), '2359','basic')
self.assertEqual(self.edi._formatfield('0000',tfield1,testdummy), '0000','basic')
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'2401',tfield1,testdummy) #no valid date
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1261',tfield1,testdummy) #no valid date
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1262',tfield1,testdummy) #no valid date - python allows 61 secnds?
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'2400',tfield1,testdummy) #no valid date
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'2501',tfield1,testdummy) #no valid date
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-1200',tfield1,testdummy) #no valid date
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'120',tfield1,testdummy) #too short
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'093123',tfield1,testdummy) #too long
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'11PM',tfield1,testdummy) #alfanum
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'TIME',tfield1,testdummy) #alfanum
tfield2 = ['TEST1', 'M', 6, 'T', True, 0, 6,'T']
# length decimals minlength
self.assertEqual(self.edi._formatfield('000000',tfield2,testdummy), '000000','basic')
self.assertEqual(self.edi._formatfield('230000',tfield2,testdummy), '230000','basic')
self.assertEqual(self.edi._formatfield('235959',tfield2,testdummy), '235959','basic')
self.assertEqual(self.edi._formatfield('123456',tfield2,testdummy), '123456','basic')
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'240001',tfield2,testdummy) #no valid date
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'126101',tfield2,testdummy) #no valid date
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'120062',tfield2,testdummy) #no valid date - python allows 61 secnds?
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'240000',tfield2,testdummy) #no valid date
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'250001',tfield2,testdummy) #no valid date
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'-12000',tfield2,testdummy) #no valid date
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'120',tfield2,testdummy) #too short
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'0931233',tfield2,testdummy) #too short
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'1100PM',tfield2,testdummy) #alfanum
self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'11TIME',tfield2,testdummy) #alfanum
    def test_out_formatfield_fixedA(self):
        """Outgoing fixed alfanumeric ('A') fields: value is left-aligned and padded
        with spaces up to the field length; values longer than the field raise.
        """
        tfield1 = ['TEST1', 'M', 5, 'A', True, 0, 5,'A']
        # field layout by position: length=5, decimals=0, minlength=5
        self.assertEqual(self.edi._formatfield('abcde',tfield1,testdummy), 'abcde','basic')
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '     ','basic')
        self.assertEqual(self.edi._formatfield('ab ',tfield1,testdummy), 'ab   ','basic')
        self.assertEqual(self.edi._formatfield('a b',tfield1,testdummy), 'a b  ','basic')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'abcdef',tfield1,testdummy) #value longer than field
        # NOTE(review): tfield1 below is identical to the one above; this second run
        # repeats some cases and adds space-only inputs.
        tfield1 = ['TEST1', 'M', 5, 'A', True, 0, 5,'A']
        self.assertEqual(self.edi._formatfield('abcde',tfield1,testdummy), 'abcde','basic')
        self.assertEqual(self.edi._formatfield('ab ',tfield1,testdummy), 'ab   ','basic')
        self.assertEqual(self.edi._formatfield('a b',tfield1,testdummy), 'a b  ','basic')
        self.assertEqual(self.edi._formatfield('a',tfield1,testdummy), 'a    ','basic')
        self.assertEqual(self.edi._formatfield(' ',tfield1,testdummy), '     ','basic')
        self.assertEqual(self.edi._formatfield('   ',tfield1,testdummy), '     ','basic')
        self.assertEqual(self.edi._formatfield('     ',tfield1,testdummy), '     ','basic')
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '     ','basic')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'abcdef',tfield1,testdummy) #value longer than field
    def test_out_formatfield_fixedAR(self):
        """Outgoing fixed right-aligned alfanumeric ('AR') fields: value is
        right-aligned and padded with leading spaces up to the field length;
        values longer than the field raise.
        """
        tfield1 = ['TEST1', 'M', 5, 'AR', True, 0, 5,'A']
        # field layout by position: length=5, decimals=0, minlength=5
        self.assertEqual(self.edi._formatfield('abcde',tfield1,testdummy), 'abcde','basic')
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '     ','basic')
        self.assertEqual(self.edi._formatfield('ab ',tfield1,testdummy), '  ab ','basic')
        self.assertEqual(self.edi._formatfield('a b',tfield1,testdummy), '  a b','basic')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'abcdef',tfield1,testdummy) #value longer than field
        # NOTE(review): tfield1 below is identical to the one above; this second run
        # repeats some cases and adds space-only inputs.
        tfield1 = ['TEST1', 'M', 5, 'AR', True, 0, 5,'A']
        self.assertEqual(self.edi._formatfield('abcde',tfield1,testdummy), 'abcde','basic')
        self.assertEqual(self.edi._formatfield('ab ',tfield1,testdummy), '  ab ','basic')
        self.assertEqual(self.edi._formatfield('a b',tfield1,testdummy), '  a b','basic')
        self.assertEqual(self.edi._formatfield('a',tfield1,testdummy), '    a','basic')
        self.assertEqual(self.edi._formatfield(' ',tfield1,testdummy), '     ','basic')
        self.assertEqual(self.edi._formatfield('   ',tfield1,testdummy), '     ','basic')
        self.assertEqual(self.edi._formatfield('     ',tfield1,testdummy), '     ','basic')
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '     ','basic')
        self.assertRaises(botslib.OutMessageError,self.edi._formatfield,'abcdef',tfield1,testdummy) #value longer than field
class TestFormatFieldInmessage(unittest.TestCase):
#both var and fixed fields are tested. Is not much difference (white-box testing)
def setUp(self):
#need to have a inmessage-object for tests. Read is a edifile and a grammar.
self.edi = inmessage.edifromfile(frompartner='',
topartner='',
filename='botssys/infile/unitformats/formats01.edi',
messagetype='edifact',
testindicator='0',
editype='edifact',
charset='UNOA',
alt='')
    def testformatfieldR(self):
        """Incoming 'R' format (explicit decimal point, variable decimals):
        leading zeroes are stripped, the configured decimal separator is
        normalised to '.', a trailing minus is moved to the front; invalid
        input raises botslib.InMessageFieldError.
        """
        self.edi.ta_info['lengthnumericbare']=True
        tfield1 = ['TEST1','M',3,'N',True,0, 0, 'R']
        # field layout by position: length=3, decimals=0, minlength=0, format='R'
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '0', 'empty numeric string is accepted, is zero')
        self.assertEqual(self.edi._formatfield('1',tfield1,testdummy), '1', 'basic')
        self.assertEqual(self.edi._formatfield('0',tfield1,testdummy), '0','zero stays zero')
        self.assertEqual(self.edi._formatfield('-0',tfield1,testdummy), '-0','neg.zero stays neg.zero')
        self.assertEqual(self.edi._formatfield('-0.00',tfield1,testdummy), '-0.00','')
        self.assertEqual(self.edi._formatfield('-.12',tfield1,testdummy), '-0.12','no zero before dec,sign is OK')
        self.assertEqual(self.edi._formatfield('123',tfield1,testdummy), '123','numeric field at max')
        self.assertEqual(self.edi._formatfield('001',tfield1,testdummy), '1','leading zeroes are removed')
        self.assertEqual(self.edi._formatfield('0.10',tfield1,testdummy), '0.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('-1.23',tfield1,testdummy), '-1.23','numeric field at max with minus and decimal sign')
        # with ',' configured as decimal separator the value is normalised to '.'
        self.edi.ta_info['decimaal']=','
        self.assertEqual(self.edi._formatfield('1,23-',tfield1,testdummy), '-1.23','other dec.sig, replace')
        self.edi.ta_info['decimaal']='.'
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'1234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'0001',tfield1,testdummy) #leading zeroes; field too large
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'-1.234',tfield1,testdummy) #field too large
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'1<3',tfield1,testdummy) #wrong char
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'1-3',tfield1,testdummy) #'-' in middle of number
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'1,3',tfield1,testdummy) #',' while ',' is not the configured decimal separator
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'1+3',tfield1,testdummy) #'+' in middle of number
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'13+',tfield1,testdummy) #'+' at end of number
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'0.100',tfield1,testdummy) #field too big
        #test field too short (minlength=5)
        tfield2 = ['TEST1', 'M', 8, 'N', True, 0, 5,'R']
        self.assertEqual(self.edi._formatfield('12345',tfield2,testdummy), '12345','just large enough')
        self.assertEqual(self.edi._formatfield('0.1000',tfield2,testdummy), '0.1000','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('00001',tfield2,testdummy), '1','remove leading zeroes')
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'1235',tfield2,testdummy) #field too short
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'-12.34',tfield2,testdummy) #field too short
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'-',tfield2,testdummy) #field too short
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'.',tfield2,testdummy) #field too short
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'-.',tfield2,testdummy) #field too short
        #WARN: dubious tests. This is Bots philosophy: be flexible in input, be strict in output.
        self.assertEqual(self.edi._formatfield('123-',tfield1,testdummy), '-123','numeric field minus at end')
        self.assertEqual(self.edi._formatfield('.001',tfield1,testdummy), '0.001','if no zero before dec.sign, length>max.length')
        self.assertEqual(self.edi._formatfield('+13',tfield1,testdummy), '13','plus is allowed') #WARN: if plus used, plus is countd in length!!
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'12E+3',tfield2,testdummy) #field too large
        tfield4 = ['TEST1', 'M', 8, 'N', True, 3, 0,'R']
        self.assertEqual(self.edi._formatfield('123.4561',tfield4,testdummy), '123.4561','no checking to many digits incoming') #should round here?
        tfield4 = ['TEST1', 'M', 80, 'N', True, 3, 0,'R']
        self.assertEqual(self.edi._formatfield('12345678901234560',tfield4,testdummy), '12345678901234560','lot of digits')
        # with lengthnumericbare=False the sign/decimal point count towards the length
        self.edi.ta_info['lengthnumericbare']=False
        self.assertEqual(self.edi._formatfield('-1.45',tfield2,testdummy), '-1.45','just large enough')
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'-12345678',tfield2,testdummy) #field too large
def testformatfieldN(self):
self.edi.ta_info['lengthnumericbare']=True
tfield1 = ['TEST1', 'M', 3, 'R', True, 2, 0,'N']
# length decimals minlength
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'',tfield1,testdummy) #empty string
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'1',tfield1,testdummy) #empty string
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'0',tfield1,testdummy) #empty string
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'-0',tfield1,testdummy) #empty string
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'01.00',tfield1,testdummy) #empty string
self.assertEqual(self.edi._formatfield('1.00',tfield1,testdummy), '1.00', 'basic')
self.assertEqual(self.edi._formatfield('0.00',tfield1,testdummy), '0.00','zero stays zero')
self.assertEqual(self.edi._formatfield('-0.00',tfield1,testdummy), '-0.00','neg.zero stays neg.zero')
self.assertEqual(self.edi._formatfield('-.12',tfield1,testdummy), '-0.12','no zero before dec,sign is OK')
self.assertEqual(self.edi._formatfield('1.23',tfield1,testdummy), '1.23','numeric field at max')
self.assertEqual(self.edi._formatfield('0.10',tfield1,testdummy), '0.10','keep zeroes after last dec.digit')
self.assertEqual(self.edi._formatfield('-1.23',tfield1,testdummy), '-1.23','numeric field at max with minus and decimal sign')
self.edi.ta_info['decimaal']=','
self.assertEqual(self.edi._formatfield('1,23-',tfield1,testdummy), '-1.23','other dec.sig, replace')
self.edi.ta_info['decimaal']='.'
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'1234',tfield1,testdummy) #field too large
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'0001',tfield1,testdummy) #leading zeroes; field too large
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'-1.234',tfield1,testdummy) #field too large
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'1<3',tfield1,testdummy) #wrong char
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'1-3',tfield1,testdummy) #'-' in middel of number
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'1,3',tfield1,testdummy) #',', where ',' is not traid sep.
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'1+3',tfield1,testdummy) #'+' in middle of number
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'13+',tfield1,testdummy) #'+' in middle of number
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'0.100',tfield1,testdummy) #field too big
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'12E+3',tfield1,testdummy) #no exp
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'.',tfield1,testdummy) #no exp
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'-',tfield1,testdummy) #no exp
#test field to short
tfield2 = ['TEST1', 'M', 8, 'R', True, 4, 5,'N']
self.assertEqual(self.edi._formatfield('1.2345',tfield2,testdummy), '1.2345','just large enough')
self.assertEqual(self.edi._formatfield('0.1000',tfield2,testdummy), '0.1000','keep zeroes after last dec.digit')
self.assertEqual(self.edi._formatfield('001.1234',tfield2,testdummy), '1.1234','remove leading zeroes')
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'1235',tfield2,testdummy) #field too short
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'-12.34',tfield2,testdummy) #field too short
#WARN: dubious tests. This is Bots filosophy: be flexible in input, be right in output.
self.assertEqual(self.edi._formatfield('1234.1234-',tfield2,testdummy), '-1234.1234','numeric field - minus at end')
self.assertEqual(self.edi._formatfield('.01',tfield1,testdummy), '0.01','if no zero before dec.sign, length>max.length')
self.assertEqual(self.edi._formatfield('+13.1234',tfield2,testdummy), '13.1234','plus is allowed') #WARN: if plus used, plus is counted in length!!
tfield3 = ['TEST1', 'M', 18, 'R', True, 0, 0,'N']
tfield4 = ['TEST1', 'M', 8, 'R', True, 3, 0,'N']
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'123.4561',tfield4,testdummy) #to many digits
    def testformatfieldI(self):
        """Incoming 'I' format (implied decimal): the decimal point is inserted
        at the implied position (2 decimals here) and leading zeroes are
        stripped; invalid input raises botslib.InMessageFieldError.
        """
        self.edi.ta_info['lengthnumericbare']=True
        tfield1 = ['TEST1', 'M', 5, 'I', True, 2, 0,'I']
        # field layout by position: length=5, implied decimals=2, minlength=0
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '0.00', 'empty numeric is accepted, is zero')
        self.assertEqual(self.edi._formatfield('123',tfield1,testdummy), '1.23', 'basic')
        self.assertEqual(self.edi._formatfield('1',tfield1,testdummy), '0.01', 'basic')
        self.assertEqual(self.edi._formatfield('0',tfield1,testdummy), '0.00','zero stays zero')
        self.assertEqual(self.edi._formatfield('-0',tfield1,testdummy), '-0.00','neg.zero stays neg.zero')
        self.assertEqual(self.edi._formatfield('-000',tfield1,testdummy), '-0.00','')
        self.assertEqual(self.edi._formatfield('-12',tfield1,testdummy), '-0.12','no zero before dec,sign is OK')
        self.assertEqual(self.edi._formatfield('12345',tfield1,testdummy), '123.45','numeric field at max')
        self.assertEqual(self.edi._formatfield('00001',tfield1,testdummy), '0.01','leading zeroes are removed')
        self.assertEqual(self.edi._formatfield('010',tfield1,testdummy), '0.10','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('-99123',tfield1,testdummy), '-991.23','numeric field at max with minus and decimal sign')
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'123456',tfield1,testdummy) #field too large
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'000100',tfield1,testdummy) #field too large
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'000001',tfield1,testdummy) #leading zeroes; field too large
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'12<3',tfield1,testdummy) #wrong char
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'12-3',tfield1,testdummy) #'-' in middle of number
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'12,3',tfield1,testdummy) #',' not allowed
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'12+3',tfield1,testdummy) #'+' in middle of number
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'123+',tfield1,testdummy) #'+' at end
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'12E+3',tfield1,testdummy) #no exponent allowed
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'-',tfield1,testdummy) #only -
        #test field too short (minlength=5)
        tfield2 = ['TEST1', 'M', 8, 'I', True, 2, 5,'I']
        self.assertEqual(self.edi._formatfield('12345',tfield2,testdummy), '123.45','just large enough')
        self.assertEqual(self.edi._formatfield('10000',tfield2,testdummy), '100.00','keep zeroes after last dec.digit')
        self.assertEqual(self.edi._formatfield('00100',tfield2,testdummy), '1.00','remove leading zeroes')
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'1235',tfield2,testdummy) #field too short
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'-1234',tfield2,testdummy) #field too short
        tfield3 = ['TEST1', 'M', 18, 'I', True, 0, 0,'I']
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'12E+3',tfield3,testdummy) #no exponent
        #WARN: dubious tests. This is Bots philosophy: be flexible in input, be strict in output.
        self.assertEqual(self.edi._formatfield('123-',tfield1,testdummy), '-1.23','numeric field minus at end')
        self.assertEqual(self.edi._formatfield('+13',tfield1,testdummy), '0.13','plus is allowed') #WARN: if plus used, plus is countd in length!!
def testformatfieldD(self):
tfield1 = ['TEST1', 'M', 20, 'D', True, 0, 0,'D']
# length decimals minlength
self.assertEqual(self.edi._formatfield('20071001',tfield1,testdummy), '20071001','basic')
self.assertEqual(self.edi._formatfield('071001',tfield1,testdummy), '071001','basic')
self.assertEqual(self.edi._formatfield('99991001',tfield1,testdummy), '99991001','max year')
self.assertEqual(self.edi._formatfield('00011001',tfield1,testdummy), '00011001','min year')
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'2007093112',tfield1,testdummy) #too long
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'20070931',tfield1,testdummy) #no valid date
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'-0070931',tfield1,testdummy) #no valid date
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'70931',tfield1,testdummy) #too short
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'0931',tfield1,testdummy) #too short
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'0931BC',tfield1,testdummy) #alfanum
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'OOOOBC',tfield1,testdummy) #alfanum
def testformatfieldT(self):
tfield1 = ['TEST1', 'M', 10, 'T', True, 0, 0,'T']
# length decimals minlength
self.assertEqual(self.edi._formatfield('2359',tfield1,testdummy), '2359','basic')
self.assertEqual(self.edi._formatfield('0000',tfield1,testdummy), '0000','basic')
self.assertEqual(self.edi._formatfield('000000',tfield1,testdummy), '000000','basic')
self.assertEqual(self.edi._formatfield('230000',tfield1,testdummy), '230000','basic')
self.assertEqual(self.edi._formatfield('235959',tfield1,testdummy), '235959','basic')
self.assertEqual(self.edi._formatfield('123456',tfield1,testdummy), '123456','basic')
self.assertEqual(self.edi._formatfield('0931233',tfield1,testdummy), '0931233','basic')
self.assertEqual(self.edi._formatfield('09312334',tfield1,testdummy), '09312334','basic')
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'240001',tfield1,testdummy) #no valid date
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'126101',tfield1,testdummy) #no valid date
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'120062',tfield1,testdummy) #no valid date - python allows 61 secnds?
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'240000',tfield1,testdummy) #no valid date
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'250001',tfield1,testdummy) #no valid date
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'-12000',tfield1,testdummy) #no valid date
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'120',tfield1,testdummy) #too short
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'11PM',tfield1,testdummy) #alfanum
self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'TIME',tfield1,testdummy) #alfanum
    def testformatfieldA(self):
        """Incoming alfanumeric ('A') fields: surrounding spaces are stripped;
        the raw (unstripped) value must fit between minlength and length,
        otherwise botslib.InMessageFieldError is raised.
        """
        # NOTE(review): 4th element reads 'T' but the trailing format element is 'A' —
        # presumably only the last element matters here; confirm against grammar code.
        tfield1 = ['TEST1', 'M', 5, 'T', True, 0, 0,'A']
        # field layout by position: length=5, decimals=0, minlength=0
        self.assertEqual(self.edi._formatfield('abcde',tfield1,testdummy), 'abcde','basic')
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '','basic')
        self.assertEqual(self.edi._formatfield('',tfield1,testdummy), '','basic')
        self.assertEqual(self.edi._formatfield('   ab',tfield1,testdummy), 'ab','basic')
        self.assertEqual(self.edi._formatfield('ab   ',tfield1,testdummy), 'ab','basic')
        self.assertEqual(self.edi._formatfield(' ab',tfield1,testdummy), 'ab','basic')
        self.assertEqual(self.edi._formatfield('ab ',tfield1,testdummy), 'ab','basic')
        self.assertEqual(self.edi._formatfield('a b',tfield1,testdummy), 'a b','basic')
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'abcdef',tfield1,testdummy) #raw value longer than field
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'ab    ',tfield1,testdummy) #raw value (incl. spaces) longer than field
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'    ab',tfield1,testdummy) #raw value (incl. spaces) longer than field
        tfield1 = ['TEST1', 'M', 5, 'T', True, 0, 2,'A']
        # same field but with minlength=2: raw values shorter than 2 must raise
        self.assertEqual(self.edi._formatfield('abcde',tfield1,testdummy), 'abcde','basic')
        self.assertEqual(self.edi._formatfield('   ab',tfield1,testdummy), 'ab','basic')
        self.assertEqual(self.edi._formatfield('ab   ',tfield1,testdummy), 'ab','basic')
        self.assertEqual(self.edi._formatfield(' ab',tfield1,testdummy), 'ab','basic')
        self.assertEqual(self.edi._formatfield('ab ',tfield1,testdummy), 'ab','basic')
        self.assertEqual(self.edi._formatfield('a b',tfield1,testdummy), 'a b','basic')
        self.assertEqual(self.edi._formatfield('  ',tfield1,testdummy), '','basic')
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'a',tfield1,testdummy) #raw value shorter than minlength
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'abcdef',tfield1,testdummy) #raw value longer than field
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'ab    ',tfield1,testdummy) #raw value (incl. spaces) longer than field
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'    ab',tfield1,testdummy) #raw value (incl. spaces) longer than field
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,' ',tfield1,testdummy) #raw value shorter than minlength
        self.assertRaises(botslib.InMessageFieldError,self.edi._formatfield,'',tfield1,testdummy) #raw value shorter than minlength
def testEdifact0402(self):
# old format test are run
self.assertRaises(botslib.InMessageFieldError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040201F.edi')
self.assertRaises(botslib.InMessageFieldError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040202F.edi')
self.assertRaises(botslib.InMessageFieldError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040203F.edi')
self.assertRaises(botslib.InMessageFieldError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040204F.edi')
self.assertRaises(botslib.InMessageFieldError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040205F.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040206F.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040207F.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040208F.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040209F.edi')
self.assertRaises(botslib.InMessageFieldError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040210F.edi')
self.assertRaises(botslib.InMessageFieldError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040211F.edi')
self.assertRaises(botslib.InMessageFieldError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040212F.edi')
self.failUnless(inmessage.edifromfile(editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040214T.edi'), 'standaard test')
self.assertRaises(botslib.InMessageFieldError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040215F.edi')
self.assertRaises(botslib.InMessageFieldError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040217F.edi')
self.assertRaises(botslib.InMessageFieldError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040218F.edi')
self.assertRaises(botslib.InMessageFieldError,inmessage.edifromfile,editype='edifact',
messagetype='edifact',filename='botssys/infile/unitformats/040219F.edi')
if __name__ == '__main__':
    #initialise the bots environment (configuration directory 'config') before running the suite
    botsinit.generalinit('config')
    #~ botslib.initbotscharsets()
    #set up engine logging; presumably the code under test logs via the engine logger - TODO confirm
    botsinit.initenginelogging()
    unittest.main()
| [
[
1,
0,
0.001,
0.001,
0,
0.66,
0,
88,
0,
1,
0,
0,
88,
0,
0
],
[
1,
0,
0.0019,
0.001,
0,
0.66,
0.0833,
816,
0,
1,
0,
0,
816,
0,
0
],
[
1,
0,
0.0029,
0.001,
0,
0.66,
... | [
"import unittest",
"import bots.botslib as botslib",
"import bots.botsinit as botsinit",
"import bots.inmessage as inmessage",
"import bots.outmessage as outmessage",
"from bots.botsconfig import *",
"import utilsunit",
"''' plugin unitformats '''",
"testdummy={MPATH:'dummy for tests'}",
"class Te... |
import copy
import os
import glob
import bots.inmessage as inmessage
import bots.outmessage as outmessage
def comparenode(node1,node2org):
    ''' Compare two message-node trees; return True when equal, False otherwise
        (the first difference found is printed).
        node2org is deep-copied because comparenodecore consumes (deletes)
        matched children of node2 while matching. '''
    node2 = copy.deepcopy(node2org)
    if node1.record is not None and node2.record is None:
        print 'node2 is "None"'
        return False
    if node1.record is None and node2.record is not None:
        print 'node1 is "None"'
        return False
    return comparenodecore(node1,node2)
def comparenodecore(node1,node2):
    ''' Recursive worker for comparenode: compare records both ways, then match
        children by BOTSID. NOTE: destructive on node2 (matched children are
        deleted), which is why comparenode passes in a deep copy. '''
    if node1.record is None and node2.record is None:
        pass
    else:
        #check every key/value of node1 exists unchanged in node2
        for key,value in node1.record.items():
            if key not in node2.record:
                print 'key not in node2', key,value
                return False
            elif node2.record[key]!=value:
                print 'unequal attr', key,value,node2.record[key]
                return False
        #and the reverse: node2 may not have extra or different keys
        for key,value in node2.record.items():
            if key not in node1.record:
                print 'key not in node1', key,value
                return False
            elif node1.record[key]!=value:
                print 'unequal attr', key,value,node1.record[key]
                return False
    if len(node1.children) != len(node2.children):
        print 'number of children not equal'
        return False
    #match children pairwise on BOTSID; remove matched child so duplicates pair up once
    for child1 in node1.children:
        for i,child2 in enumerate(node2.children):
            if child1.record['BOTSID'] == child2.record['BOTSID']:
                if comparenodecore(child1,child2) != True:
                    return False
                del node2.children[i:i+1]
                break
        else:
            print 'Found no matching record in node2 for',child1.record
            return False
    return True
def readfilelines(bestand):
    ''' Read file <bestand>; return its lines as a list (each keeping its newline). '''
    # The original opened with mode 'rU' and never closed the handle on error.
    # 'rU' is redundant on Python 3 (universal newlines are the text-mode
    # default) and was removed in Python 3.11; 'with' guarantees the file is
    # closed even when readlines() raises.
    with open(bestand, 'r') as fp:
        return fp.readlines()
def readfile(bestand):
    ''' Read file <bestand>; return its whole content as one string. '''
    # Same fix as readfilelines: 'rU' was removed in Python 3.11 and universal
    # newlines are the default text mode in Python 3; 'with' prevents a leaked
    # handle when read() raises.
    with open(bestand, 'r') as fp:
        return fp.read()
def readwrite(filenamein='',filenameout='',**args):
    ''' Translate one file: parse the edi input, move its tree to an
        out-message object and write that out. '''
    incoming = inmessage.edifromfile(filename=filenamein, **args)
    #make outmessage object with the same editype/messagetype keyword arguments
    outgoing = outmessage.outmessage_init(filename=filenameout, divtext='', topartner='', **args)
    outgoing.root = incoming.root
    outgoing.writeall()
def getdirbysize(path):
    ''' Return files matching glob pattern <path> as a list sorted by file size (ascending). '''
    return sorted(getdir(path), key=os.path.getsize)
def getdir(path):
    ''' Return the names matching glob pattern <path>, keeping plain files only. '''
    return list(filter(os.path.isfile, glob.glob(path)))
| [
[
1,
0,
0.0123,
0.0123,
0,
0.66,
0,
739,
0,
1,
0,
0,
739,
0,
0
],
[
1,
0,
0.0247,
0.0123,
0,
0.66,
0.0909,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.037,
0.0123,
0,
0... | [
"import copy",
"import os",
"import glob",
"import bots.inmessage as inmessage",
"import bots.outmessage as outmessage",
"def comparenode(node1,node2org):\n node2 = copy.deepcopy(node2org)\n if node1.record is not None and node2.record is None:\n print('node2 is \"None\"')\n return Fal... |
#constants/definitions for Bots
#to be used as: from bots.botsconfig import *
#values for 'statust' in ta (transaction status-of-processing):
OPEN = 0 #Bots always closes transaction. OPEN is severe error
ERROR = 1 #error in transaction.
OK = 2 #successful, result is 'save'. But processing has stopped: next step with error, or no next steps defined
DONE = 3 #successful, and result is picked up by next step
#values for 'status' in ta (place of transaction in the processing pipeline):
PROCESS = 1
DISCARD= 3
EXTERNIN = 200 #transaction is OK; file is exported; out of reach
RAWIN = 210 #the file as received, unprocessed; eg mail is in email-format (headers, body, attachments)
MIMEIN = 215 #mime is checked and read; mime-info (sender, receiver) is in db-ta
FILEIN = 220 #received edifile; ready for further use
SET_FOR_PROCESSING = 230
TRANSLATE = 300 #file to be translated
PARSED = 310 #the edifile is lexed and parsed
SPLITUP = 320 #the edimessages in the PARSED edifile have been split up
TRANSLATED = 330 #edimessage is result of translation
MERGED = 400 #is enveloped
FILEOUT = 500 #edifile ready to be 'send' (just the edi-file)
RAWOUT = 510 #file in send format eg email format (including headers, body, attachments)
EXTERNOUT = 520 #transaction is complete; file is exported; out of reach
#grammar.structure: indexes into grammar records
ID = 0
MIN = 1
MAX = 2
COUNT = 3
LEVEL = 4
MPATH = 5
FIELDS = 6
QUERIES = 7
SUBTRANSLATION = 8
BOTSIDnr = 9
#grammar.recorddefs: indexes for fields of a record, e.g. record[FIELDS][ID] == 'C124.0034'
#already defined: ID = 0
MANDATORY = 1
LENGTH = 2
SUBFIELDS = 2 #for composites
FORMAT = 3 #format in grammar file
ISFIELD = 4
DECIMALS = 5
MINLENGTH = 6
BFORMAT = 7 #internal bots format; formats in grammar are converted to bformat
#modules inmessage, outmessage; indexes for a record in self.records:
#already defined ID = 0
VALUE = 1
POS = 2
LIN = 3
SFIELD = 4 #boolean: True: is subfield, False: field or first element composite
#already defined MPATH = 5 #only for first field (=recordID)
FIXEDLINE = 6 #for fixed records; tmp storage of fixed record
FORMATFROMGRAMMAR = 7 #to store FORMAT field has in grammar
| [
[
14,
0,
0.0833,
0.0167,
0,
0.66,
0,
484,
1,
0,
0,
0,
0,
1,
0
],
[
14,
0,
0.1,
0.0167,
0,
0.66,
0.0238,
88,
1,
0,
0,
0,
0,
1,
0
],
[
14,
0,
0.1167,
0.0167,
0,
0.66,... | [
"OPEN = 0 #Bots always closes transaction. OPEN is severe error",
"ERROR = 1 #error in transaction.",
"OK = 2 #successfull, result is 'save'. But processing has stopped: next step with error, or no next steps defined",
"DONE = 3 #successfull, and result is picked up by next step",
"PROCESS =... |
"""
sef2bots.py
Command line params: sourcefile.sef targetfile.py
Optional command line params: -seq, -struct
Converts a SEF grammar into a Bots grammar. If targetfile exists (and is writeable),
it will be overwritten.
If -seq is specified, field names in record definitions will be
sequential (TAG01, TAG02, ..., where TAG is the record tag) instead of
the 'normal' field names.
If -struct is specified, only the Bots grammar variable 'structure' will be
constructed, i.e. the 'recorddefs' variable will be left out.
Parses the .SETS, .SEGS, .COMS and .ELMS sections. Any other sections are ignored.
(Mostly) assumes correct SEF syntax. May well break on some syntax errors.
If there are multiple .SETS sections, only the last one is processed. If there are
multiple message definitions in the (last) .SETS section, only the last one is
processed.
If there are multiple definitions of a segment, only the first one is taken
into account.
If there are multiple definitions of a field, only the last one is taken into
account.
Skips ^ and ignores .!$-&*@ in segment/field refs.
Also ignores syntax rules and dependency notes.
Changes seg max of '>1' to 99999 and elm maxlength to 99999 if 0 or > 99999
If you don't like that, change the 'constants' MAXMAX and/or MAXLEN below
"""
MAXMAX = 99999 # for dealing with segs/groups with max '>1'
MAXLEN = 99999 # for overly large elm maxlengths
TAB = ' ' # indent unit used when pretty-printing the generated grammar
import sys
import os
import copy
import atexit
import traceback
def showusage(scriptname):
    ''' Print command-line help for sef2bots and exit (exit code 0). '''
    print "Usage: python %s [-seq] [-nostruct] [-norecords] sourcefile targetfile" % scriptname
    print " Convert SEF grammar in <sourcefile> into Bots grammar in <targetfile>."
    print " Option -seq : use sequential numbered fields in record definitions instead of field names/ID's."
    print " Option -nostruct : the 'structure' will not be written."
    print " Option -norecords : the 'records' will not be written."
    print
    sys.exit(0)
class SEFError(Exception):
    ''' Raised when the SEF source cannot be parsed as expected. '''
    pass
class StructComp(object):
    """ One component (segment or segment group) of the Bots grammar variable 'structure'. """
    def __init__(self, tag, min, max, sub = None):
        self.id = tag
        self.min = min
        self.max = max
        self.sub = sub if sub else []
    def tostring(self, tablevel = 0):
        """ Render this component (and, recursively, its LEVEL children) as Bots grammar source. """
        indent = tablevel * TAB
        text = indent + "{ID: '%s', MIN: %d, MAX: %d" % (self.id, self.min, self.max)
        if self.sub:
            inner = ",\n".join(child.tostring(tablevel + 1) for child in self.sub)
            text += ", LEVEL: [\n" + inner + "," + "\n" + indent + "]"
        return text + "}"
class RecDef(object):
    """ A record/segment definition; these end up in the Bots grammar variable 'recorddefs'. """
    def __init__(self, tag, sub = None):
        self.id = tag
        self.sub = sub if sub else []
    def tostring(self, useseq = False, tablevel = 0):
        """ Render this record and all of its fields as Bots grammar source. """
        indent = tablevel * TAB
        body = "\n".join(field.tostring(useseq, tablevel + 1) for field in self.sub)
        return indent + "'%s': [\n" % (self.id) + body + "\n" + indent + "],"
class FieldDef(object):
    """ For composite and non-composite fields.
        minlen/maxlen are kept as strings ('' means: fill in later from the
        element definition); 'type' intentionally mirrors the SEF data type
        and shadows the builtin only locally. """
    def __init__(self, tag, req = 'C', minlen = '', maxlen = '', type = 'AN', sub = None, freq = 1, seq = None):
        self.id = tag
        self.req = req
        self.minlen = minlen
        self.maxlen = maxlen
        self.type = type
        self.sub = sub
        if not sub:
            self.sub = []
        self.freq = freq
        self.seq = seq
    def tostring(self, useseq = False, tablevel = 0):
        """ Render this field as Bots grammar source.
            useseq: emit the sequential name (self.seq) instead of the field ID. """
        if not useseq:
            fldname = self.id
        else:
            fldname = self.seq
        if not self.sub:
            #minlen '1' is the implicit default in Bots grammars, so emit maxlen only
            if self.minlen.strip() == '1':
                return tablevel * TAB + "['%s', '%s', %s, '%s']" %\
                    (fldname, self.req, self.maxlen, self.type) + ","
            else:
                return tablevel * TAB + "['%s', '%s', (%s, %s), '%s']" %\
                    (fldname, self.req, self.minlen, self.maxlen, self.type) + ","
        else:
            #composite: render subfields, nested one tab level deeper
            return tablevel * TAB + "['%s', '%s', [\n" % (fldname, self.req) \
                + "\n".join([field.tostring(useseq, tablevel + 1) for field in self.sub]) \
                + "\n" + tablevel * TAB + "]],"
def split2(line, seps):
    """
    Split <line> at the first occurrence of any character in <seps>.
    Returns the pair (text_before_separator, separator_plus_rest_of_line).
    When none of <seps> occurs, returns ('', <line>).
    """
    for position, character in enumerate(line):
        if character in seps:
            return line[:position], line[position:]
    return '', line
def do_set(line):
    """
    Convert one message definition line from the (current) .SETS section into
    a Bots grammar 'structure'. Returns the *contents* of the structure, as a string.
    """
    parts = line.split('=')
    body = parts[1].lstrip('^')
    components = readcomps(body)
    #first component is the message root; the rest become its children
    root = components[0]
    root.sub = components[1:]
    return root.tostring()
def readcomps(line):
    """ Read every component from a .SETS line; return them as a (nested) list. """
    components = []
    while line:
        component, line = readcomp(line)
        components.append(component)
    return components
def displaystructure(comps,tablevel=0):
    ''' Debug helper: print the parsed structure tree (id, min, max), indented per nesting level. '''
    for i in comps:
        print tablevel*TAB, i.id,i.min,i.max
        if i.sub:
            displaystructure(i.sub,tablevel+1)
def readcomp(line):
    """
    Read one component: a segment (starts with '[') or a segment group (starts
    with '{'). Returns the pair (component, rest_of_line); (None, '') when the
    line holds no further component.
    """
    _, line = split2(line, "[{")
    if not line:
        return None, ''
    opener = line[0]
    if opener == '[':
        return readseg(line)
    if opener == '{':
        return readgroup(line)
    raise SEFError("readcomp() - unexpected character at start of: %s" % line)
def readseg(line):
    """ Reads a single segment. Returns pair (segment, rest_of_line).
        NOTE(review): with more than 3 comma-separated components 'tag'/'req'
        stay unbound and this raises NameError - assumed not to occur in valid
        SEF input; confirm. 'min'/'max' deliberately mirror the grammar keys
        but shadow the builtins locally. """
    discard, line = line.split('[', 1)
    segstr, line = line.split(']', 1)
    components = segstr.split(',')
    num = len(components)
    maxstr = ''
    if num == 3:
        tag, req, maxstr = components
    elif num == 2:
        tag, req = components
    elif num == 1:
        tag, req = components[0], 'C'
    if req == 'M':
        min = 1
    else:
        min = 0
    #skip SEF requirement-designator prefix and '*'/'@' suffixes on the tag
    if tag[0] in ".!$-&":
        tag = tag[1:]
    if '*' in tag:
        tag = tag.split('*')[0]
    if '@' in tag:
        tag = tag.split('@')[0]
    if tag.upper() == 'LS':
        print "LS segment found"
    if not maxstr:
        max = 1
    elif maxstr == '>1':
        #unbounded repeat: clamp to MAXMAX (see module header)
        max = MAXMAX
        print "Changed max for seg '%s' to %d (orig. %s)" % (tag, MAXMAX, maxstr)
    else:
        max = int(maxstr)
    return StructComp(tag, min, max), line
def readgroup(line):
    """ Reads a segment group. Returns pair (segment_group, rest_of_line).
        The group takes its tag and min from its first (header) segment. """
    discard, line = line.split('{', 1)
    #~ print '>>',line
    tag, line = split2(line, ':+-[{')
    #~ print '>>',tag,'>>',line
    maxstr = ''
    if line[0] == ':': # next element can be group.max
        maxstr, line = split2(line[1:], '+-[{')
        #~ print '>>',line
    discard, line = split2(line, "[{")
    group = StructComp(tag, 0, 0) # dummy values for group. This is later on adjusted
    done = False
    #collect member components until the closing '}' (or end of line)
    while not done:
        if not line or line[0] == '}':
            done = True
        else:
            comp, line = readcomp(line)
            group.sub.append(comp)
    if group.sub:
        header = group.sub[0]
        group.id = header.id #use right tag for header segment
        if header.min > group.min:
            group.min = header.min
        group.sub = group.sub[1:]
    if not maxstr:
        group.max = 1
    else:
        if maxstr != '>1':
            group.max = int(maxstr)
        else:
            #unbounded repeat: clamp to MAXMAX (see module header)
            group.max = MAXMAX
            if tag:
                oldtag = tag
            else:
                oldtag = group.id
            print "Changed max for group '%s' to %d (orig. %s)" % (oldtag, MAXMAX, maxstr)
    return group, line[1:]
def comdef(line, issegdef = False):
    """
    Parse a segment or composite definition line (the syntax is identical;
    composite is the default). Returns a RecDef for a segment definition,
    a FieldDef for a composite definition.
    """
    tag, spec = line.split('=')
    com = RecDef(tag) if issegdef else FieldDef(tag)
    com.sub = getfields(spec)[0]
    return com
def getfields(line, isgroup = False):
    """ Parse field refs from a segment/composite definition.
        Returns pair (fieldlist, rest_of_line).
        Raises SEFError on input it cannot interpret. (The original had two
        failure modes on malformed input: it fell off the end and implicitly
        returned None, making the caller crash while unpacking, and it raised
        a bare IndexError on an unterminated group - both now raise SEFError
        with a clear message.) """
    if not line:
        if isgroup:
            raise SEFError("getfields() - unterminated group (missing '}')")
        return [], ''
    if isgroup and line[0] == '}':
        return [], line[1:]
    if not isgroup and line[0] in ",+":
        return [], line[1:]
    if line[0] == '[':
        field, line = getfield(line[1:])
        multifield = [field]
        #expand repeats: copies after the first are optional ('C')
        for i in range(1, field.freq):
            extrafield = copy.deepcopy(field)
            extrafield.req = 'C'
            multifield.append(extrafield)
        fields, line = getfields(line, isgroup)
        return multifield + fields, line
    if line[0] == '{':
        multstr, line = split2(line[1:], "[{")
        if not multstr:
            mult = 1
        else:
            mult = int(multstr)
        group, line = getfields(line, True)
        #repeat the whole group 'mult' times
        repgroup = []
        for i in range(mult):
            repgroup += copy.deepcopy(group)
        fields, line = getfields(line, isgroup)
        return repgroup + fields, line
    raise SEFError("getfields() - unexpected character at start of: %s" % line)
def getfield(line):
    """ Read a single field ref up to the closing ']'.
        Returns pair (field, rest_of_line); rest is '' when no ']' follows. """
    head, _, rest = line.partition(']')
    return fielddef(head), rest
def fielddef(line):
    """
    Parse one field ref: tag, req ('M', anything else becomes 'C'), optional
    min/max length override (';min:max'), and frequency (repeat count).
    Returns a FieldDef.
    Bug fixed: a single length override (';nn' without ':') left 'maxlen'
    unassigned, so the original raised NameError. A lone value is now taken
    as minlen while maxlen stays '' and is back-filled later from the .ELMS
    definition (same as any other empty length) - TODO confirm this reading
    of the SEF single-value override against the SEF specification.
    """
    if line[0] in ".!$-&":      #skip the requirement-designator prefix
        line = line[1:]
    if ',' not in line:
        req, freq = 'C', 1
    else:
        splits = line.split(',')
        num = len(splits)
        if num == 3:
            line, req, freq = splits
            freq = int(freq)
        elif num == 2:
            (line, req), freq = splits, 1
        else:
            line, req, freq = splits[0], 'C', 1
    if req != 'M':
        req = 'C'
    if ';' not in line:
        lenstr = ''
    else:
        line, lenstr = line.split(';')
    if '@' in line:
        line, discard = line.split('@', 1)
    if not lenstr:
        minlen = maxlen = ''
    elif ':' in lenstr:
        minlen, maxlen = lenstr.split(':')
    else:
        #single value: treat as minlen; maxlen is filled in later from the element definition
        minlen, maxlen = lenstr, ''
    return FieldDef(line, req = req, minlen = minlen, maxlen = maxlen, freq = freq)
def elmdef(line):
    """ Reads elm definition (min and max lengths and data type), returns FieldDef.
        NOTE: local name 'type' mirrors the SEF data type and shadows the builtin here. """
    tag, spec = line.split('=')
    type, minlenstr, maxlenstr = spec.split(',')
    try:
        maxlen = int(maxlenstr)
    except ValueError:
        maxlen = 0
    #clamp missing/absurd max lengths to MAXLEN (see module header)
    if maxlen == 0 or maxlen > MAXLEN:
        print "Changed max length for elm '%s' to %d (orig. %s)" % (tag, MAXLEN, maxlenstr)
        maxlenstr = str(MAXLEN)
    elm = FieldDef(tag, minlen = minlenstr, maxlen = maxlenstr, type = type)
    return elm
def getelmsinfo(elms, coms):
    """
    Get types and lengths from elm defs into com defs,
    and rename multiple occurrences of subfields.
    Mutates the FieldDef objects in <coms> in place; raises SEFError when a
    subfield has no matching element definition.
    """
    for comid in coms:
        com = coms[comid]
        counters = {}
        sfids = [sf.id for sf in com.sub]
        for i, sfield in enumerate(com.sub):
            sfield.seq = "%02d" % (i + 1)
            if sfield.id not in elms:
                raise SEFError("getelmsinfo() - no subfield definition found for element '%s'" % sfield.id)
            elm = elms[sfield.id]
            #only fill in lengths that were not overridden in the field ref
            if not sfield.minlen:
                sfield.minlen = elm.minlen
            if not sfield.maxlen:
                sfield.maxlen = elm.maxlen
            sfield.type = elm.type
            if sfield.id not in counters:
                counters[sfield.id] = 1
            else:
                counters[sfield.id] += 1
            #disambiguate repeated subfield ids with a '#<n>' suffix
            if counters[sfield.id] > 1 or sfield.id in sfids[i + 1:]:
                sfield.id += "#%d" % counters[sfield.id]
def getfieldsinfo(elms, coms, segs):
    """
    Get types and lengths from elm defs and com defs into seg defs,
    and rename multiple occurrences of fields. Also rename subfields
    of composites to include the name of their parents.
    Finally, add the necessary BOTSID element.
    Mutates the RecDef objects in <segs> in place; raises SEFError when a
    field has neither an element nor a composite definition.
    """
    for seg in segs:
        counters = {}
        fids = [f.id for f in seg.sub]
        for i, field in enumerate(seg.sub):
            field.seq = "%s%02d" % (seg.id, i + 1)
            iscomposite = False
            if field.id in elms:
                elm = elms[field.id]
                field.type = elm.type
                #only fill in lengths that were not overridden in the field ref
                if not field.minlen:
                    field.minlen = elm.minlen
                if not field.maxlen:
                    field.maxlen = elm.maxlen
            elif field.id in coms:
                iscomposite = True
                com = coms[field.id]
                #deep copy: the same composite may occur in several segments
                field.sub = copy.deepcopy(com.sub)
            else:
                raise SEFError("getfieldsinfo() - no field definition found for element '%s'" % field.id)
            if not field.id in counters:
                counters[field.id] = 1
            else:
                counters[field.id] += 1
            #disambiguate repeated field ids with a '#<n>' suffix
            if counters[field.id] > 1 or field.id in fids[i + 1:]:
                field.id += "#%d" % counters[field.id]
            if iscomposite:
                #prefix subfield ids/seqs with the (possibly disambiguated) parent name
                for sfield in field.sub:
                    sfield.id = field.id + '.' + sfield.id
                    sfield.seq = field.seq + '.' + sfield.seq
        seg.sub.insert(0, FieldDef('BOTSID', req = 'M', minlen = "1", maxlen = "3", type = "AN", seq = 'BOTSID'))
def convertfile(infile, outfile, useseq, nostruct, norecords,edifactversionID):
    ''' Read the SEF grammar from <infile> and write the Bots grammar to <outfile>.
        Only the .SETS/.SEGS/.COMS/.ELMS sections are interpreted; all other
        sections and '*' comment lines are skipped.
        edifactversionID is used to name the shared records module when
        norecords is set. '''
    struct = ""
    segdefs, segdict, comdefs, elmdefs = [], {}, {}, {}
    # segdict just keeps a list of segs already found, so they don't get re-defined
    in_sets = in_segs = in_coms = in_elms = False
    #*******reading sef grammar***********************
    for line in infile:
        line = line.strip('\n')
        if line:
            if line[0] == '*': # a comment, skip
                pass
            elif line[0] == '.':
                #section header: switch parsing mode (unknown sections turn all flags off)
                line = line.upper()
                in_sets = in_segs = in_coms = in_elms = False
                if line == '.SETS':
                    in_sets = True
                elif line == '.SEGS':
                    in_segs = True
                elif line == '.COMS':
                    in_coms = True
                elif line == '.ELMS':
                    in_elms = True
            else:
                if in_sets:
                    struct = do_set(line)
                elif not norecords: #only collect record info when records need to be written
                    if in_segs:
                        seg = comdef(line, issegdef = True)
                        # if multiple defs for this seg, only do first one
                        if seg.id not in segdict:
                            segdict[seg.id] = 1
                            segdefs.append(seg)
                    elif in_coms:
                        com = comdef(line)
                        comdefs[com.id] = com
                    elif in_elms:
                        elm = elmdef(line)
                        elmdefs[elm.id] = elm
    #*****writing bots grammar **************
    outfile.write('from bots.botsconfig import *\n')
    if not nostruct: #if structure: need syntax
        outfile.write('from edifactsyntax3 import syntax\n')
    if norecords: #records are not written here: import them from the shared records module
        outfile.write('from records%s import recorddefs\n\n'%edifactversionID)
    #****************************************
    if not nostruct:
        outfile.write("\nstructure = [\n%s\n]\n" % struct)
    if not norecords:
        getelmsinfo(elmdefs, comdefs)
        getfieldsinfo(elmdefs, comdefs, segdefs)
        outfile.write("\nrecorddefs = {\n%s\n}\n" % "\n".join([seg.tostring(useseq) for seg in segdefs]))
def start(args):
    ''' Parse command line options (args is sys.argv[1:]), then convert
        sourcefile -> targetfile via convertfile().
        NOTE(review): showusage(args[0]...) passes the *first argument*, not
        the script name, since args excludes sys.argv[0] - looks unintended;
        confirm before changing. '''
    useseq, nostruct, norecords, infilename, outfilename = False, False, False, None, None
    for arg in args:
        if not arg:
            continue
        if arg in ["-h", "--help", "?", "/?", "-?"]:
            showusage(args[0].split(os.sep)[-1])
        if arg == "-seq":
            useseq = True
        elif arg == "-nostruct":
            nostruct = True
        elif arg == "-norecords":
            norecords = True
        elif not infilename:
            infilename = arg
        elif not outfilename:
            outfilename = arg
        else:
            showusage(args[0].split(os.sep)[-1])
    if not infilename or not outfilename:
        showusage(args[0].split(os.sep)[-1])
    #************************************
    infile = open(infilename, 'r')
    outfile = open(outfilename, 'w')
    #version id = output basename minus the 'grammar' style prefix (first 6 chars)
    edifactversionID = os.path.splitext(os.path.basename(outfilename))[0][6:]
    print ' Convert sef->bots "%s".'%(outfilename)
    convertfile(infile, outfile, useseq, nostruct, norecords,edifactversionID)
    infile.close()
    outfile.close()
if __name__ == "__main__":
try:
start(sys.argv[1:])
except:
traceback.print_exc()
else:
print "Done"
| [
[
8,
0,
0.0375,
0.073,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
14,
0,
0.0769,
0.002,
0,
0.66,
0.0333,
113,
1,
0,
0,
0,
0,
1,
0
],
[
14,
0,
0.0789,
0.002,
0,
0.66,
... | [
"\"\"\"\nsef2bots.py\n\nCommand line params: sourcefile.sef targetfile.py\n\nOptional command line params: -seq, -struct\n\nConverts a SEF grammar into a Bots grammar. If targetfile exists (and is writeable),",
"MAXMAX = 99999 # for dealing with segs/groups with max '>1'",
"MAXLEN = 99999 # for overly large elm... |
#!/usr/bin/env python
import sys
import os
import logging
from logging.handlers import TimedRotatingFileHandler
from django.core.handlers.wsgi import WSGIHandler
from django.utils.translation import ugettext as _
import cherrypy
from cherrypy import wsgiserver
import botslib
import botsglobal
import botsinit
def showusage():
    ''' Print command-line help for the bots web server and exit (exit code 0). '''
    usage = '''
    This is "%(name)s", a part of Bots open source edi translator - http://bots.sourceforge.net.
    The %(name)s is the web server for bots; the interface (bots-monitor) can be accessed in a browser, eg 'http://localhost:8080'.
    Usage:
        %(name)s  -c<directory>
    Options:
        -c<directory>   directory for configuration files (default: config).
    '''%{'name':os.path.basename(sys.argv[0])}
    print usage
    sys.exit(0)
def start():
    ''' Initialise bots, set up logging, and run the cherrypy web server
        (serving the django app plus static files) until interrupted. '''
    #NOTE bots is always on PYTHONPATH!!! - otherwise it will not start.
    #***command line arguments**************************
    configdir = 'config'
    for arg in sys.argv[1:]:
        if not arg:
            continue
        if arg.startswith('-c'):
            configdir = arg[2:]
            if not configdir:
                print 'Configuration directory indicated, but no directory name.'
                sys.exit(1)
        elif arg in ["?", "/?"] or arg.startswith('-'):
            showusage()
        else:
            showusage()
    #***init general: find locating of bots, configfiles, init paths etc.***********************
    botsinit.generalinit(configdir)
    #***initialise logging. This logging only contains the logging from bots-webserver, not from cherrypy.
    botsglobal.logger = logging.getLogger('bots-webserver')
    botsglobal.logger.setLevel(logging.DEBUG)
    h = TimedRotatingFileHandler(botslib.join(botsglobal.ini.get('directories','logging'),'webserver.log'), backupCount=10)
    fileformat = logging.Formatter("%(asctime)s %(levelname)-8s: %(message)s",'%Y%m%d %H:%M:%S')
    h.setFormatter(fileformat)
    botsglobal.logger.addHandler(h)
    #***init cherrypy as webserver*********************************************
    #global configuration for cherrypy
    cherrypy.config.update({'global': {'log.screen': False, 'server.environment': botsglobal.ini.get('webserver','environment','production')}})
    #cherrypy handling of static files
    conf = {'/': {'tools.staticdir.on' : True,'tools.staticdir.dir' : 'media' ,'tools.staticdir.root': botsglobal.ini.get('directories','botspath')}}
    servestaticfiles = cherrypy.tree.mount(None, '/media', conf) #None: no cherrypy application (as this only serves static files)
    #cherrypy handling of django
    servedjango = WSGIHandler() #was: servedjango = AdminMediaHandler(WSGIHandler()) but django does not need the AdminMediaHandler in this setup. is much faster.
    #cherrypy uses a dispatcher in order to handle the serving of static files and django.
    dispatcher = wsgiserver.WSGIPathInfoDispatcher({'/': servedjango, '/media': servestaticfiles})
    botswebserver = wsgiserver.CherryPyWSGIServer(bind_addr=('0.0.0.0', botsglobal.ini.getint('webserver','port',8080)), wsgi_app=dispatcher, server_name=botsglobal.ini.get('webserver','name','bots-webserver'))
    botsglobal.logger.info(_(u'Bots web-server started.'))
    #handle ssl: cherrypy < 3.2 always uses pyOpenssl. cherrypy >= 3.2 uses python built-in ssl (python >= 2.6 has built-in support for ssl).
    ssl_certificate = botsglobal.ini.get('webserver','ssl_certificate',None)
    ssl_private_key = botsglobal.ini.get('webserver','ssl_private_key',None)
    if ssl_certificate and ssl_private_key:
        if cherrypy.__version__ >= '3.2.0':
            adapter_class = wsgiserver.get_ssl_adapter_class('builtin')
            botswebserver.ssl_adapter = adapter_class(ssl_certificate,ssl_private_key)
        else:
            #but: pyOpenssl should be there!
            botswebserver.ssl_certificate = ssl_certificate
            botswebserver.ssl_private_key = ssl_private_key
        botsglobal.logger.info(_(u'Bots web-server uses ssl (https).'))
    else:
        botsglobal.logger.info(_(u'Bots web-server uses plain http (no ssl).'))
    #***start the cherrypy webserver.
    try:
        botswebserver.start()
    except KeyboardInterrupt:
        botswebserver.stop()
if __name__=='__main__':
    #script entry point: run the bots web server
    start()
| [
[
1,
0,
0.0225,
0.0112,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0337,
0.0112,
0,
0.66,
0.0769,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0449,
0.0112,
0,
... | [
"import sys",
"import os",
"import logging",
"from logging.handlers import TimedRotatingFileHandler",
"from django.core.handlers.wsgi import WSGIHandler",
"from django.utils.translation import ugettext as _",
"import cherrypy",
"from cherrypy import wsgiserver",
"import botslib",
"import botsgloba... |
# Django settings for bots project.
import os
import bots
#*******settings for bots error reports**********************************
MANAGERS = ( #bots will send error reports to the MANAGERS
    ('name_manager', 'manager@domain.org'),
    )
#~ EMAIL_HOST = 'smtp.gmail.com' #Default: 'localhost'
#~ EMAIL_PORT = '587' #Default: 25
#~ EMAIL_USE_TLS = True #Default: False
#~ EMAIL_HOST_USER = 'user@gmail.com' #Default: ''. Username to use for the SMTP server defined in EMAIL_HOST. If empty, Django won't attempt authentication.
#~ EMAIL_HOST_PASSWORD = '' #Default: ''. PASSWORD to use for the SMTP server defined in EMAIL_HOST. If empty, Django won't attempt authentication.
#~ SERVER_EMAIL = 'user@gmail.com' #Sender of bots error reports. Default: 'root@localhost'
#~ EMAIL_SUBJECT_PREFIX = '' #This is prepended on email subject.
#*********path settings*************************advised is not to change these values!!
PROJECT_PATH = os.path.abspath(os.path.dirname(bots.__file__))
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = PROJECT_PATH + '/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
#~ FILE_UPLOAD_TEMP_DIR = os.path.join(PROJECT_PATH, 'botssys/pluginsuploaded') #set in bots.ini
ROOT_URLCONF = 'bots.urls'
LOGIN_URL = '/login/'
LOGIN_REDIRECT_URL = '/'
LOGOUT_URL = '/logout/'
#~ LOGOUT_REDIRECT_URL = #no such django parameter; this is set in urls
TEMPLATE_DIRS = (
    os.path.join(PROJECT_PATH, 'templates'),
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
#*********database settings*************************
#django-admin syncdb --pythonpath='/home/hje/botsup' --settings='bots.config.settings'
#SQLITE:
DATABASE_ENGINE = 'sqlite3' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = os.path.join(PROJECT_PATH, 'botssys/sqlitedb/botsdb') #path to database; if relative path: interpreted relative to bots root directory
DATABASE_USER = ''
DATABASE_PASSWORD = ''
DATABASE_HOST = ''
DATABASE_PORT = ''
DATABASE_OPTIONS = {}
#~ #MySQL:
#~ DATABASE_ENGINE = 'mysql'
#~ DATABASE_NAME = 'botsdb'
#~ DATABASE_USER = 'bots'
#~ DATABASE_PASSWORD = 'botsbots'
#~ DATABASE_HOST = '192.168.0.7'
#~ DATABASE_PORT = '3306'
#~ DATABASE_OPTIONS = {'use_unicode':True,'charset':'utf8',"init_command": 'SET storage_engine=INNODB'}
#PostgreSQL:
#~ DATABASE_ENGINE = 'postgresql_psycopg2'
#~ DATABASE_NAME = 'botsdb'
#~ DATABASE_USER = 'bots'
#~ DATABASE_PASSWORD = 'botsbots'
#~ DATABASE_HOST = '192.168.0.7'
#~ DATABASE_PORT = '5432'
#~ DATABASE_OPTIONS = {}
#*********sessions, cookies, log out time*************************
SESSION_EXPIRE_AT_BROWSER_CLOSE = True #True: always log in when browser is closed
SESSION_COOKIE_AGE = 3600 #seconds a user needs to login when no activity
SESSION_SAVE_EVERY_REQUEST = True #if True: SESSION_COOKIE_AGE is interpreted as: since last activity
#*********localization*************************
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Amsterdam'
DATE_FORMAT = "Y-m-d"
DATETIME_FORMAT = "Y-m-d G:i"
TIME_FORMAT = "G:i"
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
#~ LANGUAGE_CODE = 'en-us'
LANGUAGE_CODE = 'en'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
#*************************************************************************
#*********other django setting. please consult django docs.***************
#set in bots.ini
#~ DEBUG = True
#~ TEMPLATE_DEBUG = DEBUG
SITE_ID = 1
# Make this unique, and don't share it with anybody.
# NOTE(review): this default SECRET_KEY ships with bots and is therefore
# public; it must be changed for any real deployment.
SECRET_KEY = 'm@-u37qiujmeqfbu$daaaaz)sp^7an4u@h=wfx9dd$$$zl2i*x9#awojdc'
ADMINS = (
    ('bots', 'your_email@domain.com'),
)
#save uploaded file (=plugin) always to file. no path for temp storage is used, so system default is used.
FILE_UPLOAD_HANDLERS = (
    "django.core.files.uploadhandler.TemporaryFileUploadHandler",
)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.load_template_source',
    'django.template.loaders.app_directories.load_template_source',
    #'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'bots.persistfilters.FilterPersistMiddleware',
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.admin',
    'bots',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    "django.core.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.request",
)
| [
[
1,
0,
0.0146,
0.0073,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0219,
0.0073,
0,
0.66,
0.0286,
261,
0,
1,
0,
0,
261,
0,
0
],
[
14,
0,
0.0511,
0.0219,
0,
... | [
"import os",
"import bots",
"MANAGERS = ( #bots will send error reports to the MANAGERS\n ('name_manager', 'manager@domain.org'),\n )",
"PROJECT_PATH = os.path.abspath(os.path.dirname(bots.__file__))",
"MEDIA_ROOT = PROJECT_PATH + '/'",
"MEDIA_URL = ''",
"ADMIN_MEDIA_PREFIX = '/media/'",
"ROO... |
import time
import django
import models
import viewlib
import botslib
import botsglobal
#NOTE(review): the next statement has no effect beyond touching the attribute; presumably it is here to force
#the admin widgets module to be loaded -- confirm before removing.
django.contrib.admin.widgets.AdminSplitDateTime
HiddenInput = django.forms.widgets.HiddenInput #shorthand alias used by the form classes below
DEFAULT_ENTRY = ('',"---------") #empty first entry of every choice list
editypelist=[DEFAULT_ENTRY] + sorted(models.EDITYPES)
confirmtypelist=[DEFAULT_ENTRY] + models.CONFIRMTYPE
def getroutelist():
    ''' Choice list of all route ids; queried via idroute because that is not the primary key of routes. '''
    idroutes = models.routes.objects.values_list('idroute', flat=True).order_by('idroute').distinct()
    return [DEFAULT_ENTRY] + [(idroute, idroute) for idroute in idroutes]
def getinmessagetypes():
    ''' Choice list of every message type that occurs as the source of a translation. '''
    mtypes = models.translate.objects.values_list('frommessagetype', flat=True).order_by('frommessagetype').distinct()
    return [DEFAULT_ENTRY] + [(mtype, mtype) for mtype in mtypes]
def getoutmessagetypes():
    ''' Choice list of every message type that occurs as the target of a translation. '''
    mtypes = models.translate.objects.values_list('tomessagetype', flat=True).order_by('tomessagetype').distinct()
    return [DEFAULT_ENTRY] + [(mtype, mtype) for mtype in mtypes]
def getallmessagetypes():
    ''' Choice list of all message types used in any translation (incoming or outgoing), deduplicated and sorted. '''
    to_types = list(models.translate.objects.values_list('tomessagetype', flat=True).all())
    from_types = list(models.translate.objects.values_list('frommessagetype', flat=True).all())
    return [DEFAULT_ENTRY] + [(mtype, mtype) for mtype in sorted(set(to_types + from_types))]
def getpartners():
    ''' Choice list of all active partner ids; partner groups are excluded. '''
    idpartners = models.partner.objects.values_list('idpartner', flat=True).filter(isgroup=False,active=True).order_by('idpartner')
    return [DEFAULT_ENTRY] + [(idpartner, idpartner) for idpartner in idpartners]
def getfromchannels():
    ''' Choice list of all incoming channel ids. '''
    idchannels = models.channel.objects.values_list('idchannel', flat=True).filter(inorout='in').order_by('idchannel')
    return [DEFAULT_ENTRY] + [(idchannel, idchannel) for idchannel in idchannels]
def gettochannels():
    ''' Choice list of all outgoing channel ids. '''
    idchannels = models.channel.objects.values_list('idchannel', flat=True).filter(inorout='out').order_by('idchannel')
    return [DEFAULT_ENTRY] + [(idchannel, idchannel) for idchannel in idchannels]
class Select(django.forms.Form):
    ''' Base form for the selection screens: visible date range plus hidden paging/sorting state. '''
    datefrom = django.forms.DateTimeField(initial=viewlib.datetimefrom)
    dateuntil = django.forms.DateTimeField(initial=viewlib.datetimeuntil)
    page = django.forms.IntegerField(required=False,initial=1,widget=HiddenInput())
    sortedby = django.forms.CharField(initial='ts',widget=HiddenInput())
    sortedasc = django.forms.BooleanField(initial=False,required=False,widget=HiddenInput())
class View(django.forms.Form):
    ''' Base form for the result views; all fields are hidden because they only carry state between requests. '''
    datefrom = django.forms.DateTimeField(required=False,initial=viewlib.datetimefrom,widget=HiddenInput())
    dateuntil = django.forms.DateTimeField(required=False,initial=viewlib.datetimeuntil,widget=HiddenInput())
    page = django.forms.IntegerField(required=False,initial=1,widget=HiddenInput())
    sortedby = django.forms.CharField(required=False,initial='ts',widget=HiddenInput())
    sortedasc = django.forms.BooleanField(required=False,initial=False,widget=HiddenInput())
class SelectReports(Select):
    ''' Selection form for run reports; status filter: error/done. '''
    template = 'bots/selectform.html'
    action = '/reports/'
    status = django.forms.ChoiceField([DEFAULT_ENTRY,('1',"Error"),('0',"Done")],required=False,initial='')
class ViewReports(View):
    ''' Hidden-state form for the reports result view. '''
    template = 'bots/reports.html'
    action = '/reports/'
    status = django.forms.IntegerField(required=False,initial='',widget=HiddenInput())
class SelectIncoming(Select):
    ''' Selection form for incoming files. Dynamic choice lists are filled in __init__. '''
    template = 'bots/selectform.html'
    action = '/incoming/'
    statust = django.forms.ChoiceField([DEFAULT_ENTRY,('1',"Error"),('3',"Done")],required=False,initial='')
    idroute = django.forms.ChoiceField([],required=False,initial='')
    frompartner = django.forms.ChoiceField([],required=False)
    topartner = django.forms.ChoiceField([],required=False)
    ineditype = django.forms.ChoiceField(editypelist,required=False)
    inmessagetype = django.forms.ChoiceField([],required=False)
    outeditype = django.forms.ChoiceField(editypelist,required=False)
    outmessagetype = django.forms.ChoiceField([],required=False)
    lastrun = django.forms.BooleanField(required=False,initial=False)
    def __init__(self, *args, **kwargs):
        #choices are queried at form construction time so new database entries show up without a restart
        super(SelectIncoming, self).__init__(*args, **kwargs)
        self.fields['idroute'].choices = getroutelist()
        self.fields['inmessagetype'].choices = getinmessagetypes()
        self.fields['outmessagetype'].choices = getoutmessagetypes()
        self.fields['frompartner'].choices = getpartners()
        self.fields['topartner'].choices = getpartners()
class ViewIncoming(View):
    ''' Hidden-state form for the incoming-files result view. '''
    template = 'bots/incoming.html'
    action = '/incoming/'
    statust = django.forms.IntegerField(required=False,initial='',widget=HiddenInput())
    idroute = django.forms.CharField(required=False,widget=HiddenInput())
    frompartner = django.forms.CharField(required=False,widget=HiddenInput())
    topartner = django.forms.CharField(required=False,widget=HiddenInput())
    ineditype = django.forms.CharField(required=False,widget=HiddenInput())
    inmessagetype = django.forms.CharField(required=False,widget=HiddenInput())
    outeditype = django.forms.CharField(required=False,widget=HiddenInput())
    outmessagetype = django.forms.CharField(required=False,widget=HiddenInput())
    lastrun = django.forms.BooleanField(required=False,initial=False,widget=HiddenInput())
    botskey = django.forms.CharField(required=False,widget=HiddenInput())
class SelectDocument(Select):
    ''' Selection form for documents; allows searching by document number (botskey). '''
    template = 'bots/selectform.html'
    action = '/document/'
    idroute = django.forms.ChoiceField([],required=False,initial='')
    frompartner = django.forms.ChoiceField([],required=False)
    topartner = django.forms.ChoiceField([],required=False)
    editype = django.forms.ChoiceField(editypelist,required=False)
    messagetype = django.forms.ChoiceField(required=False)
    lastrun = django.forms.BooleanField(required=False,initial=False)
    botskey = django.forms.CharField(required=False,label='Document number',max_length=35)
    def __init__(self, *args, **kwargs):
        #choices are queried at form construction time so new database entries show up without a restart
        super(SelectDocument, self).__init__(*args, **kwargs)
        self.fields['idroute'].choices = getroutelist()
        self.fields['messagetype'].choices = getoutmessagetypes()
        self.fields['frompartner'].choices = getpartners()
        self.fields['topartner'].choices = getpartners()
class ViewDocument(View):
    ''' Hidden-state form for the document result view. '''
    template = 'bots/document.html'
    action = '/document/'
    idroute = django.forms.CharField(required=False,widget=HiddenInput())
    frompartner = django.forms.CharField(required=False,widget=HiddenInput())
    topartner = django.forms.CharField(required=False,widget=HiddenInput())
    editype = django.forms.CharField(required=False,widget=HiddenInput())
    messagetype = django.forms.CharField(required=False,widget=HiddenInput())
    lastrun = django.forms.BooleanField(required=False,initial=False,widget=HiddenInput())
    botskey = django.forms.CharField(required=False,widget=HiddenInput())
class SelectOutgoing(Select):
    ''' Selection form for outgoing files. Dynamic choice lists are filled in __init__. '''
    template = 'bots/selectform.html'
    action = '/outgoing/'
    idroute = django.forms.ChoiceField([],required=False,initial='')
    frompartner = django.forms.ChoiceField([],required=False)
    topartner = django.forms.ChoiceField([],required=False)
    editype = django.forms.ChoiceField(editypelist,required=False)
    messagetype = django.forms.ChoiceField(required=False)
    lastrun = django.forms.BooleanField(required=False,initial=False)
    def __init__(self, *args, **kwargs):
        #choices are queried at form construction time so new database entries show up without a restart
        super(SelectOutgoing, self).__init__(*args, **kwargs)
        self.fields['idroute'].choices = getroutelist()
        self.fields['messagetype'].choices = getoutmessagetypes()
        self.fields['frompartner'].choices = getpartners()
        self.fields['topartner'].choices = getpartners()
class ViewOutgoing(View):
    ''' Hidden-state form for the outgoing-files result view. '''
    template = 'bots/outgoing.html'
    action = '/outgoing/'
    idroute = django.forms.CharField(required=False,widget=HiddenInput())
    frompartner = django.forms.CharField(required=False,widget=HiddenInput())
    topartner = django.forms.CharField(required=False,widget=HiddenInput())
    editype = django.forms.CharField(required=False,widget=HiddenInput())
    messagetype = django.forms.CharField(required=False,widget=HiddenInput())
    lastrun = django.forms.BooleanField(required=False,initial=False,widget=HiddenInput())
class SelectProcess(Select):
    ''' Selection form for process errors; route choices are filled in __init__. '''
    template = 'bots/selectform.html'
    action = '/process/'
    idroute = django.forms.ChoiceField([],required=False,initial='')
    lastrun = django.forms.BooleanField(required=False,initial=False)
    def __init__(self, *args, **kwargs):
        #choices are queried at form construction time so new database entries show up without a restart
        super(SelectProcess, self).__init__(*args, **kwargs)
        self.fields['idroute'].choices = getroutelist()
class ViewProcess(View):
    ''' Hidden-state form for the process result view. '''
    template = 'bots/process.html'
    action = '/process/'
    idroute = django.forms.CharField(required=False,widget=HiddenInput())
    lastrun = django.forms.BooleanField(required=False,initial=False,widget=HiddenInput())
class SelectConfirm(Select):
    ''' Selection form for confirmations/acknowledgements. Dynamic choice lists are filled in __init__. '''
    template = 'bots/selectform.html'
    action = '/confirm/'
    confirmtype = django.forms.ChoiceField(confirmtypelist,required=False,initial='0')
    confirmed = django.forms.ChoiceField([('0',"All runs"),('1',"Current run"),('2',"Last run")],required=False,initial='0')
    idroute = django.forms.ChoiceField([],required=False,initial='')
    editype = django.forms.ChoiceField(editypelist,required=False)
    messagetype = django.forms.ChoiceField([],required=False)
    frompartner = django.forms.ChoiceField([],required=False)
    topartner = django.forms.ChoiceField([],required=False)
    fromchannel = django.forms.ChoiceField([],required=False)
    tochannel = django.forms.ChoiceField([],required=False)
    def __init__(self, *args, **kwargs):
        #choices are queried at form construction time so new database entries show up without a restart
        super(SelectConfirm, self).__init__(*args, **kwargs)
        self.fields['idroute'].choices = getroutelist()
        self.fields['messagetype'].choices = getallmessagetypes()
        self.fields['frompartner'].choices = getpartners()
        self.fields['topartner'].choices = getpartners()
        self.fields['fromchannel'].choices = getfromchannels()
        self.fields['tochannel'].choices = gettochannels()
class ViewConfirm(View):
    ''' Hidden-state form for the confirmations result view. '''
    template = 'bots/confirm.html'
    action = '/confirm/'
    confirmtype = django.forms.CharField(required=False,widget=HiddenInput())
    confirmed = django.forms.CharField(required=False,widget=HiddenInput())
    idroute = django.forms.CharField(required=False,widget=HiddenInput())
    editype = django.forms.CharField(required=False,widget=HiddenInput())
    messagetype = django.forms.CharField(required=False,widget=HiddenInput())
    frompartner = django.forms.CharField(required=False,widget=HiddenInput())
    topartner = django.forms.CharField(required=False,widget=HiddenInput())
    fromchannel = django.forms.CharField(required=False,widget=HiddenInput())
    tochannel = django.forms.CharField(required=False,widget=HiddenInput())
class UploadFileForm(django.forms.Form):
    ''' Form used to upload a plugin (zip) file. '''
    file = django.forms.FileField(label='Plugin to read',required=True,widget=django.forms.widgets.FileInput(attrs={'size':'100'}))
class PlugoutForm(django.forms.Form):
    ''' Form to choose what goes into a generated plugin (plugout); default filename is set in __init__. '''
    databaseconfiguration = django.forms.BooleanField(required=False,initial=True,help_text='Routes, channels, translations, partners, etc.')
    umlists = django.forms.BooleanField(required=False,initial=True,label='User maintained code lists',help_text='')
    fileconfiguration = django.forms.BooleanField(required=False,initial=True,help_text='Grammars, mapping scrips, routes scripts, etc. (bots/usersys)')
    infiles = django.forms.BooleanField(required=False,initial=True,help_text='Examples edi file in bots/botssys/infile')
    charset = django.forms.BooleanField(required=False,initial=False,label='(Edifact) files with character sets',help_text='seldom needed.')
    databasetransactions = django.forms.BooleanField(required=False,initial=False,help_text='From the database: Runs, incoming files, outgoing files, documents; only for support purposes, on request.')
    data = django.forms.BooleanField(required=False,initial=False,label='All transaction files',help_text='bots/botssys/data; only for support purposes, on request.')
    logfiles = django.forms.BooleanField(required=False,initial=False,label='Log files',help_text='bots/botssys/logging; only for support purposes, on request.')
    config = django.forms.BooleanField(required=False,initial=False,label='configuration files',help_text='bots/config; only for support purposes, on request.')
    database = django.forms.BooleanField(required=False,initial=False,label='SQLite database',help_text='Only for support purposes, on request.')
    filename = django.forms.CharField(required=True,label='Plugin filename',max_length=250)
    def __init__(self, *args, **kwargs):
        #default plugin filename contains the current date, so it must be computed per form instance
        super(PlugoutForm, self).__init__(*args, **kwargs)
        self.fields['filename'].initial = botslib.join(botsglobal.ini.get('directories','botssys'),'myplugin' + time.strftime('_%Y%m%d') + '.zip')
class DeleteForm(django.forms.Form):
    ''' Form to choose which parts of the bots installation to delete/clean up. '''
    delbackup = django.forms.BooleanField(required=False,label='Delete backups of user scripts',initial=True,help_text='Delete backup files in usersys (purge).')
    deltransactions = django.forms.BooleanField(required=False,label='Delete transactions',initial=True,help_text='Delete runs, reports, incoming, outgoing, data files.')
    delconfiguration = django.forms.BooleanField(required=False,label='Delete configuration',initial=False,help_text='Delete routes, channels, translations, partners etc.')
    delcodelists = django.forms.BooleanField(required=False,label='Delete user code lists',initial=False,help_text='Delete user code lists.')
    deluserscripts = django.forms.BooleanField(required=False,label='Delete all user scripts',initial=False,help_text='Delete all scripts in usersys (grammars, mappings etc) except charsets.')
    delinfile = django.forms.BooleanField(required=False,label='Delete botssys/infiles',initial=False,help_text='Delete files in botssys/infile.')
    deloutfile = django.forms.BooleanField(required=False,label='Delete botssys/outfiles',initial=False,help_text='Delete files in botssys/outfile.')
| [
[
1,
0,
0.0046,
0.0046,
0,
0.66,
0,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.0093,
0.0046,
0,
0.66,
0.0294,
294,
0,
1,
0,
0,
294,
0,
0
],
[
1,
0,
0.0139,
0.0046,
0,
... | [
"import time",
"import django",
"import models",
"import viewlib",
"import botslib",
"import botsglobal",
"django.contrib.admin.widgets.AdminSplitDateTime",
"HiddenInput = django.forms.widgets.HiddenInput",
"DEFAULT_ENTRY = ('',\"---------\")",
"editypelist=[DEFAULT_ENTRY] + sorted(models.EDITYPES... |
from django.utils.translation import ugettext as _
#bots-modules
import botslib
import node
from botsconfig import *
class Message(object):
    ''' abstract class; represents a edi message.
        is subclassed as outmessage or inmessage object.
    '''
    def __init__(self):
        self.recordnumber=0 #segment counter. Is not used for UNT of SE record; some editypes want sequential recordnumbering
    def kill(self):
        """ explicitly del big attributes....."""
        if hasattr(self,'ta_info'): del self.ta_info
        if hasattr(self,'root'): del self.root
        if hasattr(self,'defmessage'): del self.defmessage
        if hasattr(self,'records'): del self.records
        if hasattr(self,'rawinput'): del self.rawinput
    @staticmethod
    def display(records):
        '''for debugging lexed records.'''
        #each lexed field is indexed with the VALUE/SFIELD constants from botsconfig
        for record in records:
            t = 0
            for veld in record:
                if t==0:
                    print '%s (Record-id)'%(veld[VALUE])
                else:
                    if veld[SFIELD]:
                        print '    %s (sub)'%(veld[VALUE])
                    else:
                        print '    %s (veld)'%(veld[VALUE])
                t += 1
    def change(self,where,change):
        ''' query tree (self.root) with where; if found replace with change; return True if change, return False if not changed.'''
        if self.root.record is None:
            raise botslib.MappingRootError(_(u'change($where,$change"): "root" of incoming message is empty; either split messages or use inn.getloop'),where=where,change=change)
        return self.root.change(where,change)
    def delete(self,*mpaths):
        ''' query tree (self.root) with mpath; delete if found. return True if deleted, return False if not deleted.'''
        if self.root.record is None:
            raise botslib.MappingRootError(_(u'delete($mpath): "root" of incoming message is empty; either split messages or use inn.getloop'),mpath=mpaths)
        return self.root.delete(*mpaths)
    def get(self,*mpaths):
        ''' query tree (self.root) with mpath; get value (string); get None if not found.'''
        if self.root.record is None:
            raise botslib.MappingRootError(_(u'get($mpath): "root" of incoming message is empty; either split messages or use inn.getloop'),mpath=mpaths)
        return self.root.get(*mpaths)
    def getnozero(self,*mpaths):
        ''' like get, returns None is value is zero (0) or not numeric.
            Is sometimes usefull in mapping.'''
        if self.root.record is None:
            raise botslib.MappingRootError(_(u'get($mpath): "root" of incoming message is empty; either split messages or use inn.getloop'),mpath=mpaths)
        return self.root.getnozero(*mpaths)
    def getcount(self):
        ''' count number of nodes in self.root. Number of nodes is number of records.'''
        return self.root.getcount()
    def getcountoccurrences(self,*mpaths):
        ''' count the number of nodes matching mpath (via getloop).'''
        count = 0
        for value in self.getloop(*mpaths):
            count += 1
        return count
    def getcountsum(self,*mpaths):
        ''' return the sum for all values found in mpath. Eg total number of ordered quantities.'''
        if self.root.record is None:
            raise botslib.MappingRootError(_(u'get($mpath): "root" of incoming message is empty; either split messages or use inn.getloop'),mpath=mpaths)
        return self.root.getcountsum(*mpaths)
    def getloop(self,*mpaths):
        ''' query tree with mpath; generates all the nodes. Is typically used as: for record in inn.get(mpath):
        '''
        if self.root.record: #self.root is a real root
            for terug in self.root.getloop(*mpaths): #search recursive for rest of mpaths
                yield terug
        else: #self.root is dummy root
            for childnode in self.root.children:
                for terug in childnode.getloop(*mpaths): #search recursive for rest of mpaths
                    yield terug
    def put(self,*mpaths,**kwargs):
        ''' add mpath to the tree; delegates to self.root.put(). Guards against a dummy root that already has children.'''
        if self.root.record is None and self.root.children:
            raise botslib.MappingRootError(_(u'put($mpath): "root" of outgoing message is empty; use out.putloop'),mpath=mpaths)
        return self.root.put(*mpaths,**kwargs)
    def putloop(self,*mpaths):
        ''' start a repeating record; delegates to self.root.putloop().
            When the tree is still empty and exactly one mpath-part is given, a new child node
            is appended to the dummy root and returned.
        '''
        if not self.root.record: #no input yet, and start with a putloop(): dummy root
            if len(mpaths) == 1:
                self.root.append(node.Node(mpaths[0]))
                return self.root.children[-1]
            else: #TODO: what if self.root.record is None and len(mpaths) > 1?
                raise botslib.MappingRootError(_(u'putloop($mpath): mpath too long???'),mpath=mpaths)
        return self.root.putloop(*mpaths)
    def sort(self,*mpaths):
        ''' sort the nodes indicated by mpath; delegates to self.root.sort().'''
        if self.root.record is None:
            raise botslib.MappingRootError(_(u'get($mpath): "root" of message is empty; either split messages or use inn.getloop'),mpath=mpaths)
        self.root.sort(*mpaths)
    def normalisetree(self,node):
        ''' The node tree is check, sorted, fields are formatted etc.
            Always use this method before writing output.
        '''
        self._checktree(node,self.defmessage.structure[0])
        #~ node.display()
        self._canonicaltree(node,self.defmessage.structure[0])
    def _checktree(self,tree,structure):
        ''' checks tree with table:
            - all records should be in table at the right place in hierarchy
            - for each record, all fields should be in grammar
            This function checks the root of grammar-structure with root of node tree
        '''
        if tree.record['BOTSID'] == structure[ID]:
            #check tree recursively with structure
            self._checktreecore(tree,structure)
        else:
            raise botslib.MessageError(_(u'Grammar "$grammar" has (root)record "$grammarroot"; found "$root".'),root=tree.record['BOTSID'],grammarroot=structure[ID],grammar=self.defmessage.grammarname)
    def _checktreecore(self,node,structure):
        ''' recursive
        '''
        deletelist=[]
        self._checkfields(node.record,structure)
        if node.children and not LEVEL in structure:
            if self.ta_info['checkunknownentities']:
                raise botslib.MessageError(_(u'Record "$record" in message has children, but grammar "$grammar" not. Found "$xx".'),record=node.record['BOTSID'],grammar=self.defmessage.grammarname,xx=node.children[0].record['BOTSID'])
            node.children=[]
            return
        for childnode in node.children: #for every node:
            for structure_record in structure[LEVEL]: #search in grammar-records
                if childnode.record['BOTSID'] == structure_record[ID]: #if found right structure_record
                    #check children recursive
                    self._checktreecore(childnode,structure_record)
                    break #check next mpathnode
            else: #checked all structure_record in grammar, but nothing found
                if self.ta_info['checkunknownentities']:
                    raise botslib.MessageError(_(u'Record "$record" in message not in structure of grammar "$grammar". Whole record: "$content".'),record=childnode.record['BOTSID'],grammar=self.defmessage.grammarname,content=childnode.record)
                deletelist.append(childnode) #deleted after the loop: can not remove from a list while iterating it
        for child in deletelist:
            node.children.remove(child)
    def _checkfields(self,record,structure_record):
        ''' checks for every field in record if field exists in structure_record (from grammar).
        '''
        deletelist=[]
        for field in record.keys(): #all fields in record should exist in structure_record
            if field == 'BOTSIDnr': #BOTSIDnr is administrative, not part of the grammar
                continue
            for grammarfield in structure_record[FIELDS]:
                if grammarfield[ISFIELD]: #if field (no composite)
                    if field == grammarfield[ID]:
                        break
                else: #if composite
                    for grammarsubfield in grammarfield[SUBFIELDS]: #loop subfields
                        if field == grammarsubfield[ID]:
                            break
                    else:
                        continue
                    break
            else:
                if self.ta_info['checkunknownentities']:
                    raise botslib.MessageError(_(u'Record: "$mpath" field "$field" does not exist.'),field=field,mpath=structure_record[MPATH])
                deletelist.append(field) #deleted after the loop: can not remove keys while iterating
        for field in deletelist:
            del record[field]
    def _canonicaltree(self,node,structure,headerrecordnumber=0):
        ''' For nodes: check min and max occurence; sort the records conform grammar
        '''
        sortednodelist = []
        self._canonicalfields(node.record,structure,headerrecordnumber) #handle fields of this record
        if LEVEL in structure:
            for structure_record in structure[LEVEL]: #for structure_record of this level in grammar
                count = 0 #count number of occurences of record
                for childnode in node.children: #for every node in mpathtree; SPEED: delete nodes from list when found
                    if childnode.record['BOTSID'] != structure_record[ID] or childnode.record['BOTSIDnr'] != structure_record[BOTSIDnr]: #if it is not the right NODE":
                        continue
                    count += 1
                    self._canonicaltree(childnode,structure_record,self.recordnumber) #use rest of index in deeper level
                    sortednodelist.append(childnode)
                if structure_record[MIN] > count:
                    raise botslib.MessageError(_(u'Record "$mpath" mandatory but not present.'),mpath=structure_record[MPATH])
                if structure_record[MAX] < count:
                    raise botslib.MessageError(_(u'Record "$mpath" occurs to often ($count times).'),mpath=structure_record[MPATH],count=count)
            node.children=sortednodelist
        if hasattr(self,'get_queries_from_edi'): #hook: subclasses may extract query-fields while canonicalising
            self.get_queries_from_edi(node,structure)
    def _canonicalfields(self,noderecord,structure_record,headerrecordnumber):
        ''' For fields: check M/C; format the fields. Fields are not sorted (a dict can not be sorted).
            Fields are never added.
        '''
        #NOTE(review): self._formatfield is not defined in this class; presumably provided by subclasses -- confirm.
        for grammarfield in structure_record[FIELDS]:
            if grammarfield[ISFIELD]: #if field (no composite)
                value = noderecord.get(grammarfield[ID])
                #~ print '(message)field',noderecord,grammarfield
                if not value:
                    #~ print 'field',grammarfield[ID], 'has no value'
                    if grammarfield[MANDATORY] == 'M':
                        raise botslib.MessageError(_(u'Record "$mpath" field "$field" is mandatory.'),mpath=structure_record[MPATH],field=grammarfield[ID])
                    continue
                #~ print 'field',grammarfield[ID], 'value', value
                noderecord[grammarfield[ID]] = self._formatfield(value,grammarfield,structure_record)
            else: #if composite
                for grammarsubfield in grammarfield[SUBFIELDS]: #loop subfields to see if data in composite
                    if noderecord.get(grammarsubfield[ID]):
                        break #composite has data.
                else: #composite has no data
                    if grammarfield[MANDATORY]=='M':
                        raise botslib.MessageError(_(u'Record "$mpath" composite "$field" is mandatory.'),mpath=structure_record[MPATH],field=grammarfield[ID])
                    continue
                #there is data in the composite!
                for grammarsubfield in grammarfield[SUBFIELDS]: #loop subfields
                    value = noderecord.get(grammarsubfield[ID])
                    if not value:
                        if grammarsubfield[MANDATORY]=='M':
                            raise botslib.MessageError(_(u'Record "$mpath" subfield "$field" is mandatory: "$record".'),mpath=structure_record[MPATH],field=grammarsubfield[ID],record=noderecord)
                        continue
                    noderecord[grammarsubfield[ID]] = self._formatfield(value,grammarsubfield,structure_record)
| [
[
1,
0,
0.0043,
0.0043,
0,
0.66,
0,
389,
0,
1,
0,
0,
389,
0,
0
],
[
1,
0,
0.013,
0.0043,
0,
0.66,
0.25,
484,
0,
1,
0,
0,
484,
0,
0
],
[
1,
0,
0.0173,
0.0043,
0,
0.6... | [
"from django.utils.translation import ugettext as _",
"import botslib",
"import node",
"from botsconfig import *",
"class Message(object):\n ''' abstract class; represents a edi message.\n is subclassed as outmessage or inmessage object.\n '''\n def __init__(self):\n self.recordnumber... |
import os
import re
import zipfile
from django.utils.translation import ugettext as _
#bots-modules
import botslib
import botsglobal
from botsconfig import *
@botslib.log_session
def preprocess(routedict,function, status=FILEIN,**argv):
    ''' for pre- and postprocessing of files.
        these are NOT translations; translation involve grammars, mapping scripts etc. think of eg:
        - unzipping zipped files.
        - convert excel to csv
        - password protected files.
        Select files from INFILE -> SET_FOR_PROCESSING using criteria
        Than the actual processing function is called.
        The processing function does: SET_FOR_PROCESSING -> PROCESSING -> FILEIN
        If errors occur during processing, no ta are left with status FILEIN !
        preprocess is called right after the in-communicatiation
        Returns the number of files processed without error.
    '''
    nr_files = 0
    preprocessnumber = botslib.getpreprocessnumber()
    if not botslib.addinfo(change={'status':preprocessnumber},where={'status':status,'idroute':routedict['idroute'],'fromchannel':routedict['fromchannel']}): #check if there is something to do
        return 0
    for row in botslib.query(u'''SELECT idta,filename,charset
                                FROM ta
                                WHERE idta>%(rootidta)s
                                AND status=%(status)s
                                AND statust=%(statust)s
                                AND idroute=%(idroute)s
                                AND fromchannel=%(fromchannel)s
                                ''',
                                {'status':preprocessnumber,'statust':OK,'idroute':routedict['idroute'],'fromchannel':routedict['fromchannel'],'rootidta':botslib.get_minta4query()}):
        ta_processing = None #initialised so the error handler can tell whether the processing-ta was created
        try:
            botsglobal.logmap.debug(u'Start preprocessing "%s" for file "%s".',function.__name__,row['filename'])
            ta_set_for_processing = botslib.OldTransaction(row['idta'])
            ta_processing = ta_set_for_processing.copyta(status=preprocessnumber+1)
            ta_processing.filename=row['filename']
            function(ta_from=ta_processing,endstatus=status,routedict=routedict,**argv)
        except:
            txt=botslib.txtexc()
            if ta_processing is None:
                #error before the processing-ta existed (eg copyta failed); re-raise the original error
                #instead of masking it with a NameError on ta_processing (bug in previous version).
                raise
            ta_processing.failure()
            ta_processing.update(statust=ERROR,errortext=txt)
        else:
            botsglobal.logmap.debug(u'OK preprocessing "%s" for file "%s".',function.__name__,row['filename'])
            ta_set_for_processing.update(statust=DONE)
            ta_processing.update(statust=DONE)
            nr_files += 1
    return nr_files
#regex that finds the start of an x12 (ISA) or edifact (optional UNA service-string advice, then UNB) interchange.
#raw string used so the \s escapes are explicit (non-raw regex strings are a deprecation warning on modern Python);
#the pattern bytes are unchanged. For edifact: group 6 = syntax identifier (eg UNOA), group 7 = version.
header = re.compile(r'(\s*(ISA))|(\s*(UNA.{6})?\s*(U\s*N\s*B)s*.{1}(.{4}).{1}(.{1}))',re.DOTALL)
# group:                    1    2                  3    4        5           6      7
def mailbag(ta_from,endstatus,**argv):
    ''' split 'mailbag' files to separate files each containing one interchange (ISA-IEA or UNA/UNB-UNZ).
        handles x12 and edifact; these can be mixed.
        recognizes xml files. messagetype 'xml' has a special handling when reading xml-files.
        about auto-detect/mailbag:
        - in US mailbag is used: one file for all received edi messages...appended in one file. I heard that edifact and x12 can be mixed,
          but have actually never seen this.
        - bots needs a 'splitter': one edi-file, more interchanges. it is preferred to split these first.
        - handle multiple UNA in one file, including different charsets.
        - auto-detect: is is x12, edifact, xml, or??
    '''
    edifile = botslib.readdata(filename=ta_from.filename) #read as binary...
    startpos=0
    while (1):
        found = header.search(edifile[startpos:])
        if found is None:
            if startpos: #ISA/UNB have been found in file; no new ISA/UNB is found. So all processing is done.
                break
            #guess if this is an xml file.....
            sniffxml = edifile[:25]
            sniffxml = sniffxml.lstrip(' \t\n\r\f\v\xFF\xFE\xEF\xBB\xBF\x00') #to find first ' real' data; some char are because of BOM, UTF-16 etc
            if sniffxml and sniffxml[0]=='<':
                ta_to=ta_from.copyta(status=endstatus,statust=OK,filename=ta_from.filename,editype='xml',messagetype='mailbag') #make transaction for translated message; gets ta_info of ta_frommes
                #~ ta_tomes.update(status=STATUSTMP,statust=OK,filename=ta_set_for_processing.filename,editype='xml') #update outmessage transaction with ta_info;
                break;
            else:
                raise botslib.InMessageError(_(u'Found no content in mailbag.'))
        elif found.group(1):
            editype='x12'
            headpos=startpos+ found.start(2)
            count=0
            #the ISA header is fixed-length: the 4th character is the field separator, the 106th is the segment
            #terminator. CR/LF inside the header is skipped, except at position 105 where it may BE the terminator.
            for c in edifile[headpos:headpos+120]: #search first 120 characters to find separators
                if c in '\r\n' and count!=105:
                    continue
                count +=1
                if count==4:
                    field_sep = c
                elif count==106:
                    record_sep = c
                    break
            #~ foundtrailer = re.search(re.escape(record_sep)+'\s*IEA'+re.escape(field_sep)+'.+?'+re.escape(record_sep),edifile[headpos:],re.DOTALL)
            foundtrailer = re.search(re.escape(record_sep)+'\s*I\s*E\s*A\s*'+re.escape(field_sep)+'.+?'+re.escape(record_sep),edifile[headpos:],re.DOTALL)
        elif found.group(3):
            editype='edifact'
            if found.group(4): #UNA present: separators are read from the service string advice
                field_sep = edifile[startpos + found.start(4) + 4]
                record_sep = edifile[startpos + found.start(4) + 8]
                headpos=startpos+ found.start(4)
            else: #no UNA: use the edifact default separators
                field_sep = '+'
                record_sep = "'"
                headpos=startpos+ found.start(5)
            foundtrailer = re.search(re.escape(record_sep)+'\s*U\s*N\s*Z\s*'+re.escape(field_sep)+'.+?'+re.escape(record_sep),edifile[headpos:],re.DOTALL)
        if not foundtrailer:
            raise botslib.InMessageError(_(u'Found no valid envelope trailer in mailbag.'))
        endpos = headpos+foundtrailer.end()
        #so: interchange is from headerpos untill endpos
        #~ if header.search(edifile[headpos+25:endpos]): #check if there is another header in the interchange
        #~ raise botslib.InMessageError(u'Error in mailbag format: found no valid envelope trailer.')
        ta_to = ta_from.copyta(status=endstatus) #make transaction for translated message; gets ta_info of ta_frommes
        tofilename = str(ta_to.idta)
        tofile = botslib.opendata(tofilename,'wb')
        tofile.write(edifile[headpos:endpos])
        tofile.close()
        ta_to.update(statust=OK,filename=tofilename,editype=editype,messagetype=editype) #update outmessage transaction with ta_info;
        startpos=endpos
        botsglobal.logger.debug(_(u'        File written: "%s".'),tofilename)
def botsunzip(ta_from,endstatus,password=None,pass_non_zip=False,**argv):
    ''' unzip file: writes one new ta/file per member of the zip-archive;
        editype & messagetype are unchanged.
        Parameters:
        - password: optional password for a protected zip-file.
        - pass_non_zip: if True a file that is not a zip-file is passed on unchanged
          instead of raising InMessageError.
    '''
    try:
        z = zipfile.ZipFile(botslib.abspathdata(filename=ta_from.filename),mode='r')
    except zipfile.BadZipfile:
        botsglobal.logger.debug(_(u'File is not a zip-file.'))
        if pass_non_zip: #just pass the file
            botsglobal.logger.debug(_(u'"pass_non_zip" is True, just pass the file.'))
            ta_to = ta_from.copyta(status=endstatus,statust=OK)
            return
        raise botslib.InMessageError(_(u'File is not a zip-file.'))
    try:
        if password:
            z.setpassword(password)
        for f in z.infolist():
            if f.filename[-1] == '/': #check if this is a dir; if so continue
                continue
            ta_to = ta_from.copyta(status=endstatus)
            tofilename = str(ta_to.idta)
            tofile = botslib.opendata(tofilename,'wb')
            try:
                tofile.write(z.read(f.filename))
            finally:
                tofile.close()
            ta_to.update(statust=OK,filename=tofilename) #update outmessage transaction with ta_info;
            botsglobal.logger.debug(_(u'        File written: "%s".'),tofilename)
    finally:
        z.close() #the archive handle was leaked in the previous version; always release it
def extractpdf(ta_from,endstatus,**argv):
    ''' Try to extract text content of a PDF file to a csv.
        You know this is not a great idea, right? But we'll do the best we can anyway!
        Page and line numbers are added to each row.
        Columns and rows are based on the x and y coordinates of each text element within tolerance allowed.
        Multiple text elements may combine to make one field, some PDFs have every character separated!
        You may need to experiment with x_group and y_group values, but defaults seem ok for most files.
        Output csv is UTF-8 encoded - The csv module doesn't directly support reading and writing Unicode
        If the PDF is just an image, all bets are off. Maybe try OCR, good luck with that!
        Mike Griffin 14/12/2011
        Optional argv keys: x_group, y_group, password, quotechar, field_sep, escape, charset.
        Raises botslib.InMessageError when no text can be extracted or the file is not a PDF.
    '''
    #pdfminer is imported locally, so it is only required when this preprocessor is actually used.
    from pdfminer.pdfinterp import PDFResourceManager, process_pdf
    from pdfminer.converter import TextConverter
    from pdfminer.layout import LAParams, LTContainer, LTText, LTTextBox
    import csv
    class CsvConverter(TextConverter):
        def __init__(self, *args, **kwargs):
            TextConverter.__init__(self, *args, **kwargs)
        def receive_layout(self, ltpage):
            # recursively get every text element and it's coordinates
            def render(item):
                if isinstance(item, LTContainer):
                    for child in item:
                        render(child)
                elif isinstance(item, LTText):
                    (_,_,x,y) = item.bbox
                    # group the y values (rows) within group tolerance
                    for v in yv:
                        if y > v-y_group and y < v+y_group:
                            y = v
                    yv.append(y)
                    #key on -y so that ascending sort below yields top-of-page first (PDF y grows upward)
                    line = lines[int(-y)]
                    line[x] = item.get_text().encode('utf-8')
            from collections import defaultdict
            lines = defaultdict(lambda : {})
            yv = []
            render(ltpage)
            lineid = 0
            for y in sorted(lines.keys()):
                line = lines[y]
                lineid += 1
                csvdata = [ltpage.pageid,lineid]  # first 2 columns are page and line numbers
                # group the x values (fields) within group tolerance
                p = 0
                field_txt=''
                for x in sorted(line.keys()):
                    gap = x - p
                    if p > 0 and gap > x_group:
                        csvdata.append(field_txt)
                        field_txt=''
                    field_txt += line[x]
                    p = x
                csvdata.append(field_txt)
                csvout.writerow(csvdata)
            if lineid == 0:
                raise botslib.InMessageError(_(u'PDF text extraction failed, it may contain just image(s)?'))
    #get some optional parameters
    x_group = argv.get('x_group',10)  # group text closer than this as one field
    y_group = argv.get('y_group',5)   # group lines closer than this as one line
    password = argv.get('password','')
    quotechar = argv.get('quotechar','"')
    field_sep = argv.get('field_sep',',')
    escape = argv.get('escape','\\')
    charset = argv.get('charset','utf-8')
    if not escape:
        doublequote = True
    else:
        doublequote = False
    try:
        #NOTE(review): other extractors read via botslib.abspathdata; confirm opendata accepts the incoming filename here.
        pdf_stream = botslib.opendata(ta_from.filename, 'rb')
        ta_to = ta_from.copyta(status=endstatus)
        tofilename = str(ta_to.idta)
        csv_stream = botslib.opendata(tofilename,'wb')
        csvout = csv.writer(csv_stream, quotechar=quotechar, delimiter=field_sep, doublequote=doublequote, escapechar=escape)
        # Process PDF
        rsrcmgr = PDFResourceManager(caching=True)
        device = CsvConverter(rsrcmgr, csv_stream, codec=charset)
        process_pdf(rsrcmgr, device, pdf_stream, pagenos=set(), password=password, caching=True, check_extractable=True)
        device.close()
        pdf_stream.close()
        csv_stream.close()
        ta_to.update(statust=OK,filename=tofilename) #update outmessage transaction with ta_info;
        botsglobal.logger.debug(_(u'        File written: "%s".'),tofilename)
    except:
        #any failure (wrong file type, pdfminer error) is reported as InMessageError with the traceback text
        txt=botslib.txtexc()
        botsglobal.logger.error(_(u'PDF extraction failed, may not be a PDF file? Error:\n%s'),txt)
        raise botslib.InMessageError(_(u'PDF extraction failed, may not be a PDF file? Error:\n$error'),error=txt)
def extractexcel(ta_from,endstatus,**argv):
    ''' extract excel file.
        editype & messagetype are unchanged.
        Reads the first sheet of an xls file (via xlrd) and writes it as csv.
        Optional argv keys: charset, quotechar, field_sep, escape.
        Raises botslib.InMessageError when the file cannot be read as Excel.
    '''
    #***functions used by extractexcel
    #-------------------------------------------------------------------------------
    def read_xls(infilename):
        # Read excel first sheet into a 2-d array
        book = xlrd.open_workbook(infilename)
        sheet = book.sheet_by_index(0)
        #NOTE: tuple-parameter lambda is Python-2-only syntax.
        formatter = lambda(t,v): format_excelval(book,t,v,False)
        xlsdata = []
        for row in range(sheet.nrows):
            (types, values) = (sheet.row_types(row), sheet.row_values(row))
            xlsdata.append(map(formatter, zip(types, values)))
        return xlsdata
    #-------------------------------------------------------------------------------
    def dump_csv(xlsdata, tofilename):
        # Write the 2-d array as a csv file in the bots data directory.
        stream = botslib.opendata(tofilename, 'wb')
        csvout = csv.writer(stream, quotechar=quotechar, delimiter=field_sep, doublequote=doublequote, escapechar=escape)
        csvout.writerows( map(utf8ize, xlsdata) )
        stream.close()
    #-------------------------------------------------------------------------------
    def format_excelval(book, type, value, wanttupledate):
        # Clean up the incoming excel data for some data types
        # type 2=number, 3=date, 5=error cell (xlrd cell type codes).
        returnrow = []
        if type == 2:
            if value == int(value):
                value = int(value)
        elif type == 3:
            datetuple = xlrd.xldate_as_tuple(value, book.datemode)
            value = datetuple if wanttupledate else tupledate_to_isodate(datetuple)
        elif type == 5:
            value = xlrd.error_text_from_code[value]
        return value
    #-------------------------------------------------------------------------------
    def tupledate_to_isodate(tupledate):
        # Turns a gregorian (year, month, day, hour, minute, nearest_second) into a
        # standard YYYY-MM-DDTHH:MM:SS ISO date.
        (y,m,d, hh,mm,ss) = tupledate
        nonzero = lambda n: n!=0
        date = "%04d-%02d-%02d" % (y,m,d) if filter(nonzero, (y,m,d)) else ''
        time = "T%02d:%02d:%02d" % (hh,mm,ss) if filter(nonzero, (hh,mm,ss)) or not date else ''
        return date+time
    #-------------------------------------------------------------------------------
    def utf8ize(l):
        # Make string-like things into utf-8, leave other things alone
        return [unicode(s).encode(charset) if hasattr(s,'encode') else s for s in l]
    #***end functions used by extractexcel
    import xlrd
    import csv
    #get parameters for csv-format; defaults are as the csv defaults (in grammar.py)
    charset = argv.get('charset',"utf-8")
    quotechar = argv.get('quotechar',"'")
    field_sep = argv.get('field_sep',':')
    escape = argv.get('escape','')
    if escape:
        doublequote = False
    else:
        doublequote = True
    try:
        infilename = botslib.abspathdata(ta_from.filename)
        xlsdata = read_xls(infilename)
        ta_to = ta_from.copyta(status=endstatus)
        tofilename = str(ta_to.idta)
        dump_csv(xlsdata,tofilename)
        ta_to.update(statust=OK,filename=tofilename) #update outmessage transaction with ta_info;
        botsglobal.logger.debug(_(u'        File written: "%s".'),tofilename)
    except:
        #any failure (wrong file type, xlrd error) is reported as InMessageError with the traceback text
        txt=botslib.txtexc()
        botsglobal.logger.error(_(u'Excel extraction failed, may not be an Excel file? Error:\n%s'),txt)
        raise botslib.InMessageError(_(u'Excel extraction failed, may not be an Excel file? Error:\n$error'),error=txt)
| [
[
1,
0,
0.0031,
0.0031,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0061,
0.0031,
0,
0.66,
0.0833,
540,
0,
1,
0,
0,
540,
0,
0
],
[
1,
0,
0.0092,
0.0031,
0,
... | [
"import os",
"import re",
"import zipfile",
"from django.utils.translation import ugettext as _",
"import botslib",
"import botsglobal",
"from botsconfig import *",
"def preprocess(routedict,function, status=FILEIN,**argv):\n ''' for pre- and postprocessing of files.\n these are NOT translat... |
#bots modules
import botslib
import botsglobal
from botsconfig import *
from django.utils.translation import ugettext as _
#column list of the ta-table; used in the SELECT-queries below so rows can be
#expanded into botslib.OldTransaction(**row) objects.
tavars = 'idta,statust,divtext,child,ts,filename,status,idroute,fromchannel,tochannel,frompartner,topartner,frommail,tomail,contenttype,nrmessages,editype,messagetype,errortext,script'
def evaluate(type,stuff2evaluate):
    ''' evaluate a bots run: dispatch to retry-evaluation or normal run-evaluation.
        type: commandline option of the run (eg '--retry').
        stuff2evaluate: idta of the root-ta of the run.
        Returns 0 when OK, 1 when there has been an error.
    '''
    # catch errors in retry....this should of course not happen...
    try:
        if type in ['--retry','--retrycommunication','--automaticretrycommunication']:
            return evaluateretryrun(type,stuff2evaluate)
        else:
            return evaluaterun(type,stuff2evaluate)
    except Exception:   #was a bare 'except:'; Exception no longer swallows SystemExit/KeyboardInterrupt
        botsglobal.logger.exception(_(u'Error in automatic maintenance.'))
        return 1    #there has been an error!
def evaluaterun(type,stuff2evaluate):
    ''' traces all received files.
        Write a filereport for each file,
        and writes a report for the run.
        stuff2evaluate: idta of the root-ta of the run; incoming files of this
        run are all ta's with idta > root and status EXTERNIN.
    '''
    resultlast={OPEN:0,ERROR:0,OK:0,DONE:0}     #gather results of all filereports for runreport
    #look at infiles from this run; trace them to determine their tracestatus.
    for tadict in botslib.query('''SELECT ''' + tavars + '''
                                FROM ta
                                WHERE idta > %(rootidta)s
                                AND status=%(status)s ''',
                                {'status':EXTERNIN,'rootidta':stuff2evaluate}):
        botsglobal.logger.debug(u'evaluate %s.',tadict['idta'])
        mytrace = Trace(tadict,stuff2evaluate)
        resultlast[mytrace.statusttree]+=1      #tally the evaluated tree-status for the run report
        insert_filereport(mytrace)
        #explicitly drop references to the (potentially large) ta-tree, presumably to free memory per file
        del mytrace.ta
        del mytrace
    return finish_evaluation(stuff2evaluate,resultlast,type)
def evaluateretryrun(type,stuff2evaluate):
    ''' as evaluaterun, but for retry-runs: re-evaluates every file whose
        filereport is not yet DONE. Returns 0 directly when nothing was retried.
    '''
    resultlast={OPEN:0,ERROR:0,OK:0,DONE:0}     #gather results of all filereports for runreport
    didretry = False
    #select all incoming files whose newest filereport is not DONE yet
    for row in botslib.query('''SELECT idta
                                FROM filereport
                                GROUP BY idta
                                HAVING MAX(statust) != %(statust)s''',
                                {'statust':DONE}):
        didretry = True
        for tadict in botslib.query('''SELECT ''' + tavars + '''
                                    FROM ta
                                    WHERE idta= %(idta)s ''',
                                    {'idta':row['idta']}):
            break   #only need the single matching ta-row
        else:   #there really should be a corresponding ta
            raise botslib.PanicError(_(u'MaintenanceRetry: could not find transaction "$txt".'),txt=row['idta'])
        mytrace = Trace(tadict,stuff2evaluate)
        resultlast[mytrace.statusttree]+=1
        if mytrace.statusttree == DONE:     #file is resolved now: clear the old error text
            mytrace.errortext = ''
        #~ mytrace.ta.update(tracestatus=mytrace.statusttree)
        #ts for retried filereports is tricky: is this the time the file was originally received? best would be to use ts of prepare...
        #that is quite difficult, so use time of this run
        rootta=botslib.OldTransaction(stuff2evaluate)
        rootta.syn('ts')    #get the timestamp of this run
        mytrace.ts = rootta.ts
        insert_filereport(mytrace)
        del mytrace.ta
        del mytrace
    if not didretry:
        return 0    #no error
    return finish_evaluation(stuff2evaluate,resultlast,type)
def insert_filereport(mytrace):
    ''' write one filereport row for an evaluated incoming file;
        all values come from the attributes gathered by the Trace object.
    '''
    botslib.change(u'''INSERT INTO filereport (idta,statust,reportidta,retransmit,idroute,fromchannel,ts,
                                                infilename,tochannel,frompartner,topartner,frommail,
                                                tomail,ineditype,inmessagetype,outeditype,outmessagetype,
                                                incontenttype,outcontenttype,nrmessages,outfilename,errortext,
                                                divtext,outidta)
                        VALUES  (%(idta)s,%(statust)s,%(reportidta)s,%(retransmit)s,%(idroute)s,%(fromchannel)s,%(ts)s,
                                %(infilename)s,%(tochannel)s,%(frompartner)s,%(topartner)s,%(frommail)s,
                                %(tomail)s,%(ineditype)s,%(inmessagetype)s,%(outeditype)s,%(outmessagetype)s,
                                %(incontenttype)s,%(outcontenttype)s,%(nrmessages)s,%(outfilename)s,%(errortext)s,
                                %(divtext)s,%(outidta)s )
                        ''',
                        {'idta':mytrace.idta,'statust':mytrace.statusttree,'reportidta':mytrace.reportidta,
                        'retransmit':mytrace.retransmit,'idroute':mytrace.idroute,'fromchannel':mytrace.fromchannel,
                        'ts':mytrace.ts,'infilename':mytrace.infilename,'tochannel':mytrace.tochannel,
                        'frompartner':mytrace.frompartner,'topartner':mytrace.topartner,'frommail':mytrace.frommail,
                        'tomail':mytrace.tomail,'ineditype':mytrace.ineditype,'inmessagetype':mytrace.inmessagetype,
                        'outeditype':mytrace.outeditype,'outmessagetype':mytrace.outmessagetype,
                        'incontenttype':mytrace.incontenttype,'outcontenttype':mytrace.outcontenttype,
                        'nrmessages':mytrace.nrmessages,'outfilename':mytrace.outfilename,'errortext':mytrace.errortext,
                        'divtext':mytrace.divtext,'outidta':mytrace.outidta})
def finish_evaluation(stuff2evaluate,resultlast,type):
    ''' write the run-report row (table report) for this run and generate
        the (emailed) report text; returns 0 (no error) or 1 (error).
        resultlast: dict {OPEN/ERROR/OK/DONE: count} gathered from the filereports.
    '''
    #count nr files send
    for row in botslib.query('''SELECT COUNT(*) as count
                                FROM ta
                                WHERE idta > %(rootidta)s
                                AND status=%(status)s
                                AND statust=%(statust)s ''',
                                {'status':EXTERNOUT,'rootidta':stuff2evaluate,'statust':DONE}):
        send = row['count']
    #count process errors
    for row in botslib.query('''SELECT COUNT(*) as count
                                FROM ta
                                WHERE idta >= %(rootidta)s
                                AND status=%(status)s
                                AND statust=%(statust)s''',
                                {'status':PROCESS,'rootidta':stuff2evaluate,'statust':ERROR}):
        processerrors = row['count']
    #generate report (in database)
    rootta=botslib.OldTransaction(stuff2evaluate)
    rootta.syn('ts')    #get the timestamp of this run
    LastReceived=resultlast[DONE]+resultlast[OK]+resultlast[OPEN]+resultlast[ERROR]
    #run has error-status when any filereport is not DONE or any process error occurred
    status = bool(resultlast[OK]+resultlast[OPEN]+resultlast[ERROR]+processerrors)
    botslib.change(u'''INSERT INTO report (idta,lastopen,lasterror,lastok,lastdone,
                                            send,processerrors,ts,lastreceived,status,type)
                        VALUES  (%(idta)s,
                                %(lastopen)s,%(lasterror)s,%(lastok)s,%(lastdone)s,
                                %(send)s,%(processerrors)s,%(ts)s,%(lastreceived)s,%(status)s,%(type)s)
                        ''',
                        {'idta':stuff2evaluate,
                        'lastopen':resultlast[OPEN],'lasterror':resultlast[ERROR],'lastok':resultlast[OK],'lastdone':resultlast[DONE],
                        'send':send,'processerrors':processerrors,'ts':rootta.ts,'lastreceived':LastReceived,'status':status,'type':type[2:]})
    return generate_report(stuff2evaluate)  #return report status: 0 (no error) or 1 (error)
def generate_report(stuff2evaluate):
    ''' read back the report row for this run, log a human-readable summary,
        and email an error report when needed; returns 0 (no error) or 1 (error).
    '''
    for results in botslib.query('''SELECT idta,lastopen,lasterror,lastok,lastdone,
                                        send,processerrors,ts,lastreceived,type,status
                                    FROM report
                                    WHERE idta=%(rootidta)s''',
                                    {'rootidta':stuff2evaluate}):
        break   #only one report row per run
    else:
        raise botslib.PanicError(_(u'In generate report: could not find report?'))
    subject = _(u'[Bots Error Report] %(time)s')%{'time':str(results['ts'])[:16]}
    reporttext = _(u'Bots Report; type: %(type)s, time: %(time)s\n')%{'type':results['type'],'time':str(results['ts'])[:19]}
    reporttext += _(u'    %d files received/processed in run.\n')%(results['lastreceived'])
    if results['lastdone']:
        reporttext += _(u'    %d files without errors,\n')%(results['lastdone'])
    if results['lasterror']:
        subject += _(u'; %d file errors')%(results['lasterror'])
        reporttext += _(u'    %d files with errors,\n')%(results['lasterror'])
    if results['lastok']:
        subject += _(u'; %d files stuck')%(results['lastok'])
        reporttext += _(u'    %d files got stuck,\n')%(results['lastok'])
    if results['lastopen']:
        subject += _(u'; %d system errors')%(results['lastopen'])
        reporttext += _(u'    %d system errors,\n')%(results['lastopen'])
    if results['processerrors']:
        subject += _(u'; %d process errors')%(results['processerrors'])
        reporttext += _(u'    %d errors in processes.\n')%(results['processerrors'])
    reporttext += _(u'    %d files send in run.\n')%(results['send'])
    botsglobal.logger.info(reporttext)
    # sendreportifprocesserror allows blocking of email reports for process errors
    if (results['lasterror'] or results['lastopen'] or results['lastok'] or
        (results['processerrors'] and botsglobal.ini.getboolean('settings','sendreportifprocesserror',True))):
        botslib.sendbotserrorreport(subject,reporttext)
    return int(results['status'])   #return report status: 0 (no error) or 1 (error)
class Trace(object):
    ''' ediobject-ta's form a tree; the incoming ediobject-ta (status EXTERNIN) is root.
        (yes, this works for merging, strange but inherent).
        tree gets a (one) statust, by walking the tree and evaluating the statust of nodes.
        all nodes are put into a tree of ta-objects;
    '''
    def __init__(self,tadict,stuff2evaluate):
        # tadict: db-row (dict-like) of the root ta; stuff2evaluate: idta of the run's root-ta.
        realdict = dict([(key,tadict[key]) for key in tadict.keys()])
        self.ta=botslib.OldTransaction(**realdict)
        self.rootidta = stuff2evaluate
        self._buildevaluationstructure(self.ta)     #build the ta-tree (fills .talijst of each node)
        #~ self.display(self.ta)
        self._evaluatestatus()          #sets self.statusttree
        self._gatherfilereportdata()    #sets the filereport attributes (frompartner, filenames, etc)
    def display(self,currentta,level=0):
        # debug helper: print the ta-tree, indented per level.
        print level*'    ',currentta.idta,currentta.statust,currentta.talijst
        for ta in currentta.talijst:
            self.display(ta,level+1)
    def _buildevaluationstructure(self,tacurrent):
        ''' recursive,for each db-ta:
            - fill global talist with the children (and children of children, etc)
        '''
        #gather next steps/ta's for tacurrent;
        if tacurrent.child:     #find successor by using child relation ship
            for row in botslib.query('''SELECT ''' + tavars + '''
                                        FROM ta
                                        WHERE idta=%(child)s''',
                                        {'child':tacurrent.child}):
                realdict = dict([(key,row[key]) for key in row.keys()])
                tacurrent.talijst = [botslib.OldTransaction(**realdict)]
        else:       #find successor by using parent-relationship; mostly this relation except for merge operations
            talijst = []
            for row in botslib.query('''SELECT ''' + tavars + '''
                                        FROM ta
                                        WHERE idta > %(currentidta)s
                                        AND parent=%(currentidta)s ''',  #adding the idta > %(parent)s to selection speeds up a lot.
                                        {'currentidta':tacurrent.idta}):
                realdict = dict([(key,row[key]) for key in row.keys()])
                talijst.append(botslib.OldTransaction(**realdict))
            #filter:
            #one ta might have multiple children; 2 possible reasons for that:
            #1. split up
            #2. error is processing the file; and retried
            #Here case 2 (error/retry) is filtered; it is not interesting to evaluate the older errors!
            #So: if the same filename and different script: use newest idta
            #shortcut: when an error occurs in a split all is turned back.
            #so: split up is OK as a whole or because of retries.
            #so: if split, and different scripts: split is becaue of retries: use newest idta.
            #~ print tacurrent.talijst
            if len(talijst) > 1 and talijst[0].script != talijst[1].script:
                #find higest idta
                highest_ta = talijst[0]
                for ta in talijst[1:]:
                    if ta.idta > highest_ta.idta:
                        highest_ta = ta
                tacurrent.talijst = [highest_ta]
            else:
                tacurrent.talijst = talijst
        #recursive build:
        for child in tacurrent.talijst:
            self._buildevaluationstructure(child)
    def _evaluatestatus(self):
        # condense the whole tree into one statust (self.statusttree);
        # the tree-walk signals its verdict partly via exceptions.
        self.done = False
        try:
            self.statusttree = self._evaluatetreestatus(self.ta)
            if self.statusttree == OK:
                self.statusttree = ERROR    #this is ugly!!
        except botslib.TraceNotPickedUpError:
            self.statusttree = OK
        except:     #botslib.TraceError:
            self.statusttree = OPEN
    def _evaluatetreestatus(self,tacurrent):
        ''' recursive, walks tree of ediobject-ta, depth-first
            for each db-ta:
            - get statust of all child-db-ta (recursive); count these statust's
            - evaluate this
            rules for evaluating:
            -   typical error-situation: DONE->OK->ERROR
            -   Db-ta with statust OK will be picked up next botsrun.
            -   if succes on next botsrun: DONE-> DONE-> ERROR
                                                      -> DONE
            -   one db-ta can have more children; each of these children has to evaluated
            -   not possible is: DONE-> ERROR (because there should always be statust OK)
        '''
        statustcount = [0,0,0,0]    #count of statust: number of OPEN, ERROR, OK, DONE
        for child in tacurrent.talijst:
            if child.idta > self.rootidta:      #a child within the current run: tree continues
                self.done = True
                statustcount[self._evaluatetreestatus(child)]+=1
        else:   #evaluate & return statust of current ta & children;
            if tacurrent.statust==DONE:
                if statustcount[OK]:
                    return OK   #at least one of the child-trees is not DONE
                elif statustcount[DONE]:
                    return DONE     #all is OK
                elif statustcount[ERROR]:
                    raise botslib.TraceError(_(u'DONE but no child is DONE or OK (idta: $idta).'),idta=tacurrent.idta)
                else:   #if no ERROR and has no children: end of trace
                    return DONE
            elif tacurrent.statust==OK:
                if statustcount[ERROR]:
                    return OK   #child(ren) ERROR, this is expected
                elif statustcount[DONE]:
                    raise botslib.TraceError(_(u'OK but child is DONE (idta: $idta). Changing setup while errors are pending?'),idta=tacurrent.idta)
                elif statustcount[OK]:
                    raise botslib.TraceError(_(u'OK but child is OK (idta: $idta). Changing setup while errors are pending?'),idta=tacurrent.idta)
                else:
                    raise botslib.TraceNotPickedUpError(_(u'OK but file is not processed further (idta: $idta).'),idta=tacurrent.idta)
            elif tacurrent.statust==ERROR:
                if tacurrent.talijst:
                    raise botslib.TraceError(_(u'ERROR but has child(ren) (idta: $idta). Changing setup while errors are pending?'),idta=tacurrent.idta)
                else:
                    #~ self.errorta += [tacurrent]
                    return ERROR
            else:   #tacurrent.statust==OPEN
                raise botslib.TraceError(_(u'Severe error: found statust  (idta: $idta).'),idta=tacurrent.idta)
    def _gatherfilereportdata(self):
        ''' Walk the ta-tree again in order to retrieve information/data belonging to incoming file; statust (OK, DONE, ERROR etc) is NOT done here.
            If information is different in different ta's: place '*'
            Start 'root'-ta; a file coming in; status=EXTERNIN. Retrieve as much information from ta's as possible for the filereport.
        '''
        def core(ta):
            # collect attributes per ta-status; when a value differs between ta's
            # of the same tree it is collapsed to '*' (if 'asterisk' setting is on).
            if ta.status==MIMEIN:
                self.frommail=ta.frommail
                self.tomail=ta.tomail
                self.incontenttype=ta.contenttype
            elif ta.status==RAWOUT:
                if ta.frommail:
                    if self.frommail:
                        if self.frommail != ta.frommail and asterisk:
                            self.frommail='*'
                    else:
                        self.frommail=ta.frommail
                if ta.tomail:
                    if self.tomail:
                        if self.tomail != ta.tomail and asterisk:
                            self.tomail='*'
                    else:
                        self.tomail=ta.tomail
                if ta.contenttype:
                    if self.outcontenttype:
                        if self.outcontenttype != ta.contenttype and asterisk:
                            self.outcontenttype='*'
                    else:
                        self.outcontenttype=ta.contenttype
                if ta.idta:
                    if self.outidta:
                        if self.outidta != ta.idta and asterisk:
                            self.outidta=0
                    else:
                        self.outidta=ta.idta
            elif ta.status==TRANSLATE:
                #self.ineditype=ta.editype
                if self.ineditype:
                    if self.ineditype!=ta.editype and asterisk:
                        self.ineditype='*'
                else:
                    self.ineditype=ta.editype
            elif ta.status==SPLITUP:
                self.nrmessages+=1      #every SPLITUP-ta is one message in the incoming file
                if self.inmessagetype:
                    if self.inmessagetype!=ta.messagetype and asterisk:
                        self.inmessagetype='*'
                else:
                    self.inmessagetype=ta.messagetype
            elif ta.status==TRANSLATED:
                #self.outeditype=ta.editype
                if self.outeditype:
                    if self.outeditype!=ta.editype and asterisk:
                        self.outeditype='*'
                else:
                    self.outeditype=ta.editype
                if self.outmessagetype:
                    if self.outmessagetype!=ta.messagetype and asterisk:
                        self.outmessagetype='*'
                else:
                    self.outmessagetype=ta.messagetype
                if self.divtext:
                    if self.divtext!=ta.divtext and asterisk:
                        self.divtext='*'
                else:
                    self.divtext=ta.divtext
            elif ta.status==EXTERNOUT:
                if self.outfilename:
                    if self.outfilename != ta.filename and asterisk:
                        self.outfilename='*'
                else:
                    self.outfilename=ta.filename
                if self.tochannel:
                    if self.tochannel != ta.tochannel and asterisk:
                        self.tochannel='*'
                else:
                    self.tochannel=ta.tochannel
            if ta.frompartner:
                if not self.frompartner:
                    self.frompartner=ta.frompartner
                elif self.frompartner!=ta.frompartner and asterisk:
                    self.frompartner='*'
            if ta.topartner:
                if not self.topartner:
                    self.topartner=ta.topartner
                elif self.topartner!=ta.topartner and asterisk:
                    self.topartner='*'
            if ta.errortext:
                self.errortext = ta.errortext
            for child in ta.talijst:
                core(child)
        #end of core function
        asterisk = botsglobal.ini.getboolean('settings','multiplevaluesasterisk',True)
        #initialise the filereport attributes; core() fills them while walking the tree
        self.idta = self.ta.idta
        self.reportidta = self.rootidta
        self.retransmit = 0
        self.idroute = self.ta.idroute
        self.fromchannel = self.ta.fromchannel
        self.ts = self.ta.ts
        self.infilename = self.ta.filename
        self.tochannel = ''
        self.frompartner = ''
        self.topartner = ''
        self.frommail = ''
        self.tomail = ''
        self.ineditype = ''
        self.inmessagetype = ''
        self.outeditype = ''
        self.outmessagetype = ''
        self.incontenttype = ''
        self.outcontenttype = ''
        self.nrmessages = 0
        self.outfilename = ''
        self.outidta = 0
        self.errortext = ''
        self.divtext = ''
        core(self.ta)
| [
[
1,
0,
0.0049,
0.0025,
0,
0.66,
0,
484,
0,
1,
0,
0,
484,
0,
0
],
[
1,
0,
0.0074,
0.0025,
0,
0.66,
0.0909,
261,
0,
1,
0,
0,
261,
0,
0
],
[
1,
0,
0.0099,
0.0025,
0,
... | [
"import botslib",
"import botsglobal",
"from botsconfig import *",
"from django.utils.translation import ugettext as _",
"tavars = 'idta,statust,divtext,child,ts,filename,status,idroute,fromchannel,tochannel,frompartner,topartner,frommail,tomail,contenttype,nrmessages,editype,messagetype,errortext,script'",... |
'''module contains the functions to be called from user scripts'''
try:
import cPickle as pickle
except:
import pickle
import copy
import collections
from django.utils.translation import ugettext as _
#bots-modules
import botslib
import botsglobal
import inmessage
import outmessage
from botsconfig import *
#*******************************************************************************************************************
#****** functions imported from other modules. reason: user scripting uses primary transform functions *************
#*******************************************************************************************************************
from botslib import addinfo,updateinfo,changestatustinfo,checkunique
from envelope import mergemessages
from communication import run
@botslib.log_session
def translate(startstatus=TRANSLATE,endstatus=TRANSLATED,idroute=''):
    ''' translates edifiles in one or more edimessages.
        reads and parses edifiles that have to be translated.
        tries to split files into messages (using 'nextmessage' of grammar); if no splitting: edifile is one message.
        searches the right translation in translate-table;
        runs the mapping-script for the translation;
        Function takes db-ta with status=TRANSLATE->PARSED->SPLITUP->TRANSLATED
    '''
    #select edifiles to translate; fill ta-object
    #~ import gc
    #~ gc.disable()
    for row in botslib.query(u'''SELECT idta,frompartner,topartner,filename,messagetype,testindicator,editype,charset,alt,fromchannel
                                FROM ta
                                WHERE idta>%(rootidta)s
                                AND status=%(status)s
                                AND statust=%(statust)s
                                AND idroute=%(idroute)s
                                ''',
                                {'status':startstatus,'statust':OK,'idroute':idroute,'rootidta':botslib.get_minta4query()}):
        try:
            ta_fromfile=botslib.OldTransaction(row['idta'])  #TRANSLATE ta
            ta_parsedfile = ta_fromfile.copyta(status=PARSED)   #copy TRANSLATE to PARSED ta
            #whole edi-file is read, parsed and made into a inmessage-object:
            edifile = inmessage.edifromfile(frompartner=row['frompartner'],
                                            topartner=row['topartner'],
                                            filename=row['filename'],
                                            messagetype=row['messagetype'],
                                            testindicator=row['testindicator'],
                                            editype=row['editype'],
                                            charset=row['charset'],
                                            alt=row['alt'],
                                            fromchannel=row['fromchannel'],
                                            idroute=idroute)
            botsglobal.logger.debug(u'start read and parse input file "%s" editype "%s" messagetype "%s".',row['filename'],row['editype'],row['messagetype'])
            for inn in edifile.nextmessage():   #for each message in the edifile:
                #inn.ta_info: parameters from inmessage.edifromfile(), syntax-information and parse-information
                ta_frommes=ta_parsedfile.copyta(status=SPLITUP)    #copy PARSED to SPLITUP ta
                inn.ta_info['idta_fromfile'] = ta_fromfile.idta     #for confirmations in user script; used to give idta of 'confirming message'
                ta_frommes.update(**inn.ta_info)    #update ta-record SLIPTUP with info from message content and/or grammar
                while 1:    #whileloop continues as long as there are alt-translations
                    #************select parameters for translation(script):
                    for row2 in botslib.query(u'''SELECT tscript,tomessagetype,toeditype
                                                FROM translate
                                                WHERE frommessagetype = %(frommessagetype)s
                                                AND fromeditype = %(fromeditype)s
                                                AND active=%(booll)s
                                                AND alt=%(alt)s
                                                AND (frompartner_id IS NULL OR frompartner_id=%(frompartner)s OR frompartner_id in (SELECT to_partner_id
                                                                                                FROM partnergroup
                                                                                                WHERE from_partner_id=%(frompartner)s ))
                                                AND (topartner_id IS NULL OR topartner_id=%(topartner)s OR topartner_id in (SELECT to_partner_id
                                                                                                FROM partnergroup
                                                                                                WHERE from_partner_id=%(topartner)s ))
                                                ORDER BY alt DESC,
                                                         CASE WHEN frompartner_id IS NULL THEN 1 ELSE 0 END, frompartner_id ,
                                                         CASE WHEN topartner_id IS NULL THEN 1 ELSE 0 END, topartner_id ''',
                                                {'frommessagetype':inn.ta_info['messagetype'],
                                                'fromeditype':inn.ta_info['editype'],
                                                'alt':inn.ta_info['alt'],
                                                'frompartner':inn.ta_info['frompartner'],
                                                'topartner':inn.ta_info['topartner'],
                                                'booll':True}):
                        break   #escape if found; we need only the first - ORDER BY in the query
                    else:       #no translation record is found
                        raise botslib.TranslationNotFoundError(_(u'Editype "$editype", messagetype "$messagetype", frompartner "$frompartner", topartner "$topartner", alt "$alt"'),
                                                                editype=inn.ta_info['editype'],
                                                                messagetype=inn.ta_info['messagetype'],
                                                                frompartner=inn.ta_info['frompartner'],
                                                                topartner=inn.ta_info['topartner'],
                                                                alt=inn.ta_info['alt'])
                    ta_tomes=ta_frommes.copyta(status=endstatus)  #copy SPLITUP to TRANSLATED ta
                    tofilename = str(ta_tomes.idta)
                    tscript=row2['tscript']
                    #was: reference=unique('messagecounter') - 'unique' is not imported in this module
                    #(only addinfo,updateinfo,changestatustinfo,checkunique are), so qualify via botslib.
                    tomessage = outmessage.outmessage_init(messagetype=row2['tomessagetype'],editype=row2['toeditype'],filename=tofilename,reference=botslib.unique('messagecounter'),statust=OK,divtext=tscript)    #make outmessage object
                    #copy ta_info
                    botsglobal.logger.debug(u'script "%s" translates messagetype "%s" to messagetype "%s".',tscript,inn.ta_info['messagetype'],tomessage.ta_info['messagetype'])
                    translationscript,scriptfilename = botslib.botsimport('mappings',inn.ta_info['editype'] + '.' + tscript)    #get the mapping-script
                    doalttranslation = botslib.runscript(translationscript,scriptfilename,'main',inn=inn,out=tomessage)
                    botsglobal.logger.debug(u'script "%s" finished.',tscript)
                    if 'topartner' not in tomessage.ta_info:    #tomessage does not contain values from ta......
                        tomessage.ta_info['topartner']=inn.ta_info['topartner']
                    if tomessage.ta_info['statust'] == DONE:    #if indicated in user script the message should be discarded
                        botsglobal.logger.debug(u'No output file because mapping script explicitly indicated this.')
                        tomessage.ta_info['filename'] = ''
                        tomessage.ta_info['status'] = DISCARD
                    else:
                        botsglobal.logger.debug(u'Start writing output file editype "%s" messagetype "%s".',tomessage.ta_info['editype'],tomessage.ta_info['messagetype'])
                        tomessage.writeall()   #write tomessage (result of translation).
                        #problem is that not all values ta_tomes are know to to_message....
                    #~ print 'tomessage.ta_info',tomessage.ta_info
                    ta_tomes.update(**tomessage.ta_info)  #update outmessage transaction with ta_info;
                    del tomessage
                    #~ gc.collect()
                    if not doalttranslation:    #mapping script may return an 'alt' value to trigger another translation
                        break   #out of while loop
                    else:
                        inn.ta_info['alt'] = doalttranslation
                #end of while-loop
                #~ print inn.ta_info
                ta_frommes.update(statust=DONE,**inn.ta_info)   #update db. inn.ta_info could be changed by script. Is this useful?
                del inn
                #~ gc.collect()
        #exceptions file_in-level
        except:
            #~ edifile.handleconfirm(ta_fromfile,error=True)  #only useful if errors are reported in acknowledgement (eg x12 997). Not used now.
            txt=botslib.txtexc()
            ta_parsedfile.failure()
            ta_parsedfile.update(statust=ERROR,errortext=txt)
            botsglobal.logger.debug(u'error in translating input file "%s":\n%s',row['filename'],txt)
        else:
            edifile.handleconfirm(ta_fromfile,error=False)
            ta_fromfile.update(statust=DONE)
            ta_parsedfile.update(statust=DONE,**edifile.confirminfo)
            botsglobal.logger.debug(u'translated input file "%s".',row['filename'])
            del edifile
            #~ gc.collect()
    #~ gc.enable()
#*********************************************************************
#*** utility functions for persist: store things in the bots database.
#*** this is intended as a memory that persists across messages.
#*********************************************************************
def persist_add(domein,botskey,value):
    ''' Store a persistent value (pickled) in the db under (domein,botskey).
        Raises botslib.PersistError when the pickled value is too large for the
        content column (non-sqlite backends) or when the insert fails.
    '''
    pickled = pickle.dumps(value,0)
    #sqlite has no fixed column size; other db backends limit content to 1024 bytes
    too_long = len(pickled)>1024 and botsglobal.settings.DATABASE_ENGINE != 'sqlite3'
    if too_long:
        raise botslib.PersistError(_(u'Data too long for domein "$domein", botskey "$botskey", value "$value".'),domein=domein,botskey=botskey,value=value)
    try:
        botslib.change(u'''INSERT INTO persist (domein,botskey,content)
                            VALUES (%(domein)s,%(botskey)s,%(content)s)''',
                            {'domein':domein,'botskey':botskey,'content':pickled})
    except:
        raise botslib.PersistError(_(u'Failed to add for domein "$domein", botskey "$botskey", value "$value".'),domein=domein,botskey=botskey,value=value)
def persist_update(domein,botskey,value):
    ''' Update an existing persistent value (pickled) for (domein,botskey).
        Raises botslib.PersistError when the pickled value is too large for the
        content column (non-sqlite backends).
    '''
    pickled = pickle.dumps(value,0)
    #sqlite has no fixed column size; other db backends limit content to 1024 bytes
    if len(pickled)>1024 and botsglobal.settings.DATABASE_ENGINE != 'sqlite3':
        raise botslib.PersistError(_(u'Data too long for domein "$domein", botskey "$botskey", value "$value".'),domein=domein,botskey=botskey,value=value)
    botslib.change(u'''UPDATE persist
                        SET content=%(content)s
                        WHERE domein=%(domein)s
                        AND botskey=%(botskey)s''',
                        {'domein':domein,'botskey':botskey,'content':pickled})
def persist_add_update(domein,botskey,value):
    ''' store a persistent value; add the record, or update it if already there.
    '''
    try:
        persist_add(domein,botskey,value)
    except botslib.PersistError:    #was a bare 'except:'; persist_add wraps all its failures in PersistError
        persist_update(domein,botskey,value)
def persist_delete(domein,botskey):
    ''' delete a persistent value from the db; no error if the record does not exist.
    '''
    botslib.change(u'''DELETE FROM persist
                        WHERE domein=%(domein)s
                        AND botskey=%(botskey)s''',
                        {'domein':domein,'botskey':botskey})
def persist_lookup(domein,botskey):
    ''' lookup persistent values in db.
        Returns the unpickled value, or None when no record exists.
    '''
    for row in botslib.query(u'''SELECT content
                                FROM persist
                                WHERE domein=%(domein)s
                                AND botskey=%(botskey)s''',
                                {'domein':domein,'botskey':botskey}):
        return pickle.loads(str(row['content']))    #str(): content may come back as unicode/buffer from the db layer
    return None
#*********************************************************************
#*** utility functions for codeconversion
#*** 2 types: codeconversion via database table ccode, and via file.
#*** 20111116: codeconversion via file is deprecated, will disappear.
#*********************************************************************
#***code conversion via database table ccode
def ccode(ccodeid,leftcode,field='rightcode'):
    ''' converts code using a db-table.
        converted value is returned, exception if not there.
    '''
    #note: 'field' is concatenated into the SQL itself (a column name can not be
    #a query parameter); only pass trusted/hard-coded field names here.
    for row in botslib.query(u'''SELECT ''' +field+ '''
                                FROM ccode
                                WHERE ccodeid_id = %(ccodeid)s
                                AND leftcode = %(leftcode)s''',
                                {'ccodeid':ccodeid,
                                'leftcode':leftcode,
                                }):
        return row[field]
    raise botslib.CodeConversionError(_(u'Value "$value" not in code-conversion, user table "$table".'),value=leftcode,table=ccodeid)
codetconversion = ccode     #alias for ccode
def safe_ccode(ccodeid,leftcode,field='rightcode'):
    ''' converts code using a db-table.
        converted value is returned; if not there, return the original code.
    '''
    try:
        return ccode(ccodeid,leftcode,field)
    except botslib.CodeConversionError:
        return leftcode
safecodetconversion = safe_ccode    #alias for safe_ccode
def reverse_ccode(ccodeid,rightcode,field='leftcode'):
    ''' as ccode but reversed lookup: convert a rightcode back to (by default)
        the leftcode; exception if not there.'''
    #note: 'field' is concatenated into the SQL itself (a column name can not be
    #a query parameter); only pass trusted/hard-coded field names here.
    for row in botslib.query(u'''SELECT ''' +field+ '''
                                FROM ccode
                                WHERE ccodeid_id = %(ccodeid)s
                                AND rightcode = %(rightcode)s''',
                                {'ccodeid':ccodeid,
                                'rightcode':rightcode,
                                }):
        return row[field]
    raise botslib.CodeConversionError(_(u'Value "$value" not in code-conversion, user table "$table".'),value=rightcode,table=ccodeid)
rcodetconversion = reverse_ccode    #alias for reverse_ccode
def safe_reverse_ccode(ccodeid,rightcode,field='leftcode'):
    ''' as safe_ccode but reversed lookup: convert a rightcode back to (by
        default) the leftcode; return the original code if it is not in
        the table.
    '''
    try:
        #bug fix: this used to call ccode(), so the lookup was not reversed at all
        return reverse_ccode(ccodeid,rightcode,field)
    except botslib.CodeConversionError:
        return rightcode
safercodetconversion = safe_reverse_ccode   #alias for safe_reverse_ccode
def getcodeset(ccodeid,leftcode,field='rightcode'):
    ''' Get a code set: all result rows for (ccodeid,leftcode), as a list.
    '''
    #note: 'field' is concatenated into the SQL itself (a column name can not be
    #a query parameter); only pass trusted/hard-coded field names here.
    return list(botslib.query(u'''SELECT ''' +field+ '''
                                FROM ccode
                                WHERE ccodeid_id = %(ccodeid)s
                                AND leftcode = %(leftcode)s''',
                                {'ccodeid':ccodeid,
                                'leftcode':leftcode,
                                }))
#***code conversion via file. 20111116: depreciated
def safecodeconversion(modulename,value):
    ''' converts code using a codelist imported from a file in the
        codeconversions directory (place/module looked up via bots.ini).
        Returns the converted value; returns 'value' itself when it is
        not in the codelist. (file-based conversion is depreciated)
    '''
    module,filename = botslib.botsimport('codeconversions',modulename)
    conversiontable = module.codeconversions
    if value in conversiontable:
        return conversiontable[value]
    return value
def codeconversion(modulename,value):
    ''' converts code using a codelist.
        converted value is returned; CodeConversionError if value is not in the codelist.
        codelist is first imported from file in codeconversions (lookup right place/module in bots.ini)
    '''
    module,filename = botslib.botsimport('codeconversions',modulename)
    try:
        return module.codeconversions[value]
    except KeyError:
        raise botslib.CodeConversionError(_(u'Value "$value" not in file for codeconversion "$filename".'),value=value,filename=filename)
def safercodeconversion(modulename,value):
    ''' as safecodeconversion but reverses the dictionary first:
        returns 'value' itself when it is not in the reversed codelist.
        The reversed dict is cached on the imported module; note that
        duplicate values in codeconversions collapse to a single key
        when reversed.
    '''
    module,filename = botslib.botsimport('codeconversions',modulename)
    if not hasattr(module,'botsreversed'+'codeconversions'):
        #build the reversed mapping once and cache it on the module object
        reversedict = dict((value,key) for key,value in module.codeconversions.items())
        setattr(module,'botsreversed'+'codeconversions',reversedict)
    try:
        return module.botsreversedcodeconversions[value]
    except KeyError:
        return value
def rcodeconversion(modulename,value):
    ''' as codeconversion but reverses the dictionary first;
        CodeConversionError when 'value' is not in the reversed codelist.
        The reversed dict is cached on the imported module; note that
        duplicate values in codeconversions collapse to a single key
        when reversed.
    '''
    module,filename = botslib.botsimport('codeconversions',modulename)
    if not hasattr(module,'botsreversed'+'codeconversions'):
        #build the reversed mapping once and cache it on the module object
        reversedict = dict((value,key) for key,value in module.codeconversions.items())
        setattr(module,'botsreversed'+'codeconversions',reversedict)
    try:
        return module.botsreversedcodeconversions[value]
    except KeyError:
        raise botslib.CodeConversionError(_(u'Value "$value" not in file for reversed codeconversion "$filename".'),value=value,filename=filename)
#*********************************************************************
#*** utily functions for calculating/generating/checking EAN/GTIN/GLN
#*********************************************************************
def calceancheckdigit(ean):
    ''' Calculate the check digit of an EAN/GTIN passed without its check
        digit; the digit is returned as a one-character string.
    '''
    try:
        if not ean.isdigit():
            raise botslib.EanError(_(u'GTIN "$ean" should be string with only numericals'),ean=ean)
    except AttributeError:      #no .isdigit(): ean is not a string at all
        raise botslib.EanError(_(u'GTIN "$ean" should be string, but is a "$type"'),ean=ean,type=type(ean))
    #weights alternate 3,1,3,1,... counted from the rightmost digit
    total = 0
    for position, digit in enumerate(reversed(ean)):
        total += int(digit) * (3 if position % 2 == 0 else 1)
    return str((1000-total)%10)
def calceancheckdigit2(ean):
    ''' Alternative implementation of the EAN check digit calculation:
        same result as calceancheckdigit, but without input validation.
    '''
    #weights 3,1,3,1,... counted from the rightmost digit
    weights = [3,1] * len(ean)
    weighted = sum(int(digit)*weight for digit,weight in zip(reversed(ean),weights))
    return str((1000 - weighted) % 10)
def checkean(ean):
    ''' input: EAN including its check digit;
        returns True when the check digit is valid, False when it is not.'''
    bodydigits = ean[:-1]
    return ean[-1] == calceancheckdigit(bodydigits)
def addeancheckdigit(ean):
    ''' input: EAN without check digit;
        returns the EAN with its calculated check digit appended.'''
    checkdigit = calceancheckdigit(ean)
    return ean + checkdigit
#*********************************************************************
#*** div utily functions for mappings
#*********************************************************************
def unique(domein):
    ''' generate unique number within range domein.
        uses db to keep track of last generated number.
        if domein not used before, initialized with 1.
        Returned as a string (handy for use in mappings).
    '''
    return str(botslib.unique(domein))
def inn2out(inn,out):
    ''' copies inn-message to outmessage:
        out gets a deep copy of inn's node tree, so later changes to
        the out-message do not affect the in-message.
    '''
    out.root = copy.deepcopy(inn.root)
def useoneof(*args):
    ''' Return the first argument that evaluates as true;
        None when there is no such argument (or no arguments at all).'''
    for candidate in args:
        if candidate:
            return candidate
    return None
def dateformat(date):
    ''' for edifact: return the right format code for the date, chosen by
        its length: 8 -> '102', 12 -> '203', 16 -> '718'.
        Returns None for an empty date or an unrecognized length.'''
    if not date:
        return None
    return {8: '102', 12: '203', 16: '718'}.get(len(date))
def datemask(value,frommask,tomask):
    ''' 'value' is a date/time string formatted as described by frommask;
        return it re-formatted as described by tomask.
        Characters of tomask that do not occur in frommask are copied
        literally (eg separators like '-' or '/').
    '''
    if not value:
        return value
    #bucket the characters of value per mask character of frommask
    pools = collections.defaultdict(list)
    for maskchar,valuechar in zip(frommask,value):
        pools[maskchar].append(valuechar)
    #rebuild in tomask order, consuming the buckets; unknown mask chars are literals
    return ''.join(pools.get(maskchar,[maskchar]).pop(0) for maskchar in tomask)
| [
[
8,
0,
0.0026,
0.0026,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
7,
0,
0.009,
0.0103,
0,
0.66,
0.025,
0,
0,
1,
0,
0,
0,
0,
0
],
[
1,
1,
0.0078,
0.0026,
1,
0.22,
... | [
"'''module contains the functions to be called from user scripts'''",
"try:\n import cPickle as pickle\nexcept:\n import pickle",
" import cPickle as pickle",
" import pickle",
"import copy",
"import collections",
"from django.utils.translation import ugettext as _",
"import botslib",
"i... |
from datetime import datetime
from django.db import models
from django.utils.translation import ugettext as _
'''
django is not excellent in generating db. But they have provided a way to customize the generated database using SQL. see bots/sql/*.
'''
STATUST = [
(0, _(u'Open')),
(1, _(u'Error')),
(2, _(u'Stuck')),
(3, _(u'Done')),
]
STATUS = [
(1,_(u'process')),
(3,_(u'discarded')),
(200,_(u'FileReceive')),
(210,_(u'RawInfile')),
(215,_(u'Mimein')),
(220,_(u'Infile')),
(230,_(u'Set for preprocess')),
(231,_(u'Preprocess')),
(232,_(u'Set for preprocess')),
(233,_(u'Preprocess')),
(234,_(u'Set for preprocess')),
(235,_(u'Preprocess')),
(236,_(u'Set for preprocess')),
(237,_(u'Preprocess')),
(238,_(u'Set for preprocess')),
(239,_(u'Preprocess')),
(300,_(u'Translate')),
(310,_(u'Parsed')),
(320,_(u'Splitup')),
(330,_(u'Translated')),
(400,_(u'Merged')),
(500,_(u'Outfile')),
(510,_(u'RawOutfile')),
(520,_(u'FileSend')),
]
EDITYPES = [
('csv', _(u'csv')),
('database', _(u'database (old)')),
('db', _(u'db')),
('edifact', _(u'edifact')),
('email-confirmation',_(u'email-confirmation')),
('fixed', _(u'fixed')),
('idoc', _(u'idoc')),
('json', _(u'json')),
('jsonnocheck', _(u'jsonnocheck')),
('mailbag', _(u'mailbag')),
('raw', _(u'raw')),
('template', _(u'template')),
('templatehtml', _(u'template-html')),
('tradacoms', _(u'tradacoms')),
('xml', _(u'xml')),
('xmlnocheck', _(u'xmlnocheck')),
('x12', _(u'x12')),
]
INOROUT = (
('in', _(u'in')),
('out', _(u'out')),
)
CHANNELTYPE = (
('file', _(u'file')),
('smtp', _(u'smtp')),
('smtps', _(u'smtps')),
('smtpstarttls', _(u'smtpstarttls')),
('pop3', _(u'pop3')),
('pop3s', _(u'pop3s')),
('pop3apop', _(u'pop3apop')),
('imap4', _(u'imap4')),
('imap4s', _(u'imap4s')),
('ftp', _(u'ftp')),
('ftps', _(u'ftps (explicit)')),
('ftpis', _(u'ftps (implicit)')),
('sftp', _(u'sftp (ssh)')),
('xmlrpc', _(u'xmlrpc')),
('mimefile', _(u'mimefile')),
('communicationscript', _(u'communicationscript')),
('db', _(u'db')),
('database', _(u'database (old)')),
('intercommit', _(u'intercommit')),
)
CONFIRMTYPE = [
('ask-email-MDN',_(u'ask an email confirmation (MDN) when sending')),
('send-email-MDN',_(u'send an email confirmation (MDN) when receiving')),
('ask-x12-997',_(u'ask a x12 confirmation (997) when sending')),
('send-x12-997',_(u'send a x12 confirmation (997) when receiving')),
('ask-edifact-CONTRL',_(u'ask an edifact confirmation (CONTRL) when sending')),
('send-edifact-CONTRL',_(u'send an edifact confirmation (CONTRL) when receiving')),
]
RULETYPE = (
('all',_(u'all')),
('route',_(u'route')),
('channel',_(u'channel')),
('frompartner',_(u'frompartner')),
('topartner',_(u'topartner')),
('messagetype',_(u'messagetype')),
)
ENCODE_MIME = (
('always',_(u'base64')),
('never',_(u'never')),
('ascii',_(u'base64 if not ascii')),
)
class StripCharField(models.CharField):
    ''' CharField that strips leading/trailing whitespace before saving to
        the database; this is not default in django #%^&*'''
    def get_db_prep_value(self, value,*args,**kwargs):
        """Returns field's value prepared for interacting with the database
        backend.
        Used by the default implementations of ``get_db_prep_save`` and
        ``get_db_prep_lookup``.
        """
        #basestring: this code base is python 2 (covers both str and unicode)
        if isinstance(value, basestring):
            return value.strip()
        else:
            return value
class botsmodel(models.Model):
    ''' abstract base class for all bots models: replaces django's cascading
        delete with ON DELETE SET NULL-like behaviour. '''
    class Meta:
        abstract = True
    def delete(self, *args, **kwargs):
        ''' bots does not use cascaded deletes!; so for delete: set references to null'''
        self.clear_nullable_related()
        super(botsmodel, self).delete(*args, **kwargs)
    def clear_nullable_related(self):
        """
        Recursively clears any nullable foreign key fields on related objects.
        Django is hard-wired for cascading deletes, which is very dangerous for
        us. This simulates ON DELETE SET NULL behavior manually.
        """
        #NOTE(review): _meta.get_all_related_objects() is an old django API;
        #confirm it exists in the django version this code base targets.
        for related in self._meta.get_all_related_objects():
            accessor = related.get_accessor_name()
            related_set = getattr(self, accessor)
            if related.field.null:
                related_set.clear()     #nullable FK: just detach the related objects
            else:
                #non-nullable FK: recurse so the related objects' own relations are detached
                for related_object in related_set.all():
                    related_object.clear_nullable_related()
#***********************************************************************************
#******** written by webserver ********************************************************
#***********************************************************************************
class confirmrule(botsmodel):
    ''' rule that determines when bots asks for or sends confirmations
        (email MDN, x12 997, edifact CONTRL); see CONFIRMTYPE/RULETYPE. '''
    #~ id = models.IntegerField(primary_key=True)
    active = models.BooleanField(default=False)
    confirmtype = StripCharField(max_length=35,choices=CONFIRMTYPE)
    ruletype = StripCharField(max_length=35,choices=RULETYPE)
    negativerule = models.BooleanField(default=False)
    frompartner = models.ForeignKey('partner',related_name='cfrompartner',null=True,blank=True)
    topartner = models.ForeignKey('partner',related_name='ctopartner',null=True,blank=True)
    #~ idroute = models.ForeignKey('routes',null=True,blank=True,verbose_name='route')
    idroute = StripCharField(max_length=35,null=True,blank=True,verbose_name=_(u'route'))
    idchannel = models.ForeignKey('channel',null=True,blank=True,verbose_name=_(u'channel'))
    editype = StripCharField(max_length=35,choices=EDITYPES,blank=True)
    messagetype = StripCharField(max_length=35,blank=True)
    rsrv1 = StripCharField(max_length=35,blank=True,null=True) #added 20100501
    rsrv2 = models.IntegerField(null=True) #added 20100501
    def __unicode__(self):
        return unicode(self.confirmtype) + u' ' + unicode(self.ruletype)
    class Meta:
        db_table = 'confirmrule'
        verbose_name = _(u'confirm rule')
        ordering = ['confirmtype','ruletype']
class ccodetrigger(botsmodel):
    ''' type of user code; entries of the ccode table refer to a ccodetrigger. '''
    ccodeid = StripCharField(primary_key=True,max_length=35,verbose_name=_(u'type code'))
    ccodeid_desc = StripCharField(max_length=35,null=True,blank=True)   #free-text description of the code type
    def __unicode__(self):
        return unicode(self.ccodeid)
    class Meta:
        db_table = 'ccodetrigger'
        verbose_name = _(u'user code type')
        ordering = ['ccodeid']
class ccode(botsmodel):
#~ id = models.IntegerField(primary_key=True) #added 20091221
ccodeid = models.ForeignKey(ccodetrigger,verbose_name=_(u'type code'))
leftcode = StripCharField(max_length=35,db_index=True)
rightcode = StripCharField(max_length=35,db_index=True)
attr1 = StripCharField(max_length=35,blank=True)
attr2 = StripCharField(max_length=35,blank=True)
attr3 = StripCharField(max_length=35,blank=True)
attr4 = StripCharField(max_length=35,blank=True)
attr5 = StripCharField(max_length=35,blank=True)
attr6 = StripCharField(max_length=35,blank=True)
attr7 = StripCharField(max_length=35,blank=True)
attr8 = StripCharField(max_length=35,blank=True)
def __unicode__(self):
return unicode(self.ccodeid) + u' ' + unicode(self.leftcode) + u' ' + unicode(self.rightcode)
class Meta:
db_table = 'ccode'
verbose_name = _(u'user code')
unique_together = (('ccodeid','leftcode','rightcode'),)
ordering = ['ccodeid']
class channel(botsmodel):
idchannel = StripCharField(max_length=35,primary_key=True)
inorout = StripCharField(max_length=35,choices=INOROUT,verbose_name=_(u'in/out'))
type = StripCharField(max_length=35,choices=CHANNELTYPE) #protocol type
charset = StripCharField(max_length=35,default=u'us-ascii')
host = StripCharField(max_length=256,blank=True)
port = models.PositiveIntegerField(default=0,blank=True,null=True)
username = StripCharField(max_length=35,blank=True)
secret = StripCharField(max_length=35,blank=True,verbose_name=_(u'password'))
starttls = models.BooleanField(default=False,verbose_name='No check from-address',help_text=_(u"Do not check if an incoming 'from' email addresses is known.")) #20091027: used as 'no check on "from:" email address'
apop = models.BooleanField(default=False,verbose_name='No check to-address',help_text=_(u"Do not check if an incoming 'to' email addresses is known.")) #not used anymore (is in 'type' now) #20110104: used as 'no check on "to:" email address'
remove = models.BooleanField(default=False,help_text=_(u'For in-channels: remove the edi files after successful reading. Note: in production you do want to remove the edi files, else these are read over and over again!'))
path = StripCharField(max_length=256,blank=True) #different from host - in ftp both are used
filename = StripCharField(max_length=35,blank=True,help_text=_(u'For "type" ftp and file; read or write this filename. Wildcards allowed, eg "*.edi". Note for out-channels: if no wildcard is used, all edi message are written to one file.'))
lockname = StripCharField(max_length=35,blank=True,help_text=_(u'When reading or writing edi files in this directory use this file to indicate a directory lock.'))
syslock = models.BooleanField(default=False,help_text=_(u'Use system file locking for reading & writing edi files on windows, *nix.'))
parameters = StripCharField(max_length=70,blank=True)
ftpaccount = StripCharField(max_length=35,blank=True)
ftpactive = models.BooleanField(default=False)
ftpbinary = models.BooleanField(default=False)
askmdn = StripCharField(max_length=17,blank=True,choices=ENCODE_MIME,verbose_name=_(u'mime encoding'),help_text=_(u'Should edi-files be base64-encoded in email. Using base64 for edi (default) is often a good choice.')) #not used anymore 20091019: 20100703: used to indicate mime-encoding
sendmdn = StripCharField(max_length=17,blank=True) #not used anymore 20091019
mdnchannel = StripCharField(max_length=35,blank=True) #not used anymore 20091019
archivepath = StripCharField(max_length=256,blank=True,verbose_name=_(u'Archive path'),help_text=_(u'Write incoming or outgoing edi files to an archive. Use absolute or relative path; relative path is relative to bots directory. Eg: "botssys/archive/mychannel".')) #added 20091028
desc = models.TextField(max_length=256,null=True,blank=True)
rsrv1 = StripCharField(max_length=35,blank=True,null=True) #added 20100501
rsrv2 = models.IntegerField(null=True,blank=True,verbose_name=_(u'Max seconds'),help_text=_(u'Max seconds used for the in-communication time for this channel.')) #added 20100501. 20110906: max communication time.
class Meta:
ordering = ['idchannel']
db_table = 'channel'
def __unicode__(self):
return self.idchannel
class partner(botsmodel):
    ''' EDI partner, or a partner group when isgroup=True. '''
    idpartner = StripCharField(max_length=35,primary_key=True,verbose_name=_(u'partner identification'))
    active = models.BooleanField(default=False)
    isgroup = models.BooleanField(default=False)
    name = StripCharField(max_length=256) #only used for user information
    mail = StripCharField(max_length=256,blank=True)
    cc = models.EmailField(max_length=256,blank=True)
    mail2 = models.ManyToManyField(channel, through='chanpar',blank=True)   #per-channel email addresses, via chanpar
    group = models.ManyToManyField("self",db_table='partnergroup',blank=True,symmetrical=False,limit_choices_to = {'isgroup': True})
    rsrv1 = StripCharField(max_length=35,blank=True,null=True) #added 20100501
    rsrv2 = models.IntegerField(null=True) #added 20100501
    class Meta:
        ordering = ['idpartner']
        db_table = 'partner'
    def __unicode__(self):
        return unicode(self.idpartner)
class chanpar(botsmodel):
    ''' per-partner, per-channel email address; the through-table of
        partner.mail2. '''
    #~ id = models.IntegerField(primary_key=True) #added 20091221
    idpartner = models.ForeignKey(partner,verbose_name=_(u'partner'))
    idchannel = models.ForeignKey(channel,verbose_name=_(u'channel'))
    mail = StripCharField(max_length=256)
    cc = models.EmailField(max_length=256,blank=True) #added 20091111
    askmdn = models.BooleanField(default=False) #not used anymore 20091019
    sendmdn = models.BooleanField(default=False) #not used anymore 20091019
    class Meta:
        unique_together = (("idpartner","idchannel"),)
        db_table = 'chanpar'
        verbose_name = _(u'email address per channel')
        verbose_name_plural = _(u'email address per channel')
    def __unicode__(self):
        return str(self.idpartner) + ' ' + str(self.idchannel) + ' ' + str(self.mail)
class translate(botsmodel):
#~ id = models.IntegerField(primary_key=True)
active = models.BooleanField(default=False)
fromeditype = StripCharField(max_length=35,choices=EDITYPES,help_text=_(u'Editype to translate from.'))
frommessagetype = StripCharField(max_length=35,help_text=_(u'Messagetype to translate from.'))
alt = StripCharField(max_length=35,null=False,blank=True,verbose_name=_(u'Alternative translation'),help_text=_(u'Do this translation only for this alternative translation.'))
frompartner = models.ForeignKey(partner,related_name='tfrompartner',null=True,blank=True,help_text=_(u'Do this translation only for this frompartner.'))
topartner = models.ForeignKey(partner,related_name='ttopartner',null=True,blank=True,help_text=_(u'Do this translation only for this topartner.'))
tscript = StripCharField(max_length=35,help_text=_(u'User mapping script to use for translation.'))
toeditype = StripCharField(max_length=35,choices=EDITYPES,help_text=_(u'Editype to translate to.'))
tomessagetype = StripCharField(max_length=35,help_text=_(u'Messagetype to translate to.'))
desc = models.TextField(max_length=256,null=True,blank=True)
rsrv1 = StripCharField(max_length=35,blank=True,null=True) #added 20100501
rsrv2 = models.IntegerField(null=True) #added 20100501
class Meta:
db_table = 'translate'
verbose_name = _(u'translation')
ordering = ['fromeditype','frommessagetype']
def __unicode__(self):
return unicode(self.fromeditype) + u' ' + unicode(self.frommessagetype) + u' ' + unicode(self.alt) + u' ' + unicode(self.frompartner) + u' ' + unicode(self.topartner)
class routes(botsmodel):
#~ id = models.IntegerField(primary_key=True)
idroute = StripCharField(max_length=35,db_index=True,help_text=_(u'identification of route; one route can consist of multiple parts having the same "idroute".'))
seq = models.PositiveIntegerField(default=1,help_text=_(u'for routes consisting of multiple parts, "seq" indicates the order these parts are run.'))
active = models.BooleanField(default=False)
fromchannel = models.ForeignKey(channel,related_name='rfromchannel',null=True,blank=True,verbose_name=_(u'incoming channel'),limit_choices_to = {'inorout': 'in'})
fromeditype = StripCharField(max_length=35,choices=EDITYPES,blank=True,help_text=_(u'the editype of the incoming edi files.'))
frommessagetype = StripCharField(max_length=35,blank=True,help_text=_(u'the messagetype of incoming edi files. For edifact: messagetype=edifact; for x12: messagetype=x12.'))
tochannel = models.ForeignKey(channel,related_name='rtochannel',null=True,blank=True,verbose_name=_(u'outgoing channel'),limit_choices_to = {'inorout': 'out'})
toeditype = StripCharField(max_length=35,choices=EDITYPES,blank=True,help_text=_(u'Only edi files with this editype to this outgoing channel.'))
tomessagetype = StripCharField(max_length=35,blank=True,help_text=_(u'Only edi files of this messagetype to this outgoing channel.'))
alt = StripCharField(max_length=35,default=u'',blank=True,verbose_name='Alternative translation',help_text=_(u'Only use if there is more than one "translation" for the same editype and messagetype. Advanced use, seldom needed.'))
frompartner = models.ForeignKey(partner,related_name='rfrompartner',null=True,blank=True,help_text=_(u'The frompartner of the incoming edi files. Seldom needed.'))
topartner = models.ForeignKey(partner,related_name='rtopartner',null=True,blank=True,help_text=_(u'The topartner of the incoming edi files. Seldom needed.'))
frompartner_tochannel = models.ForeignKey(partner,related_name='rfrompartner_tochannel',null=True,blank=True,help_text=_(u'Only edi files from this partner/partnergroup for this outgoing channel'))
topartner_tochannel = models.ForeignKey(partner,related_name='rtopartner_tochannel',null=True,blank=True,help_text=_(u'Only edi files to this partner/partnergroup to this channel'))
testindicator = StripCharField(max_length=1,blank=True,help_text=_(u'Only edi files with this testindicator to this outgoing channel.'))
translateind = models.BooleanField(default=True,blank=True,verbose_name='translate',help_text=_(u'Do a translation in this route.'))
notindefaultrun = models.BooleanField(default=False,blank=True,help_text=_(u'Do not use this route in a normal run. Advanced, related to scheduling specific routes or not.'))
desc = models.TextField(max_length=256,null=True,blank=True)
rsrv1 = StripCharField(max_length=35,blank=True,null=True) #added 20100501
rsrv2 = models.IntegerField(null=True) #added 20100501
defer = models.BooleanField(default=False,blank=True,help_text=_(u'Set ready for communication, but defer actual communication (this is done in another route)')) #added 20100601
class Meta:
db_table = 'routes'
verbose_name = _(u'route')
unique_together = (("idroute","seq"),)
ordering = ['idroute','seq']
def __unicode__(self):
return unicode(self.idroute) + u' ' + unicode(self.seq)
#***********************************************************************************
#******** written by engine ********************************************************
#***********************************************************************************
class filereport(botsmodel):
#~ id = models.IntegerField(primary_key=True)
idta = models.IntegerField(db_index=True)
reportidta = models.IntegerField(db_index=True)
statust = models.IntegerField(choices=STATUST)
retransmit = models.IntegerField()
idroute = StripCharField(max_length=35)
fromchannel = StripCharField(max_length=35)
tochannel = StripCharField(max_length=35)
frompartner = StripCharField(max_length=35)
topartner = StripCharField(max_length=35)
frommail = StripCharField(max_length=256)
tomail = StripCharField(max_length=256)
ineditype = StripCharField(max_length=35,choices=EDITYPES)
inmessagetype = StripCharField(max_length=35)
outeditype = StripCharField(max_length=35,choices=EDITYPES)
outmessagetype = StripCharField(max_length=35)
incontenttype = StripCharField(max_length=35)
outcontenttype = StripCharField(max_length=35)
nrmessages = models.IntegerField()
ts = models.DateTimeField(db_index=True) #copied from ta
infilename = StripCharField(max_length=256)
inidta = models.IntegerField(null=True) #not used anymore
outfilename = StripCharField(max_length=256)
outidta = models.IntegerField()
divtext = StripCharField(max_length=35)
errortext = StripCharField(max_length=2048)
rsrv1 = StripCharField(max_length=35,blank=True,null=True) #added 20100501
rsrv2 = models.IntegerField(null=True) #added 20100501
class Meta:
db_table = 'filereport'
unique_together = (("idta","reportidta"),)
class mutex(botsmodel):
    ''' db-level mutex record, written by the engine. '''
    #specific SQL is used (database defaults are used)
    mutexk = models.IntegerField(primary_key=True)
    mutexer = models.IntegerField()
    ts = models.DateTimeField()
    class Meta:
        db_table = 'mutex'
class persist(botsmodel):
    ''' persistent key/value store; (domein,botskey) is the logical key. '''
    #OK, this has gone wrong. There is no primary key here, so django generates this. But there is no ID in the custom sql.
    #Django still uses the ID in sql manager. This leads to an error in snapshot plugin. Disabled this in snapshot function; to fix this really database has to be changed.
    #specific SQL is used (database defaults are used)
    domein = StripCharField(max_length=35)
    botskey = StripCharField(max_length=35)
    content = StripCharField(max_length=1024)   #the stored value; elsewhere in bots filled with pickled data -- verify against the persist_* functions
    ts = models.DateTimeField()
    class Meta:
        db_table = 'persist'
        unique_together = (("domein","botskey"),)
class report(botsmodel):
idta = models.IntegerField(primary_key=True) #rename to reportidta
lastreceived = models.IntegerField()
lastdone = models.IntegerField()
lastopen = models.IntegerField()
lastok = models.IntegerField()
lasterror = models.IntegerField()
send = models.IntegerField()
processerrors = models.IntegerField()
ts = models.DateTimeField() #copied from (runroot)ta
type = StripCharField(max_length=35)
status = models.BooleanField()
rsrv1 = StripCharField(max_length=35,blank=True,null=True) #added 20100501
rsrv2 = models.IntegerField(null=True) ##added 20100501
class Meta:
db_table = 'report'
#~ #trigger for sqlite to use local time (instead of utc). I can not add this to sqlite specific sql code, as django does not allow complex (begin ... end) sql here.
#~ CREATE TRIGGER uselocaltime AFTER INSERT ON ta
#~ BEGIN
#~ UPDATE ta
#~ SET ts = datetime('now','localtime')
#~ WHERE idta = new.idta ;
#~ END;
class ta(botsmodel):
#specific SQL is used (database defaults are used)
idta = models.AutoField(primary_key=True)
statust = models.IntegerField(choices=STATUST)
status = models.IntegerField(choices=STATUS)
parent = models.IntegerField(db_index=True)
child = models.IntegerField()
script = models.IntegerField(db_index=True)
idroute = StripCharField(max_length=35)
filename = StripCharField(max_length=256)
frompartner = StripCharField(max_length=35)
topartner = StripCharField(max_length=35)
fromchannel = StripCharField(max_length=35)
tochannel = StripCharField(max_length=35)
editype = StripCharField(max_length=35)
messagetype = StripCharField(max_length=35)
alt = StripCharField(max_length=35)
divtext = StripCharField(max_length=35)
merge = models.BooleanField()
nrmessages = models.IntegerField()
testindicator = StripCharField(max_length=10) #0:production; 1:test. Length to 1?
reference = StripCharField(max_length=70)
frommail = StripCharField(max_length=256)
tomail = StripCharField(max_length=256)
charset = StripCharField(max_length=35)
statuse = models.IntegerField() #obsolete 20091019 but still used by intercommit comm. module
retransmit = models.BooleanField() #20070831: only retransmit, not rereceive
contenttype = StripCharField(max_length=35)
errortext = StripCharField(max_length=2048)
ts = models.DateTimeField()
confirmasked = models.BooleanField() #added 20091019; confirmation asked or send
confirmed = models.BooleanField() #added 20091019; is confirmation received (when asked)
confirmtype = StripCharField(max_length=35) #added 20091019
confirmidta = models.IntegerField() #added 20091019
envelope = StripCharField(max_length=35) #added 20091024
botskey = StripCharField(max_length=35) #added 20091024
cc = StripCharField(max_length=512) #added 20091111
rsrv1 = StripCharField(max_length=35) #added 20100501
rsrv2 = models.IntegerField(null=True) #added 20100501
rsrv3 = StripCharField(max_length=35) #added 20100501
rsrv4 = models.IntegerField(null=True) #added 20100501
class Meta:
db_table = 'ta'
class uniek(botsmodel):
    ''' per-domain counter (verbose name 'counter'). '''
    #specific SQL is used (database defaults are used)
    domein = StripCharField(max_length=35,primary_key=True)
    nummer = models.IntegerField()  #current counter value for this domain
    class Meta:
        db_table = 'uniek'
        verbose_name = _(u'counter')
        ordering = ['domein']
| [
[
1,
0,
0.0023,
0.0023,
0,
0.66,
0,
426,
0,
1,
0,
0,
426,
0,
0
],
[
1,
0,
0.0046,
0.0023,
0,
0.66,
0.037,
40,
0,
1,
0,
0,
40,
0,
0
],
[
1,
0,
0.0068,
0.0023,
0,
0.6... | [
"from datetime import datetime",
"from django.db import models",
"from django.utils.translation import ugettext as _",
"'''\ndjango is not excellent in generating db. But they have provided a way to customize the generated database using SQL. see bots/sql/*.\n'''",
"STATUST = [\n (0, _(u'Open')),\n (1... |
#!/usr/bin/env python
import os
import optparse
import subprocess
import sys
here = os.path.dirname(__file__)
def main():
    ''' Minify javascript files with the Google Closure Compiler.
        With no file arguments the default admin scripts are compressed;
        each <name>.js is written to <name>.min.js next to it. '''
    usage = "usage: %prog [file1..fileN]"
    description = """With no file paths given this script will automatically
compress all jQuery-based files of the admin app. Requires the Google Closure
Compiler library and Java version 6 or later."""
    parser = optparse.OptionParser(usage, description=description)
    parser.add_option("-c", dest="compiler", default="~/bin/compiler.jar",
                      help="path to Closure Compiler jar file")
    parser.add_option("-v", "--verbose",
                      action="store_true", dest="verbose")
    parser.add_option("-q", "--quiet",
                      action="store_false", dest="verbose")
    (options, args) = parser.parse_args()
    compiler = os.path.expanduser(options.compiler)
    if not os.path.exists(compiler):
        sys.exit("Google Closure compiler jar file %s not found. Please use the -c option to specify the path." % compiler)
    if not args:
        if options.verbose:
            sys.stdout.write("No filenames given; defaulting to admin scripts\n")
        args = [os.path.join(here, f) for f in [
            "actions.js", "collapse.js", "inlines.js", "prepopulate.js"]]
    for arg in args:
        if not arg.endswith(".js"):
            arg = arg + ".js"
        to_compress = os.path.expanduser(arg)
        if os.path.exists(to_compress):
            #NOTE(review): rsplit(".js") removes *every* ".js" occurrence in
            #the name, not only the extension -- fine for the default files.
            to_compress_min = "%s.min.js" % "".join(arg.rsplit(".js"))
            #NOTE(review): cmd.split() breaks for paths containing spaces.
            cmd = "java -jar %s --js %s --js_output_file %s" % (compiler, to_compress, to_compress_min)
            if options.verbose:
                sys.stdout.write("Running: %s\n" % cmd)
            subprocess.call(cmd.split())
        else:
            sys.stdout.write("File %s not found. Sure it exists?\n" % to_compress)
if __name__ == '__main__':
main()
| [
[
1,
0,
0.0426,
0.0213,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0638,
0.0213,
0,
0.66,
0.1667,
323,
0,
1,
0,
0,
323,
0,
0
],
[
1,
0,
0.0851,
0.0213,
0,
... | [
"import os",
"import optparse",
"import subprocess",
"import sys",
"here = os.path.dirname(__file__)",
"def main():\n usage = \"usage: %prog [file1..fileN]\"\n description = \"\"\"With no file paths given this script will automatically\ncompress all jQuery-based files of the admin app. Requires the ... |
#!/usr/bin/env python
import os
import optparse
import subprocess
import sys
here = os.path.dirname(__file__)
def main():
    """Compress JavaScript files with the Google Closure Compiler.

    With no file paths given, defaults to the admin app's jQuery-based
    scripts.  For each input ``foo.js`` a minified ``foo.min.js`` is
    written next to it.
    """
    usage = "usage: %prog [file1..fileN]"
    description = """With no file paths given this script will automatically
compress all jQuery-based files of the admin app. Requires the Google Closure
Compiler library and Java version 6 or later."""
    parser = optparse.OptionParser(usage, description=description)
    parser.add_option("-c", dest="compiler", default="~/bin/compiler.jar",
                      help="path to Closure Compiler jar file")
    parser.add_option("-v", "--verbose",
                      action="store_true", dest="verbose")
    parser.add_option("-q", "--quiet",
                      action="store_false", dest="verbose")
    (options, args) = parser.parse_args()

    compiler = os.path.expanduser(options.compiler)
    if not os.path.exists(compiler):
        sys.exit("Google Closure compiler jar file %s not found. Please use the -c option to specify the path." % compiler)

    if not args:
        if options.verbose:
            sys.stdout.write("No filenames given; defaulting to admin scripts\n")
        args = [os.path.join(here, f) for f in [
            "actions.js", "collapse.js", "inlines.js", "prepopulate.js"]]

    for arg in args:
        if not arg.endswith(".js"):
            arg = arg + ".js"
        to_compress = os.path.expanduser(arg)
        if os.path.exists(to_compress):
            # Derive the output name from the *expanded* path (the original
            # used the raw argument, so "~/x.js" wrote into a literal "~"
            # directory) and strip only the trailing ".js" (rsplit removed
            # every occurrence, mangling names like "x.js.js").
            to_compress_min = to_compress[:-3] + ".min.js"
            # Pass an argument list instead of cmd.split() so paths
            # containing spaces survive intact.
            cmd = ["java", "-jar", compiler,
                   "--js", to_compress, "--js_output_file", to_compress_min]
            if options.verbose:
                sys.stdout.write("Running: %s\n" % " ".join(cmd))
            subprocess.call(cmd)
        else:
            sys.stdout.write("File %s not found. Sure it exists?\n" % to_compress)
# Allow the file to be used both as a script and as an importable module.
if __name__ == '__main__':
    main()
| [
[
1,
0,
0.0426,
0.0213,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0638,
0.0213,
0,
0.66,
0.1667,
323,
0,
1,
0,
0,
323,
0,
0
],
[
1,
0,
0.0851,
0.0213,
0,
... | [
"import os",
"import optparse",
"import subprocess",
"import sys",
"here = os.path.dirname(__file__)",
"def main():\n usage = \"usage: %prog [file1..fileN]\"\n description = \"\"\"With no file paths given this script will automatically\ncompress all jQuery-based files of the admin app. Requires the ... |
''' Base library for bots. Botslib should not import from other Bots-modules.'''
import sys
import os
import codecs
import traceback
import subprocess
import socket #to set a time-out for connections
import string
import urlparse
import urllib
import platform
import django
from django.utils.translation import ugettext as _
#Bots-modules
from botsconfig import *
import botsglobal #as botsglobal
def botsinfo():
return [
(_(u'server name'),botsglobal.ini.get('webserver','name','bots-webserver')),
(_(u'served at port'),botsglobal.ini.getint('webserver','port',8080)),
(_(u'platform'),platform.platform()),
(_(u'machine'),platform.machine()),
(_(u'python version'),sys.version),
(_(u'django version'),django.VERSION),
(_(u'bots version'),botsglobal.version),
(_(u'bots installation path'),botsglobal.ini.get('directories','botspath')),
(_(u'config path'),botsglobal.ini.get('directories','config')),
(_(u'botssys path'),botsglobal.ini.get('directories','botssys')),
(_(u'usersys path'),botsglobal.ini.get('directories','usersysabs')),
(u'DATABASE_ENGINE',botsglobal.settings.DATABASE_ENGINE),
(u'DATABASE_NAME',botsglobal.settings.DATABASE_NAME),
(u'DATABASE_USER',botsglobal.settings.DATABASE_USER),
(u'DATABASE_HOST',botsglobal.settings.DATABASE_HOST),
(u'DATABASE_PORT',botsglobal.settings.DATABASE_PORT),
(u'DATABASE_OPTIONS',botsglobal.settings.DATABASE_OPTIONS),
]
#**********************************************************/**
#**************getters/setters for some globals***********************/**
#**********************************************************/**
def get_minta4query():
    ''' Get the first idta (lower bound) used to restrict queries to the
        current run. '''
    return botsglobal.minta4query
def set_minta4query():
    ''' Initialise the global query floor once per run.

        An already-set value is left untouched; otherwise the root-idta of
        the current run (second entry on the process stack) is recorded.
    '''
    if not botsglobal.minta4query:
        botsglobal.minta4query = _Transaction.processlist[1]
def set_minta4query_retry():
    ''' Point the query floor at the first run that still holds an error,
        so a retry only sees the failed files. '''
    botsglobal.minta4query = get_idta_last_error()
    return botsglobal.minta4query
def get_idta_last_error():
    ''' Return the report-idta of the first run that still contains an
        incoming file in error (a filereport group whose statust never
        reached DONE); 0 when no error is left. '''
    for row in query('''SELECT idta
                        FROM filereport
                        GROUP BY idta
                        HAVING MAX(statust) != %(statust)s''',
                        {'statust':DONE}):
        #found incoming file with error
        for row2 in query('''SELECT min(reportidta) as min
                            FROM filereport
                            WHERE idta = %(idta)s ''',
                            {'idta':row['idta']}):
            return row2['min']
    return 0    #if no error found.
def set_minta4query_crashrecovery():
    ''' Set/return the root-idta of the last run - which is assumed to have
        crashed; 0 when no previous run is found.'''
    #script == 0 marks a root-ta; the highest one belongs to the last run
    for row in query('''SELECT max(idta) as max
                        FROM ta
                        WHERE script= 0
                        '''):
        if row['max'] is None:
            return 0    #ta-table is empty: nothing to recover
        botsglobal.minta4query = row['max']
        return botsglobal.minta4query
    return 0
def getlastrun():
    ''' Return the root-idta of the last run. '''
    return _Transaction.processlist[1] #get root-idta of last run
def setrouteid(routeid):
    ''' Remember (globally) the id of the route currently running. '''
    botsglobal.routeid = routeid
def getrouteid():
    ''' Id of the route currently running. '''
    return botsglobal.routeid
def setpreprocessnumber(statusnumber):
    ''' Initialise the counter handed out by getpreprocessnumber(). '''
    botsglobal.preprocessnumber = statusnumber
def getpreprocessnumber():
    ''' Hand out the current preprocess number, then advance it by 2. '''
    terug = botsglobal.preprocessnumber
    botsglobal.preprocessnumber +=2
    return terug
#**********************************************************/**
#***************** class Transaction *********************/**
#**********************************************************/**
class _Transaction(object):
''' abstract class for db-ta.
This class is used for communication with db-ta.
'''
#filtering values fo db handling (to avoid unknown fields in db.
filterlist=['statust','status','divtext','parent','child','script','frompartner','topartner','fromchannel','tochannel','editype','messagetype','merge',
'testindicator','reference','frommail','tomail','contenttype','errortext','filename','charset','alt','idroute','nrmessages','retransmit',
'confirmasked','confirmed','confirmtype','confirmidta','envelope','botskey','cc']
processlist=[0] #stack for bots-processes. last one is the current process; starts with 1 element in list: root
def update(self,**ta_info):
''' Updates db-ta with named-parameters/dict.
Use a filter to update only valid fields in db-ta
'''
setstring = ','.join([key+'=%('+key+')s' for key in ta_info if key in _Transaction.filterlist])
if not setstring: #nothing to update
return
ta_info['selfid'] = self.idta #always set this...I'm not sure if this is needed...take no chances
cursor = botsglobal.db.cursor()
cursor.execute(u'''UPDATE ta
SET '''+setstring+ '''
WHERE idta=%(selfid)s''',
ta_info)
botsglobal.db.commit()
cursor.close()
def delete(self):
'''Deletes current transaction '''
cursor = botsglobal.db.cursor()
cursor.execute(u'''DELETE FROM ta
WHERE idta=%(selfid)s''',
{'selfid':self.idta})
botsglobal.db.commit()
cursor.close()
def failure(self):
'''Failure: deletes all children of transaction (and children of children etc)'''
cursor = botsglobal.db.cursor()
cursor.execute(u'''SELECT idta FROM ta
WHERE idta>%(rootidta)s
AND parent=%(selfid)s''',
{'selfid':self.idta,'rootidta':get_minta4query()})
rows = cursor.fetchall()
for row in rows:
ta=OldTransaction(row['idta'])
ta.failure()
cursor.execute(u'''DELETE FROM ta
WHERE idta>%(rootidta)s
AND parent=%(selfid)s''',
{'selfid':self.idta,'rootidta':get_minta4query()})
botsglobal.db.commit()
cursor.close()
def mergefailure(self):
'''Failure while merging: all parents of transaction get status OK (turn back)'''
cursor = botsglobal.db.cursor()
cursor.execute(u'''UPDATE ta
SET statust=%(statustnew)s
WHERE idta>%(rootidta)s
AND child=%(selfid)s
AND statust=%(statustold)s''',
{'selfid':self.idta,'statustold':DONE,'statustnew':OK,'rootidta':get_minta4query()})
botsglobal.db.commit()
cursor.close()
def syn(self,*ta_vars):
'''access of attributes of transaction as ta.fromid, ta.filename etc'''
cursor = botsglobal.db.cursor()
varsstring = ','.join(ta_vars)
cursor.execute(u'''SELECT ''' + varsstring + '''
FROM ta
WHERE idta=%(selfid)s''',
{'selfid':self.idta})
result = cursor.fetchone()
for key in result.keys():
setattr(self,key,result[key])
cursor.close()
def synall(self):
'''access of attributes of transaction as ta.fromid, ta.filename etc'''
cursor = botsglobal.db.cursor()
varsstring = ','.join(self.filterlist)
cursor.execute(u'''SELECT ''' + varsstring + '''
FROM ta
WHERE idta=%(selfid)s''',
{'selfid':self.idta})
result = cursor.fetchone()
for key in result.keys():
setattr(self,key,result[key])
cursor.close()
def copyta(self,status,**ta_info):
''' copy: make a new transaction, copy '''
script = _Transaction.processlist[-1]
cursor = botsglobal.db.cursor()
cursor.execute(u'''INSERT INTO ta (script, status, parent,frompartner,topartner,fromchannel,tochannel,editype,messagetype,alt,merge,testindicator,reference,frommail,tomail,charset,contenttype,filename,idroute,nrmessages,botskey)
SELECT %(script)s,%(newstatus)s,idta,frompartner,topartner,fromchannel,tochannel,editype,messagetype,alt,merge,testindicator,reference,frommail,tomail,charset,contenttype,filename,idroute,nrmessages,botskey
FROM ta
WHERE idta=%(selfid)s''',
{'selfid':self.idta,'script':script,'newstatus':status})
newidta = cursor.lastrowid
if not newidta: #if botsglobal.settings.DATABASE_ENGINE ==
cursor.execute('''SELECT lastval() as idta''')
newidta = cursor.fetchone()['idta']
botsglobal.db.commit()
cursor.close()
newdbta = OldTransaction(newidta)
newdbta.update(**ta_info)
return newdbta
class OldTransaction(_Transaction):
def __init__(self,idta,**ta_info):
'''Use old transaction '''
self.idta = idta
self.talijst=[]
for key in ta_info.keys(): #only used by trace
setattr(self,key,ta_info[key]) #could be done better, but SQLite does not support .items()
class NewTransaction(_Transaction):
def __init__(self,**ta_info):
'''Generates new transaction, returns key of transaction '''
updatedict = dict([(key,value) for key,value in ta_info.items() if key in _Transaction.filterlist])
updatedict['script'] = _Transaction.processlist[-1]
namesstring = ','.join([key for key in updatedict])
varsstring = ','.join(['%('+key+')s' for key in updatedict])
cursor = botsglobal.db.cursor()
cursor.execute(u'''INSERT INTO ta (''' + namesstring + ''')
VALUES (''' + varsstring + ''')''',
updatedict)
self.idta = cursor.lastrowid
if not self.idta:
cursor.execute('''SELECT lastval() as idta''')
self.idta = cursor.fetchone()['idta']
botsglobal.db.commit()
cursor.close()
class NewProcess(NewTransaction):
''' Used in logging of processes. Each process is placed on stack processlist'''
def __init__(self,functionname=''):
super(NewProcess,self).__init__(filename=functionname,status=PROCESS,idroute=getrouteid())
_Transaction.processlist.append(self.idta)
def update(self,**ta_info):
super(NewProcess,self).update(**ta_info)
_Transaction.processlist.pop()
def trace_origin(ta,where=None):
''' bots traces back all from the current step/ta.
where is a dict that is used to indicate a condition.
eg: {'status':EXTERNIN}
If bots finds a ta for which this is true, the ta is added to a list.
The list is returned when all tracing is done, and contains all ta's for which 'where' is True
'''
def trace_recurse(ta):
''' recursive
walk over ta's backward (to origin).
if condition is met, add the ta to a list
'''
for idta in get_parent(ta):
donelijst.append(idta)
taparent=OldTransaction(idta=idta)
taparent.synall()
for key,value in where.items():
if getattr(taparent,key) != value:
break
else: #all where-criteria are true; check if we already have this ta
teruglijst.append(taparent)
trace_recurse(taparent)
def get_parent(ta):
''' yields the parents of a ta '''
if ta.parent: #the is a parent via the normal parent-pointer
if ta.parent not in donelijst:
yield ta.parent
else: #no parent via parent-link, so look via child-link
for row in query('''SELECT idta
FROM ta
WHERE idta>%(rootidta)s
AND child=%(idta)s''',
{'idta':ta.idta,'rootidta':get_minta4query()}):
if row['idta'] in donelijst:
continue
yield row['idta']
donelijst = []
teruglijst = []
ta.syn('parent')
trace_recurse(ta)
return teruglijst
def addinfocore(change,where,wherestring):
''' core function for add/changes information in db-ta's.
where-dict selects db-ta's, change-dict sets values;
returns the number of db-ta that have been changed.
'''
if 'rootidta' not in where:
where['rootidta']=get_minta4query()
wherestring = ' idta > %(rootidta)s AND ' + wherestring
if 'statust' not in where: #by default: look only for statust is OK
where['statust']=OK
wherestring += ' AND statust = %(statust)s '
if 'statust' not in change: #by default: new ta is OK
change['statust']= OK
counter = 0 #count the number of dbta changed
for row in query(u'''SELECT idta FROM ta WHERE '''+wherestring,where):
counter += 1
ta_from = OldTransaction(row['idta'])
ta_from.copyta(**change) #make new ta from ta_from, using parameters from change
ta_from.update(statust=DONE) #update 'old' ta
return counter
def addinfo(change,where):
    ''' Add/change information in db-ta's by copying the selected ta's;
        the status of the originals is set to DONE (see addinfocore).
        The SQL where-string is built from the keys of the where-dict.'''
    wherestring = ' AND '.join([key+'=%('+key+')s ' for key in where]) #wherestring for copy & done
    return addinfocore(change=change,where=where,wherestring=wherestring)
def updateinfo(change,where):
''' update info in ta if not set; no status change.
where-dict selects db-ta's, change-dict sets values;
returns the number of db-ta that have been changed.
'''
if 'statust' not in where:
where['statust']=OK
wherestring = ' AND '.join([key+'=%('+key+')s ' for key in where]) #wherestring for copy & done
if 'rootidta' not in where:
where['rootidta']=get_minta4query()
wherestring = ' idta > %(rootidta)s AND ' + wherestring
counter = 0 #count the number of dbta changed
for row in query(u'''SELECT idta FROM ta WHERE '''+wherestring,where):
counter += 1
ta_from = OldTransaction(row['idta'])
ta_from.synall()
defchange = {}
for key,value in change.items():
if value and not getattr(ta_from,key,None): #if there is a value and the key is not set in ta_from:
defchange[key]=value
ta_from.update(**defchange)
return counter
def changestatustinfo(change,where):
    ''' Set a new statust on the selected db-ta's in place (no copy is made).
        where-dict selects db-ta's, change is the new statust (an int);
        returns the number of db-ta that have been changed.
    '''
    if not isinstance(change,int):
        raise BotsError(_(u'change not valid: expect status to be an integer. Programming error.'))
    if 'statust' not in where:
        where['statust']=OK
    wherestring = ' AND '.join([key+'=%('+key+')s ' for key in where]) #wherestring for copy & done
    if 'rootidta' not in where:
        where['rootidta']=get_minta4query()
    wherestring = ' idta > %(rootidta)s AND ' + wherestring
    counter = 0 #count the number of dbta changed
    for row in query(u'''SELECT idta FROM ta WHERE '''+wherestring,where):
        counter += 1
        ta_from = OldTransaction(row['idta'])
        ta_from.update(statust = change)
    return counter
#**********************************************************/**
#*************************Database***********************/**
#**********************************************************/**
def set_database_lock():
    ''' Try to acquire the cross-process mutex row; True on success.
        A second bots-engine fails the INSERT (duplicate key) and gets False.
        NOTE(review): the bare except also hides real database errors -
        consider catching only the DB integrity error.
    '''
    try:
        change(u'''INSERT INTO mutex (mutexk) VALUES (1)''')
    except:
        return False
    return True
def remove_database_lock():
    ''' Release the cross-process mutex acquired by set_database_lock(). '''
    change('''DELETE FROM mutex WHERE mutexk=1''')
def query(querystring,*args):
    ''' General query. Yields the rows of the query result.

        All rows are fetched and the cursor closed before yielding, so no
        cursor stays open while the caller consumes the generator.  The
        cursor is now also closed when execute() raises (the original
        leaked it on error).
    '''
    cursor = botsglobal.db.cursor()
    try:
        cursor.execute(querystring,*args)
        results = cursor.fetchall()
    finally:
        cursor.close()
    for result in results:
        yield result
def change(querystring,*args):
    '''general insert/update. no return.
       On an execute error the transaction is rolled back and re-raised.'''
    cursor = botsglobal.db.cursor()
    try:
        cursor.execute(querystring,*args)
    except: #IntegrityError from postgresql
        botsglobal.db.rollback()
        #NOTE(review): the cursor stays open on this error path - confirm
        #whether it should be closed in a finally.
        raise
    botsglobal.db.commit()
    cursor.close()
def unique(domein):
''' generate unique number within range domain.
uses db to keep track of last generated number
if domain not used before, initialize with 1.
'''
cursor = botsglobal.db.cursor()
try:
cursor.execute(u'''UPDATE uniek SET nummer=nummer+1 WHERE domein=%(domein)s''',{'domein':domein})
cursor.execute(u'''SELECT nummer FROM uniek WHERE domein=%(domein)s''',{'domein':domein})
nummer = cursor.fetchone()['nummer']
except: # ???.DatabaseError; domein does not exist
cursor.execute(u'''INSERT INTO uniek (domein) VALUES (%(domein)s)''',{'domein': domein})
nummer = 1
if nummer > sys.maxint-2:
nummer = 1
cursor.execute(u'''UPDATE uniek SET nummer=1 WHERE domein=%(domein)s''',{'domein':domein})
botsglobal.db.commit()
cursor.close()
return nummer
def checkunique(domein, receivednumber):
''' to check of received number is sequential: value is compare with earlier received value.
if domain not used before, initialize it . '1' is the first value expected.
'''
cursor = botsglobal.db.cursor()
try:
cursor.execute(u'''SELECT nummer FROM uniek WHERE domein=%(domein)s''',{'domein':domein})
expectednumber = cursor.fetchone()['nummer'] + 1
except: # ???.DatabaseError; domein does not exist
cursor.execute(u'''INSERT INTO uniek (domein,nummer) VALUES (%(domein)s,0)''',{'domein': domein})
expectednumber = 1
if expectednumber == receivednumber:
if expectednumber > sys.maxint-2:
nummer = 1
cursor.execute(u'''UPDATE uniek SET nummer=nummer+1 WHERE domein=%(domein)s''',{'domein':domein})
terug = True
else:
terug = False
botsglobal.db.commit()
cursor.close()
return terug
def keeptrackoflastretry(domein,newlastta):
''' keep track of last automaticretrycommunication/retry
if domain not used before, initialize it . '1' is the first value expected.
'''
cursor = botsglobal.db.cursor()
try:
cursor.execute(u'''SELECT nummer FROM uniek WHERE domein=%(domein)s''',{'domein':domein})
oldlastta = cursor.fetchone()['nummer']
except: # ???.DatabaseError; domein does not exist
cursor.execute(u'''INSERT INTO uniek (domein) VALUES (%(domein)s)''',{'domein': domein})
oldlastta = 1
cursor.execute(u'''UPDATE uniek SET nummer=%(nummer)s WHERE domein=%(domein)s''',{'domein':domein,'nummer':newlastta})
botsglobal.db.commit()
cursor.close()
return oldlastta
#**********************************************************/**
#*************************Logging, Error handling********************/**
#**********************************************************/**
def sendbotserrorreport(subject,reporttext):
    ''' Mail an error report to the configured managers when the
        'sendreportiferror' setting is on.  Best effort: a failure to
        send is only logged, never raised.
    '''
    if botsglobal.ini.getboolean('settings','sendreportiferror',False):
        from django.core.mail import mail_managers
        try:
            mail_managers(subject, reporttext)
        except:
            botsglobal.logger.debug(u'Error in sending error report: %s',txtexc())
def log_session(f):
    ''' used as decorator.
        The decorated functions are logged as processes.
        Errors in these functions are caught and logged.
    '''
    def wrapper(*args,**argv):
        try:
            #register this call as a bots 'process' (pushed on the process stack)
            ta_session = NewProcess(f.__name__)
        except:
            botsglobal.logger.exception(u'System error - no new session made')
            raise
        try:
            terug =f(*args,**argv)
        except:
            txt=txtexc()
            botsglobal.logger.debug(u'Error in process: %s',txt)
            ta_session.update(statust=ERROR,errortext=txt)
            #NOTE(review): on this error path 'terug' was never bound, so the
            #'return terug' below raises UnboundLocalError instead of
            #returning None - confirm whether errors are meant to propagate.
        else:
            ta_session.update(statust=DONE)
        return terug
    return wrapper
def txtexc():
    ''' Get text from last exception.
        With the 'debug' setting on the full traceback is returned,
        otherwise only the exception line itself (limit=0).
    '''
    if botsglobal.ini:
        if botsglobal.ini.getboolean('settings','debug',False):
            limit = None
        else:
            limit=0
    else:
        limit=0
    #problems with char set for some input data that are reported in traces....so always decode this;
    terug = traceback.format_exc(limit).decode('utf-8','ignore')
    #~ botsglobal.logger.debug(u'exception %s',terug)
    if hasattr(botsglobal,'dbinfo') and botsglobal.dbinfo.drivername != 'sqlite': #sqlite does not enforce strict lengths
        return terug[-1848:]    #field size is 2048; keep the tail because more text can be prepended later.
    else:
        return terug
class ErrorProcess(NewTransaction):
''' Used in logging of errors in processes.
20110828: used in communication.py
'''
def __init__(self,functionname='',errortext='',channeldict=None):
fromchannel = tochannel = ''
if channeldict:
if channeldict['inorout'] == 'in':
fromchannel = channeldict['idchannel']
else:
tochannel = channeldict['idchannel']
super(ErrorProcess,self).__init__(filename=functionname,status=PROCESS,idroute=getrouteid(),statust=ERROR,errortext=errortext,fromchannel=fromchannel,tochannel=tochannel)
#**********************************************************/**
#*************************File handling os.path, imports etc***********************/**
#**********************************************************/**
def botsbaseimport(modulename):
    ''' Dynamically import *modulename* and return the deepest submodule.
        __import__ hands back the top-level package, so the dotted path is
        walked down with getattr.  A single leading dot is stripped first.
        Errors/exceptions are handled in calling functions.
    '''
    if modulename.startswith('.'):
        modulename = modulename[1:]
    target = __import__(modulename)
    for part in modulename.split('.')[1:]:
        target = getattr(target, part)
    return target
def botsimport(soort,modulename):
''' import modules from usersys.
return: imported module, filename imported module;
if could not be found or error in module: raise
'''
try: #__import__ is picky on the charset used. Might be different for different OS'es. So: test if charset is us-ascii
modulename.encode('ascii')
except UnicodeEncodeError: #if not us-ascii, convert to punycode
modulename = modulename.encode('punycode')
modulepath = '.'.join((botsglobal.usersysimportpath,soort,modulename)) #assemble import string
modulefile = join(botsglobal.usersysimportpath,soort,modulename) #assemble abs filename for errortexts
try:
module = botsbaseimport(modulepath)
except ImportError: #if module not found
botsglobal.logger.debug(u'no import of "%s".',modulefile)
raise
except: #other errors
txt=txtexc()
raise ScriptImportError(_(u'import error in "$module", error:\n$txt'),module=modulefile,txt=txt)
else:
botsglobal.logger.debug(u'import "%s".',modulefile)
return module,modulefile
def join(*paths):
    '''Join path parts like os.path.join, but in addition:
        - a relative result is interpreted as relative to the bots
          directory (the 'botspath' setting is prepended);
        - the result is normalized.'''
    return os.path.normpath(os.path.join(botsglobal.ini.get('directories','botspath'),*paths))
def dirshouldbethere(path):
    ''' Ensure *path* exists as a directory.

        Returns True when this call created the directory (including any
        intermediate levels), False when it already existed or *path* is
        empty.  Creation is attempted directly (EAFP) and a concurrent
        "already exists" loss of the race is tolerated; the original
        exists()-then-makedirs() sequence could crash when another process
        created the directory in between.
    '''
    if not path or os.path.exists(path):
        return False
    try:
        os.makedirs(path)
    except OSError:
        #lost the race: fine if the directory is there now, else re-raise
        if not os.path.isdir(path):
            raise
        return False
    return True
def abspath(soort,filename):
    ''' get absolute path for internal files; path is a section in bots.ini '''
    #soort ('kind') names an entry in the [directories] section of bots.ini
    directory = botsglobal.ini.get('directories',soort)
    return join(directory,filename)
def abspathdata(filename):
    ''' abspathdata if filename incl dir: return absolute path; else (only filename): return absolute path (datadir)'''
    if '/' in filename: #if filename already contains path
        return join(filename)
    else:
        directory = botsglobal.ini.get('directories','data')
        #data files are spread over subdirectories named after the filename
        #minus its last 3 characters (presumably a counter suffix - TODO
        #confirm against the code that creates these files)
        datasubdir = filename[:-3]
        if not datasubdir:
            datasubdir = '0'
        return join(directory,datasubdir,filename)
def opendata(filename,mode,charset=None,errors=None):
    ''' open internal data file. if no encoding specified: read file raw/binary.'''
    filename = abspathdata(filename)
    if 'w' in mode:
        #make sure the target directory exists before writing
        dirshouldbethere(os.path.dirname(filename))
    if charset:
        return codecs.open(filename,mode,charset,errors)
    else:
        return open(filename,mode)
def readdata(filename,charset=None,errors=None):
    ''' Slurp an internal data file into memory.
        With *charset* the content is decoded while reading; without it
        the raw bytes are returned.  Opening is delegated to opendata().
    '''
    datafile = opendata(filename,'rb',charset,errors)
    payload = datafile.read()
    datafile.close()
    return payload
#**********************************************************/**
#*************************calling modules, programs***********************/**
#**********************************************************/**
def runscript(module,modulefile,functioninscript,**argv):
    ''' Execute user script. Functioninscript is supposed to be there; if not AttributeError is raised.
        Often is checked in advance if Functioninscript does exist.
    '''
    botsglobal.logger.debug(u'run user script "%s" in "%s".',functioninscript,modulefile)
    functiontorun = getattr(module, functioninscript)
    try:
        return functiontorun(**argv)
    except:
        #any error in the user script is wrapped in ScriptError, with the
        #formatted traceback included for the run report
        txt=txtexc()
        raise ScriptError(_(u'Script file "$filename": "$txt".'),filename=modulefile,txt=txt)
def tryrunscript(module,modulefile,functioninscript,**argv):
    ''' Run functioninscript only when it exists in module.
        Returns True when the function was present and has been run,
        False when there was nothing to call.  The return value of the
        script function itself is discarded.
    '''
    if not module or not hasattr(module,functioninscript):
        return False
    runscript(module,modulefile,functioninscript,**argv)
    return True
def runscriptyield(module,modulefile,functioninscript,**argv):
botsglobal.logger.debug(u'run user (yield) script "%s" in "%s".',functioninscript,modulefile)
functiontorun = getattr(module, functioninscript)
try:
for result in functiontorun(**argv):
yield result
except:
txt=txtexc()
raise ScriptError(_(u'Script file "$filename": "$txt".'),filename=modulefile,txt=txt)
def runexternprogram(*args):
    ''' Run an external program; args[0] is the executable, whose directory
        is used as the working directory.  Any failure is re-raised as an
        OSError that includes the formatted traceback.
    '''
    botsglobal.logger.debug(u'run external program "%s".',args)
    path = os.path.dirname(args[0])
    try:
        #argument-list form (no shell): arguments are passed verbatim
        subprocess.call(list(args),cwd=path)
    except:
        txt=txtexc()
        raise OSError(_(u'error running extern program "%(program)s", error:\n%(error)s'%{'program':args,'error':txt}))
#**********************************************************/**
#***************############### mdn #############
#**********************************************************/**
def checkconfirmrules(confirmtype,**kwargs):
terug = False #boolean to return: ask a confirm of not?
for confirmdict in query(u'''SELECT ruletype,idroute,idchannel_id as idchannel,frompartner_id as frompartner,topartner_id as topartner,editype,messagetype,negativerule
FROM confirmrule
WHERE active=%(active)s
AND confirmtype=%(confirmtype)s
ORDER BY negativerule ASC
''',
{'active':True,'confirmtype':confirmtype}):
if confirmdict['ruletype']=='all':
terug = not confirmdict['negativerule']
elif confirmdict['ruletype']=='route':
if 'idroute' in kwargs and confirmdict['idroute'] == kwargs['idroute']:
terug = not confirmdict['negativerule']
elif confirmdict['ruletype']=='channel':
if 'idchannel' in kwargs and confirmdict['idchannel'] == kwargs['idchannel']:
terug = not confirmdict['negativerule']
elif confirmdict['ruletype']=='frompartner':
if 'frompartner' in kwargs and confirmdict['frompartner'] == kwargs['frompartner']:
terug = not confirmdict['negativerule']
elif confirmdict['ruletype']=='topartner':
if 'topartner' in kwargs and confirmdict['topartner'] == kwargs['topartner']:
terug = not confirmdict['negativerule']
elif confirmdict['ruletype']=='messagetype':
if 'editype' in kwargs and confirmdict['editype'] == kwargs['editype'] and 'messagetype' in kwargs and confirmdict['messagetype'] == kwargs['messagetype']:
terug = not confirmdict['negativerule']
#~ print '>>>>>>>>>>>>', terug,confirmtype,kwargs
return terug
#**********************************************************/**
#***************############### codecs #############
#**********************************************************/**
def getcodeccanonicalname(codecname):
    ''' Return the canonical name of a codec, e.g. 'utf8' -> 'utf-8',
        'latin_1' -> 'iso8859-1'. '''
    return codecs.lookup(codecname).name
def checkcodeciscompatible(charset1,charset2):
    ''' Check if the charset of the edi-file is 'compatible' with the charset
        of the channel: return True; else raise CommunicationOutError.
    '''
    #some codecs are upward compatible (subsets); charsetcompatible maps a charset to the charsets that contain it.
    #most charsets here are 1 byte (ascii, ISO-8859-*); UTF-16/UTF-32 are multi-byte, UTF-8 is multi-byte but ascii-compatible.
    charsetcompatible = {
        'unoa':['unob','ascii','utf-8','iso8859-1','cp1252','iso8859-15'],
        'unob':['ascii','utf-8','iso8859-1','cp1252','iso8859-15'],
        'ascii':['utf-8','iso8859-1','cp1252','iso8859-15'],
        }
    #compare canonical codec names so aliases like 'latin_1' match
    charset_edifile = getcodeccanonicalname(charset1)
    charset_channel = getcodeccanonicalname(charset2)
    if charset_channel == charset_edifile:
        return True
    if charset_edifile in charsetcompatible and charset_channel in charsetcompatible[charset_edifile]:
        return True
    raise CommunicationOutError(_(u'Charset "$charset2" for channel not matching with charset "$charset1" for edi-file.'),charset1=charset1,charset2=charset2)
#**********************************************************/**
#***************############### misc. #############
#**********************************************************/**
class Uri(object):
''' generate uri from parts. '''
def __init__(self,**kw):
self.uriparts = dict(scheme='',username='',password='',host='',port='',path='',parameters='',filename='',query={},fragment='')
self.uriparts.update(**kw)
def update(self,**kw):
self.uriparts.update(kw)
return self.uri
@property #the getter
def uri(self):
if not self.uriparts['scheme']:
raise BotsError(_(u'No scheme in uri.'))
#assemble complete host name
fullhost = ''
if self.uriparts['username']: #always use both?
fullhost += self.uriparts['username'] + '@'
if self.uriparts['host']:
fullhost += self.uriparts['host']
if self.uriparts['port']:
fullhost += ':' + str(self.uriparts['port'])
#assemble complete path
if self.uriparts['path'].strip().endswith('/'):
fullpath = self.uriparts['path'] + self.uriparts['filename']
else:
fullpath = self.uriparts['path'] + '/' + self.uriparts['filename']
if fullpath.endswith('/'):
fullpath = fullpath[:-1]
_uri = urlparse.urlunparse((self.uriparts['scheme'],fullhost,fullpath,self.uriparts['parameters'],urllib.urlencode(self.uriparts['query']),self.uriparts['fragment']))
if not _uri:
raise BotsError(_(u'Uri is empty.'))
return _uri
def settimeout(milliseconds):
    ''' Set the default time-out for new TCP/IP connections.
        NOTE(review): socket.setdefaulttimeout() expects SECONDS (float);
        the parameter name says milliseconds - confirm what callers pass.
    '''
    socket.setdefaulttimeout(milliseconds)    #set a time-out for TCP-IP connections
def countunripchars(value,delchars):
    ''' Count the characters of *value* that do not occur in *delchars*.
        Uses sum() over a generator, so no throwaway list is built (the
        original materialised a list just to take its length).
    '''
    return sum(1 for char in value if char not in delchars)
def updateunlessset(updatedict,fromdict):
    ''' Copy entries of fromdict into updatedict, but never overwrite a
        key that updatedict already has.  Mutates updatedict in place.
    '''
    for key, value in fromdict.items():
        updatedict.setdefault(key, value)
#**********************************************************/**
#************** Exception classes ***************************
#**********************************************************/**
class BotsError(Exception):
    ''' Root of the bots exception hierarchy.
        msg is a string.Template text; kwargs hold the values that are
        substituted into its $placeholders when the error is rendered.
    '''
    def __init__(self, msg,**kwargs):
        self.msg = msg
        self.kwargs = kwargs
    def __str__(self):
        #safe_substitute leaves unknown $placeholders intact instead of raising
        s = string.Template(self.msg).safe_substitute(self.kwargs)
        #NOTE(review): .encode() returns bytes; valid for the Python 2 str
        #protocol this file targets, but not for a Python 3 __str__.
        return s.encode(u'utf-8',u'ignore')
class CodeConversionError(BotsError):
pass
class CommunicationError(BotsError):
pass
class CommunicationInError(BotsError):
pass
class CommunicationOutError(BotsError):
pass
class EanError(BotsError):
pass
class GrammarError(BotsError): #grammar.py
pass
class InMessageError(BotsError):
pass
class InMessageFieldError(BotsError):
pass
class LockedFileError(BotsError):
pass
class MessageError(BotsError):
pass
class MappingRootError(BotsError):
pass
class MappingFormatError(BotsError): #mpath is not valid; mapth will mostly come from mapping-script
pass
class OutMessageError(BotsError):
pass
class PanicError(BotsError):
pass
class PersistError(BotsError):
pass
class PluginError(BotsError):
pass
class ScriptImportError(BotsError): #can not find script; not for errors in a script
pass
class ScriptError(BotsError): #runtime errors in a script
pass
class TraceError(BotsError):
pass
class TraceNotPickedUpError(BotsError):
pass
class TranslationNotFoundError(BotsError):
pass
| [
[
8,
0,
0.0013,
0.0013,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0025,
0.0013,
0,
0.66,
0.0116,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0038,
0.0013,
0,
0.66... | [
"''' Base library for bots. Botslib should not import from other Bots-modules.'''",
"import sys",
"import os",
"import codecs",
"import traceback",
"import subprocess",
"import socket #to set a time-out for connections",
"import string",
"import urlparse",
"import urllib",
"import platform",
... |
""" Python Character Mapping Codec generated from CP1252.TXT with gencodec.py.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
(c) Copyright 2000 Guido van Rossum.
Adapted by Henk-Jan Ebbers for Bots open source EDI translator
Regular UNOB: UNOB char, CR, LF and Crtl-Z
"""
import codecs
import sys
### Codec APIs
class Codec(codecs.Codec):
    ''' Stateless charmap codec translating via the module-level
        decoding_map/encoding_map tables.  (encoding_map is presumably
        derived from decoding_map further down the file - TODO confirm,
        it is not visible in this chunk.)
    '''
    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_map)
    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_map)
#Stream wrappers: reuse Codec's table-driven encode/decode for file-like APIs.
class StreamWriter(Codec,codecs.StreamWriter):
    pass
class StreamReader(Codec,codecs.StreamReader):
    pass
### encodings module API
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        #charmap_encode returns (output, length consumed); only the output is needed
        return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        #Use decoding_map: this module defines decoding_map (old-style
        #gencodec output), not the decoding_table a newer gencodec would
        #emit, so the original reference raised NameError at first use.
        #charmap_decode returns (output, length consumed); keep the output.
        return codecs.charmap_decode(input,self.errors,decoding_map)[0]
def getregentry():
    ''' Return the CodecInfo for the 'unob' codec - the standard
        encodings-module API entry point used by codecs.register/lookup. '''
    return codecs.CodecInfo(
        name='unob',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
# The UNOB character set maps every permitted character to itself:
# LF, CR, Ctrl-Z, the FS/GS/US separators, and a restricted ASCII subset
# (upper- and lowercase letters, digits and some punctuation; #, $, @,
# brackets etc. are excluded). Any other byte fails under 'strict'.
_UNOB_CHARS = (u'\n\r\x1a\x1c\x1d\x1f'
               u' !"%&\'()*+,-./0123456789:;<=>?'
               u'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
               u'abcdefghijklmnopqrstuvwxyz')
decoding_map = dict((ord(_char), ord(_char)) for _char in _UNOB_CHARS)
### Encoding Map
encoding_map = codecs.make_encoding_map(decoding_map)
| [
[
8,
0,
0.0291,
0.0529,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0635,
0.0053,
0,
0.66,
0.1,
220,
0,
1,
0,
0,
220,
0,
0
],
[
1,
0,
0.0688,
0.0053,
0,
0.66,
... | [
"\"\"\" Python Character Mapping Codec generated from CP1252.TXT with gencodec.py.\n\nWritten by Marc-Andre Lemburg (mal@lemburg.com).\n\n(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.\n(c) Copyright 2000 Guido van Rossum.\n\nAdapted by Henk-Jan Ebbers for Bots open source EDI translator",
"import codecs",... |
""" Python Character Mapping Codec generated from CP1252.TXT with gencodec.py.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
(c) Copyright 2000 Guido van Rossum.
Adapted by Henk-Jan Ebbers for Bots open source EDI translator
Regular UNOA: UNOA char, CR, LF and Ctrl-Z
"""
import codecs
import sys
### Codec APIs
class Codec(codecs.Codec):
    ''' Stateless charmap codec for the UNOA character set. '''
    def encode(self, input, errors='strict'):
        ''' Encode unicode to bytes via the module-level encoding_map. '''
        result = codecs.charmap_encode(input, errors, encoding_map)
        return result
    def decode(self, input, errors='strict'):
        ''' Decode bytes to unicode via the module-level decoding_map. '''
        result = codecs.charmap_decode(input, errors, decoding_map)
        return result
class StreamWriter(Codec,codecs.StreamWriter):
    ''' Stream writer for the unoa codec; all work done by Codec.encode. '''
    pass
class StreamReader(Codec,codecs.StreamReader):
    ''' Stream reader for the unoa codec; all work done by Codec.decode. '''
    pass
### encodings module API
class IncrementalEncoder(codecs.IncrementalEncoder):
    ''' Stateless incremental encoder for the unoa codec. '''
    def encode(self, input, final=False):
        encoded, _consumed = codecs.charmap_encode(input, self.errors, encoding_map)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    ''' Stateless incremental decoder for the unoa codec.

        Bug fix: the original passed the undefined name ``decoding_table`` to
        charmap_decode (this module only defines ``decoding_map``), so every
        incremental decode raised NameError.
    '''
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_map)[0]
def getregentry():
    ''' Return the codecs.CodecInfo record used to register the 'unoa' codec. '''
    codec = Codec()
    return codecs.CodecInfo(
        name='unoa',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
# The UNOA character set maps every permitted character to itself:
# LF, CR, Ctrl-Z plus a restricted ASCII subset (UPPERCASE letters only,
# digits and some punctuation; lowercase, #, $, @, brackets etc. are
# excluded). Any other byte fails under 'strict'.
_UNOA_CHARS = (u'\n\r\x1a'
               u' !"%&\'()*+,-./0123456789:;<=>?'
               u'ABCDEFGHIJKLMNOPQRSTUVWXYZ')
decoding_map = dict((ord(_char), ord(_char)) for _char in _UNOA_CHARS)
### Encoding Map
encoding_map = codecs.make_encoding_map(decoding_map)
| [
[
8,
0,
0.0291,
0.0529,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0635,
0.0053,
0,
0.66,
0.1,
220,
0,
1,
0,
0,
220,
0,
0
],
[
1,
0,
0.0688,
0.0053,
0,
0.66,
... | [
"\"\"\" Python Character Mapping Codec generated from CP1252.TXT with gencodec.py.\n\nWritten by Marc-Andre Lemburg (mal@lemburg.com).\n\n(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.\n(c) Copyright 2000 Guido van Rossum.\n\nAdapted by Henk-Jan Ebbers for Bots open source EDI translator",
"import codecs",... |
# Code conversion lookup table: incoming code value -> outgoing code value.
# Several incoming codes may map to the same outgoing code.
# 'TESTIN':'TESTOUT' is presumably a test entry — confirm with the mapping
# scripts that read this table.
codeconversions = {
'351':'AAK',
'35E':'AAK',
'220':'ON',
'224':'ON',
'50E':'ON',
'83':'IV',
'380':'IV',
'384':'IV',
'TESTIN':'TESTOUT',
}
| [
[
14,
0,
0.5455,
1,
0,
0.66,
0,
543,
0,
0,
0,
0,
0,
6,
0
]
] | [
"codeconversions = {\n'351':'AAK',\n'35E':'AAK',\n'220':'ON',\n'224':'ON',\n'50E':'ON',\n'83':'IV',\n'380':'IV',"
] |
import time
import sys
try:
import cPickle as pickle
except:
import pickle
import decimal
NODECIMAL = decimal.Decimal(1)
try:
import cElementTree as ET
#~ print 'imported cElementTree'
except ImportError:
try:
import elementtree.ElementTree as ET
#~ print 'imported elementtree.ElementTree'
except ImportError:
try:
from xml.etree import cElementTree as ET
#~ print 'imported xml.etree.cElementTree'
except ImportError:
from xml.etree import ElementTree as ET
#~ print 'imported xml.etree.ElementTree'
#~ print ET.VERSION
try:
import elementtree.ElementInclude as ETI
except ImportError:
from xml.etree import ElementInclude as ETI
try:
import json as simplejson
except ImportError:
import simplejson
from django.utils.translation import ugettext as _
#bots-modules
import botslib
import botsglobal
import grammar
import message
import node
from botsconfig import *
def outmessage_init(**ta_info):
    ''' Dispatch: look up the Outmessage subclass named by ta_info['editype']
        in this module and return an instance of it.
        ta_info: needed is editype, messagetype, filename, charset, merge
    '''
    editype = ta_info['editype']
    try:
        classtocall = globals()[editype]
    except KeyError:
        raise botslib.OutMessageError(_(u'Unknown editype for outgoing message: $editype'),editype=editype)
    return classtocall(ta_info)
class Outmessage(message.Message):
''' abstract class; represents a outgoing edi message.
subclassing is necessary for the editype (csv, edi, x12, etc)
A tree of nodes is build form the mpaths received from put()or putloop(). tree starts at self.root.
Put() recieves mpaths from mappingscript
The next algorithm is used to 'map' a mpath into the tree:
For each part of a mpath: search node in 'current' level of tree
If part already as a node:
recursively search node-children
If part not as a node:
append new node to tree;
recursively append next parts to tree
After the mapping-script is finished, the resulting tree is converted to records (self.records).
These records are written to file.
Structure of self.records:
list of record;
record is list of field
field is dict. Keys in field:
- ID field ID (id within this record). For in-file
- VALUE value, content of field
- MPATH mpath of record, only for first field(=recordID)
- LIN linenr of field in in-file
- POS positionnr within line in in-file
- SFIELD True if subfield (edifact-only)
first field for record is recordID.
'''
    def __init__(self,ta_info):
        ''' ta_info: dict of run parameters (editype, messagetype, filename, charset, ...). '''
        self.ta_info = ta_info
        self.root = node.Node(record={}) #message tree; build via put()-interface in mapping-script. Initialise with empty dict
        super(Outmessage,self).__init__()
    def outmessagegrammarread(self,editype,messagetype):
        ''' read the grammar for a out-message.
            Grammar syntax values are merged into self.ta_info (already-set keys win);
            partner-specific syntax, if present, overrules.
            try to read the topartner dependent grammar syntax.
        '''
        self.defmessage = grammar.grammarread(editype,messagetype)
        self.defmessage.display(self.defmessage.structure) # NOTE(review): dumps the structure on every call — looks like debug output, confirm intended
        #~ print 'self.ta_info',self.ta_info
        #~ print 'self.defmessage.syntax',self.defmessage.syntax
        botslib.updateunlessset(self.ta_info,self.defmessage.syntax) #write values from grammar to self.ta_info - unless these values are already set eg by mapping script
        if self.ta_info['topartner']: #read syntax-file for partner dependent syntax
            try:
                partnersyntax = grammar.syntaxread('partners',editype,self.ta_info['topartner'])
                self.ta_info.update(partnersyntax.syntax) #partner syntax overrules!
            except ImportError:
                pass #No partner specific syntax found (is not an error).
    def writeall(self):
        ''' writeall is called for writing all 'real' outmessage objects; but not for envelopes.
            writeall is call from transform.translate()
            Either the whole tree is one message (root.record filled) or each
            direct child of root is written as a separate message (multiplewrite).
        '''
        self.outmessagegrammarread(self.ta_info['editype'],self.ta_info['messagetype'])
        self.nrmessagewritten = 0
        if self.root.record: #root record contains information; write whole tree in one time
            self.multiplewrite = False
            self.normalisetree(self.root)
            self._initwrite()
            self._write(self.root)
            self.nrmessagewritten = 1
            self._closewrite()
        elif not self.root.children:
            raise botslib.OutMessageError(_(u'No outgoing message')) #then there is nothing to write...
        else:
            self.multiplewrite = True
            for childnode in self.root.children: #normalise all messages before anything is written
                self.normalisetree(childnode)
            self._initwrite()
            for childnode in self.root.children:
                self._write(childnode)
                self.nrmessagewritten += 1
            self._closewrite()
    def _initwrite(self):
        ''' Open the output file with the charset/error handling from ta_info. '''
        botsglobal.logger.debug(u'Start writing to file "%s".',self.ta_info['filename'])
        self._outstream = botslib.opendata(self.ta_info['filename'],'wb',charset=self.ta_info['charset'],errors=self.ta_info['checkcharsetout'])
    def _closewrite(self):
        ''' Close the output stream opened by _initwrite. '''
        botsglobal.logger.debug(u'End writing to file "%s".',self.ta_info['filename'])
        self._outstream.close()
    def _write(self,node):
        ''' the write method for most classes.
            tree is serialised to sequential records; records are written to file.
            Classes that write using other libraries (xml, json, template, db) use specific write methods.
        '''
        self.tree2records(node)
        self._records2file()
    def tree2records(self,node):
        ''' Flatten the node tree starting at *node* into self.records. '''
        self.records = [] #tree of nodes is flattened to these records
        self._tree2recordscore(node,self.defmessage.structure[0])
    def _tree2recordscore(self,node,structure):
        ''' Write tree of nodes to flat records.
            The nodes are already sorted.
            Recurses: each child is matched against the grammar records of the
            next level (by BOTSID and BOTSIDnr) before descending.
        '''
        self._tree2recordfields(node.record,structure) #write root node->first record
        for childnode in node.children: #for every node in mpathtree, these are already sorted#SPEED: node.children is already sorted!
            for structure_record in structure[LEVEL]: #for structure_record of this level in grammar
                if childnode.record['BOTSID'] == structure_record[ID] and childnode.record['BOTSIDnr'] == structure_record[BOTSIDnr]: #if is is the right node:
                    self._tree2recordscore(childnode,structure_record) #use rest of index in deeper level
    def _tree2recordfields(self,noderecord,structure_record):
        ''' appends fields in noderecord to (raw)record; use structure_record as guide.
            complex because is is used for: editypes that have compression rules (edifact), var editypes without compression, fixed protocols
            Empty fields are buffered and only flushed when a later field has
            data, which implements trailing-separator stripping.
        '''
        buildrecord = [] #the record that is going to be build; list of dicts. Each dict is a field.
        buffer = []
        for grammarfield in structure_record[FIELDS]: #loop all fields in grammar-definition
            if grammarfield[ISFIELD]: #if field (no composite)
                if grammarfield[ID] in noderecord and noderecord[grammarfield[ID]]: #field exists in outgoing message and has data
                    buildrecord += buffer #write the buffer to buildrecord
                    buffer=[] #clear the buffer
                    buildrecord += [{VALUE:noderecord[grammarfield[ID]],SFIELD:False,FORMATFROMGRAMMAR:grammarfield[FORMAT]}] #append new field
                else: #there is no data for this field
                    if self.ta_info['stripfield_sep']:
                        buffer += [{VALUE:'',SFIELD:False,FORMATFROMGRAMMAR:grammarfield[FORMAT]}] #append new empty to buffer;
                    else:
                        value = self._formatfield('',grammarfield,structure_record) #generate field
                        buildrecord += [{VALUE:value,SFIELD:False,FORMATFROMGRAMMAR:grammarfield[FORMAT]}] #append new field
            else: #if composite
                donefirst = False #used because first subfield in composite is marked as a field (not a subfield).
                subbuffer=[] #buffer for this composite.
                subiswritten=False #check if composite contains data
                for grammarsubfield in grammarfield[SUBFIELDS]: #loop subfields
                    if grammarsubfield[ID] in noderecord and noderecord[grammarsubfield[ID]]: #field exists in outgoing message and has data
                        buildrecord += buffer #write buffer
                        buffer=[] #clear buffer
                        buildrecord += subbuffer #write subbuffer
                        subbuffer=[] #clear subbuffer
                        buildrecord += [{VALUE:noderecord[grammarsubfield[ID]],SFIELD:donefirst}] #append field
                        subiswritten = True
                    else:
                        if self.ta_info['stripfield_sep']:
                            subbuffer += [{VALUE:'',SFIELD:donefirst}] #append new empty to buffer;
                        else:
                            value = self._formatfield('',grammarsubfield,structure_record) #generate & append new field. For eg fixed and csv: all field have to be present
                            subbuffer += [{VALUE:value,SFIELD:donefirst}] #generate & append new field
                    donefirst = True
                if not subiswritten: #if composite has no data: write placeholder for composite (stripping is done later)
                    buffer += [{VALUE:'',SFIELD:False}]
        #~ print [buildrecord]
        self.records += [buildrecord]
def _formatfield(self,value, grammarfield,record):
''' Input: value (as a string) and field definition.
Some parameters of self.syntax are used: decimaal
Format is checked and converted (if needed).
return the formatted value
'''
if grammarfield[BFORMAT] == 'A':
if isinstance(self,fixed): #check length fields in variable records
if grammarfield[FORMAT] == 'AR': #if field format is alfanumeric right aligned
value = value.rjust(grammarfield[MINLENGTH])
else:
value = value.ljust(grammarfield[MINLENGTH]) #add spaces (left, because A-field is right aligned)
valuelength=len(value)
if valuelength > grammarfield[LENGTH]:
raise botslib.OutMessageError(_(u'record "$mpath" field "$field" too big (max $max): "$content".'),field=grammarfield[ID],content=value,mpath=record[MPATH],max=grammarfield[LENGTH])
if valuelength < grammarfield[MINLENGTH]:
raise botslib.OutMessageError(_(u'record "$mpath" field "$field" too small (min $min): "$content".'),field=grammarfield[ID],content=value,mpath=record[MPATH],min=grammarfield[MINLENGTH])
elif grammarfield[BFORMAT] == 'D':
try:
lenght = len(value)
if lenght==6:
time.strptime(value,'%y%m%d')
elif lenght==8:
time.strptime(value,'%Y%m%d')
else:
raise ValueError(u'To be catched')
except ValueError:
raise botslib.OutMessageError(_(u'record "$mpath" field "$field" no valid date: "$content".'),field=grammarfield[ID],content=value,mpath=record[MPATH])
valuelength=len(value)
if valuelength > grammarfield[LENGTH]:
raise botslib.OutMessageError(_(u'record "$mpath" field "$field" too big (max $max): "$content".'),field=grammarfield[ID],content=value,mpath=record[MPATH],max=grammarfield[LENGTH])
if valuelength < grammarfield[MINLENGTH]:
raise botslib.OutMessageError(_(u'record "$mpath" field "$field" too small (min $min): "$content".'),field=grammarfield[ID],content=value,mpath=record[MPATH],min=grammarfield[MINLENGTH])
elif grammarfield[BFORMAT] == 'T':
try:
lenght = len(value)
if lenght==4:
time.strptime(value,'%H%M')
elif lenght==6:
time.strptime(value,'%H%M%S')
else: #lenght==8: #tsja...just use first part of field
raise ValueError(u'To be catched')
except ValueError:
raise botslib.OutMessageError(_(u'record "$mpath" field "$field" no valid time: "$content".'),field=grammarfield[ID],content=value,mpath=record[MPATH])
valuelength=len(value)
if valuelength > grammarfield[LENGTH]:
raise botslib.OutMessageError(_(u'record "$mpath" field "$field" too big (max $max): "$content".'),field=grammarfield[ID],content=value,mpath=record[MPATH],max=grammarfield[LENGTH])
if valuelength < grammarfield[MINLENGTH]:
raise botslib.OutMessageError(_(u'record "$mpath" field "$field" too small (min $min): "$content".'),field=grammarfield[ID],content=value,mpath=record[MPATH],min=grammarfield[MINLENGTH])
else: #numerics
if value or isinstance(self,fixed): #if empty string for non-fixed: just return. Later on, ta_info[stripemptyfield] determines what to do with them
if not value: #see last if; if a numerical fixed field has content '' , change this to '0' (init)
value='0'
else:
value = value.strip()
if value[0]=='-':
minussign = '-'
absvalue = value[1:]
else:
minussign = ''
absvalue = value
digits,decimalsign,decimals = absvalue.partition('.')
if not digits and not decimals:# and decimalsign:
raise botslib.OutMessageError(_(u'record "$mpath" field "$field" numerical format not valid: "$content".'),field=grammarfield[ID],content=value,mpath=record[MPATH])
if not digits:
digits = '0'
lengthcorrection = 0 #for some formats (if self.ta_info['lengthnumericbare']=True; eg edifact) length is calculated without decimal sing and/or minus sign.
if grammarfield[BFORMAT] == 'R': #floating point: use all decimals received
if self.ta_info['lengthnumericbare']:
if minussign:
lengthcorrection += 1
if decimalsign:
lengthcorrection += 1
try:
value = str(decimal.Decimal(minussign + digits + decimalsign + decimals).quantize(decimal.Decimal(10) ** -len(decimals)))
except:
raise botslib.OutMessageError(_(u'record "$mpath" field "$field" numerical format not valid: "$content".'),field=grammarfield[ID],content=value,mpath=record[MPATH])
if grammarfield[FORMAT] == 'RL': #if field format is numeric right aligned
value = value.ljust(grammarfield[MINLENGTH] + lengthcorrection)
elif grammarfield[FORMAT] == 'RR': #if field format is numeric right aligned
value = value.rjust(grammarfield[MINLENGTH] + lengthcorrection)
else:
value = value.zfill(grammarfield[MINLENGTH] + lengthcorrection)
value = value.replace('.',self.ta_info['decimaal'],1) #replace '.' by required decimal sep.
elif grammarfield[BFORMAT] == 'N': #fixed decimals; round
if self.ta_info['lengthnumericbare']:
if minussign:
lengthcorrection += 1
if grammarfield[DECIMALS]:
lengthcorrection += 1
try:
value = str(decimal.Decimal(minussign + digits + decimalsign + decimals).quantize(decimal.Decimal(10) ** -grammarfield[DECIMALS]))
except:
raise botslib.OutMessageError(_(u'record "$mpath" field "$field" numerical format not valid: "$content".'),field=grammarfield[ID],content=value,mpath=record[MPATH])
if grammarfield[FORMAT] == 'NL': #if field format is numeric right aligned
value = value.ljust(grammarfield[MINLENGTH] + lengthcorrection)
elif grammarfield[FORMAT] == 'NR': #if field format is numeric right aligned
value = value.rjust(grammarfield[MINLENGTH] + lengthcorrection)
else:
value = value.zfill(grammarfield[MINLENGTH] + lengthcorrection)
value = value.replace('.',self.ta_info['decimaal'],1) #replace '.' by required decimal sep.
elif grammarfield[BFORMAT] == 'I': #implicit decimals
if self.ta_info['lengthnumericbare']:
if minussign:
lengthcorrection += 1
try:
d = decimal.Decimal(minussign + digits + decimalsign + decimals) * 10**grammarfield[DECIMALS]
except:
raise botslib.OutMessageError(_(u'record "$mpath" field "$field" numerical format not valid: "$content".'),field=grammarfield[ID],content=value,mpath=record[MPATH])
value = str(d.quantize(NODECIMAL ))
value = value.zfill(grammarfield[MINLENGTH] + lengthcorrection)
if len(value)-lengthcorrection > grammarfield[LENGTH]:
raise botslib.OutMessageError(_(u'record "$mpath" field "$field": content to large: "$content".'),field=grammarfield[ID],content=value,mpath=record[MPATH])
return value
    def _records2file(self):
        ''' convert self.records to a file.
            using the right editype (edifact, x12, etc) and charset.
            If ta_info['wrap_length'] is set, all records are joined and re-split
            into fixed-length lines terminated with CRLF.
        '''
        wrap_length = int(self.ta_info.get('wrap_length', 0))
        if wrap_length:
            s = ''.join(self._record2string(r) for r in self.records) # join all records
            for i in range(0,len(s),wrap_length): # then split in fixed lengths
                try:
                    self._outstream.write(s[i:i+wrap_length] + '\r\n')
                except UnicodeEncodeError:
                    raise botslib.OutMessageError(_(u'Chars in outmessage not in charset "$char": $content'),char=self.ta_info['charset'],content=s[i:i+wrap_length])
        else:
            for record in self.records: #loop all records
                try:
                    self._outstream.write(self._record2string(record))
                except UnicodeEncodeError: #, flup: testing with 2.7: flup did not contain the content.
                    raise botslib.OutMessageError(_(u'Chars in outmessage not in charset "$char": $content'),char=self.ta_info['charset'],content=str(record))
                #code before 7 aug 2007 had other handling for flup. May have changed because python2.4->2.5?
    def _record2string(self,record):
        ''' write (all fields of) a record using the right separators, escape etc
            record: list of field dicts (VALUE/SFIELD/...). Returns the serialised
            record as a unicode string including the record separator.
        '''
        sfield_sep = self.ta_info['sfield_sep']
        if self.ta_info['record_tag_sep']:
            record_tag_sep = self.ta_info['record_tag_sep']
        else:
            record_tag_sep = self.ta_info['field_sep']
        field_sep = self.ta_info['field_sep']
        quote_char = self.ta_info['quote_char']
        escape = self.ta_info['escape']
        record_sep = self.ta_info['record_sep'] + self.ta_info['add_crlfafterrecord_sep']
        forcequote = self.ta_info['forcequote']
        escapechars = self.getescapechars()
        value = u'' #to collect separator/escape plus field content
        fieldcount = 0
        mode_quote = False
        if self.ta_info['noBOTSID']: #for some csv-files: do not write BOTSID so remove it
            del record[0] # NOTE(review): mutates the caller's record list in place — confirm callers do not reuse it
        for field in record: #loop all fields in record
            if field[SFIELD]:
                value += sfield_sep
            else: #is a field:
                if fieldcount == 0: #do nothing because first field in record is not preceded by a separator
                    fieldcount = 1
                elif fieldcount == 1: #first separator after the record tag may differ (eg tradacoms)
                    value += record_tag_sep
                    fieldcount = 2
                else:
                    value += field_sep
            if quote_char: #quote char only used for csv
                start_to__quote=False
                if forcequote == 2: #quote only alfanumeric field formats
                    if field[FORMATFROMGRAMMAR] in ['AN','A','AR']:
                        start_to__quote=True
                elif forcequote: #always quote; this catches values 1, '1', '0'
                    start_to__quote=True
                else: #quote only when content contains a separator/quote char
                    if field_sep in field[VALUE] or quote_char in field[VALUE] or record_sep in field[VALUE]:
                        start_to__quote=True
                #TO DO test. if quote_char='' this works OK. Alt: check first if quote_char
                if start_to__quote:
                    value += quote_char
                    mode_quote = True
            for char in field[VALUE]: #use escape (edifact, tradacom). For x12 is warned if content contains separator
                if char in escapechars:
                    if isinstance(self,x12): #x12 has no escape char: replace or error
                        if self.ta_info['replacechar']:
                            char = self.ta_info['replacechar']
                        else:
                            raise botslib.OutMessageError(_(u'Character "$char" is in use as separator in this x12 file. Field: "$data".'),char=char,data=field[VALUE])
                    else:
                        value +=escape
                elif mode_quote and char==quote_char: #quote char inside quoted field is doubled (csv)
                    value +=quote_char
                value += char
            if mode_quote:
                value += quote_char
                mode_quote = False
        value += record_sep
        return value
    def getescapechars(self):
        ''' Characters that need escaping in field content; base class: none. '''
        return ''
class fixed(Outmessage):
    ''' Fixed-length record out-message; field padding is done in _formatfield. '''
    pass
class idoc(fixed):
    ''' SAP idoc out-message (fixed format with control fields per segment). '''
    def _canonicalfields(self,noderecord,structure_record,headerrecordnumber):
        ''' Inject the idoc control fields (MANDT, DOCNUM and, when counting
            automatically, SEGNUM/PSGNUM/HLEVEL) before normal field handling. '''
        if self.ta_info['automaticcount']:
            noderecord.update({'MANDT':self.ta_info['MANDT'],'DOCNUM':self.ta_info['DOCNUM'],'SEGNUM':str(self.recordnumber),'PSGNUM':str(headerrecordnumber),'HLEVEL':str(len(structure_record[MPATH]))})
        else:
            noderecord.update({'MANDT':self.ta_info['MANDT'],'DOCNUM':self.ta_info['DOCNUM']})
        super(idoc,self)._canonicalfields(noderecord,structure_record,headerrecordnumber)
        self.recordnumber += 1 #tricky. EDI_DC is not counted, so I count after writing.
class var(Outmessage):
    ''' Variable-length record out-message (separator based); base for csv/edifact/x12/tradacoms. '''
    pass
class csv(var):
    ''' CSV out-message. '''
    def getescapechars(self):
        ''' Only the escape character from the syntax needs escaping in csv. '''
        escapechars = self.ta_info['escape']
        return escapechars
class edifact(var):
    ''' EDIFACT out-message. '''
    def getescapechars(self):
        ''' Separators/escape chars that must be escaped in edifact content. '''
        keys = ['record_sep', 'field_sep', 'sfield_sep', 'escape']
        if self.ta_info['version'] >= '4': #syntax version 4 adds the reserve character
            keys.append('reserve')
        return ''.join(self.ta_info[key] for key in keys)
class tradacoms(var):
    ''' TRADACOMS out-message; each MHD message in an STX interchange can have
        its own grammar, so writeall is overridden. '''
    def getescapechars(self):
        ''' Separators/escape chars that must be escaped in tradacoms content. '''
        terug = self.ta_info['record_sep']+self.ta_info['field_sep']+self.ta_info['sfield_sep']+self.ta_info['escape']+self.ta_info['record_tag_sep']
        return terug
    def writeall(self):
        ''' writeall is called for writing all 'real' outmessage objects; but not for enveloping.
            writeall is call from transform.translate()
            The grammar is (re)read per MHD message, based on TYPE.01 + TYPE.02.
        '''
        self.nrmessagewritten = 0
        if not self.root.children:
            raise botslib.OutMessageError(_(u'No outgoing message')) #then there is nothing to write...
        for message in self.root.getloop({'BOTSID':'STX'},{'BOTSID':'MHD'}):
            self.outmessagegrammarread(self.ta_info['editype'],message.get({'BOTSID':'MHD','TYPE.01':None}) + message.get({'BOTSID':'MHD','TYPE.02':None}))
            if not self.nrmessagewritten: #open the output file only once, at the first message
                self._initwrite()
            self.normalisetree(message)
            self._write(message)
            self.nrmessagewritten += 1
        self._closewrite() # NOTE(review): if the loop yields no messages this closes a stream that was never opened — confirm getloop always yields here
        self.ta_info['nrmessages'] = self.nrmessagewritten
class x12(var):
    ''' X12 out-message. '''
    def getescapechars(self):
        ''' Separators that may not occur in x12 field content (x12 has no escape char). '''
        keys = ['record_sep', 'field_sep', 'sfield_sep']
        if self.ta_info['version'] >= '00403': #version 00403+ adds the reserve separator
            keys.append('reserve')
        return ''.join(self.ta_info[key] for key in keys)
class xml(Outmessage):
''' 20110919: code for _write is almost the same as for envelopewrite.
this could be one method.
Some problems with right xml prolog, standalone, DOCTYPE, processing instructons: Different ET versions give different results:
celementtree in 2.7 is version 1.0.6, but different implementation in 2.6??
So: this works OK for python 2.7
For python <2.7: do not generate standalone, DOCTYPE, processing instructions for encoding !=utf-8,ascii OR if elementtree package is installed (version 1.3.0 or bigger)
'''
    def _write(self,node):
        ''' write normal XML messages (no envelope): build the ET tree from the
            node tree and serialise it. '''
        xmltree = ET.ElementTree(self._node2xml(node))
        root = xmltree.getroot()
        self._xmlcorewrite(xmltree,root)
    def envelopewrite(self,node):
        ''' write envelope for XML messages; XInclude directives in the envelope
            are resolved (ETI.include) so enclosed messages are pulled in. '''
        self._initwrite()
        self.normalisetree(node)
        xmltree = ET.ElementTree(self._node2xml(node))
        root = xmltree.getroot()
        ETI.include(root)
        self._xmlcorewrite(xmltree,root)
        self._closewrite()
    def _xmlcorewrite(self,xmltree,root):
        ''' Write prolog, DOCTYPE and processing instructions (where the ET
            version supports it), optionally indent, then serialise the tree. '''
        #xml prolog: always use.*********************************
        #standalone, DOCTYPE, processing instructions: only possible in python >= 2.7 or if encoding is utf-8/ascii
        # NOTE(review): string comparison of sys.version/ET.VERSION ('2.7.0' etc.) is lexical, not numeric — breaks for eg '2.10'; confirm acceptable for the supported versions
        if sys.version >= '2.7.0' or self.ta_info['charset'] in ['us-ascii','utf-8'] or ET.VERSION >= '1.3.0':
            if self.ta_info['indented']:
                indentstring = '\n'
            else:
                indentstring = ''
            if self.ta_info['standalone']:
                standalonestring = 'standalone="%s" '%(self.ta_info['standalone'])
            else:
                standalonestring = ''
            PI = ET.ProcessingInstruction('xml', 'version="%s" encoding="%s" %s'%(self.ta_info['version'],self.ta_info['charset'], standalonestring))
            self._outstream.write(ET.tostring(PI) + indentstring) #do not use encoding here. gives double xml prolog; possibly because ET.ElementTree.write i used again by write()
            #doctype /DTD **************************************
            if self.ta_info['DOCTYPE']:
                self._outstream.write('<!DOCTYPE %s>'%(self.ta_info['DOCTYPE']) + indentstring)
            #processing instructions (other than prolog) ************
            if self.ta_info['processing_instructions']:
                for pi in self.ta_info['processing_instructions']:
                    PI = ET.ProcessingInstruction(pi[0], pi[1])
                    self._outstream.write(ET.tostring(PI) + indentstring) #do not use encoding here. gives double xml prolog; possibly because ET.ElementTree.write i used again by write()
        #indent the xml elements
        if self.ta_info['indented']:
            self.botsindent(root)
        #write tree to file; this is different for different python/elementtree versions
        if sys.version < '2.7.0' and ET.VERSION < '1.3.0':
            xmltree.write(self._outstream,encoding=self.ta_info['charset'])
        else:
            xmltree.write(self._outstream,encoding=self.ta_info['charset'],xml_declaration=False)
    def botsindent(self,elem, level=0,indentstring='    '):
        ''' Recursively set .text/.tail of the elements so that serialisation
            comes out pretty-printed (standard ElementTree indent recipe). '''
        i = "\n" + level*indentstring
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = i + indentstring
            for e in elem:
                self.botsindent(e, level+1)
                if not e.tail or not e.tail.strip():
                    e.tail = i + indentstring
            if not e.tail or not e.tail.strip(): #last child: dedent the closing tag of elem
                e.tail = i
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = i
    def _node2xml(self,node):
        ''' recursive method.
            Converts *node* (and its whole subtree) to an ET element.
        '''
        newnode = self._node2xmlfields(node.record)
        for childnode in node.children:
            newnode.append(self._node2xml(childnode))
        return newnode
    def _node2xmlfields(self,noderecord):
        ''' fields in a node are written to xml fields; output is sorted according to grammar
            Field names containing ta_info['attributemarker'] become xml attributes
            of the record element or of the corresponding field element.
            Note: consumed entries are deleted from noderecord as they are used.
        '''
        #first generate the xml-'record'
        #~ print 'record',noderecord['BOTSID']
        attributedict = {}
        recordtag = noderecord['BOTSID']
        attributemarker = recordtag + self.ta_info['attributemarker'] #attributemarker is a marker in the fieldname used to find out if field is an attribute of either xml-'record' or xml-element
        #~ print '    rec_att_mark',attributemarker
        for key,value in noderecord.items(): #find attributes belonging to xml-'record' and store in attributedict
            if key.startswith(attributemarker):
                #~ print '    record attribute',key,value
                attributedict[key[len(attributemarker):]] = value
        xmlrecord = ET.Element(recordtag,attributedict) #make the xml ET node
        if 'BOTSCONTENT' in noderecord: #BOTSCONTENT is used to store the value/text of the xml-record itself.
            xmlrecord.text = noderecord['BOTSCONTENT']
            del noderecord['BOTSCONTENT']
        for key in attributedict.keys(): #remove used fields
            del noderecord[attributemarker+key]
        del noderecord['BOTSID'] #remove 'record' tag
        #generate xml-'fields' in xml-'record'; sort these by looping over records definition
        for field_def in self.defmessage.recorddefs[recordtag]: #loop over fields in 'record'
            if field_def[ID] not in noderecord: #if field not in outmessage: skip
                continue
            #~ print '    field',field_def
            attributedict = {}
            attributemarker = field_def[ID] + self.ta_info['attributemarker']
            #~ print '    field_att_mark',attributemarker
            for key,value in noderecord.items():
                if key.startswith(attributemarker):
                    #~ print '    field attribute',key,value
                    attributedict[key[len(attributemarker):]] = value
            ET.SubElement(xmlrecord, field_def[ID],attributedict).text=noderecord[field_def[ID]] #add xml element to xml record
            for key in attributedict.keys(): #remove used fields
                del noderecord[attributemarker+key]
            del noderecord[field_def[ID]] #remove xml entity tag
        return xmlrecord
    def _initwrite(self):
        ''' Open the output file (binary mode); the XML tree itself is written later. '''
        botsglobal.logger.debug(u'Start writing to file "%s".',self.ta_info['filename'])
        self._outstream = botslib.opendata(self.ta_info['filename'],"wb")
class xmlnocheck(xml):
    ''' XML output without checking the node tree against a grammar;
        fields are written in arbitrary (dict) order instead of grammar order.
    '''
    def normalisetree(self,node):
        ''' Override: skip normalisation/checking of the node tree. '''
        pass
    def _node2xmlfields(self,noderecord):
        ''' fields in a node are written to xml fields; output is NOT sorted
            (no grammar record definition is used here).
            Keys containing the attributemarker become XML attributes;
            BOTSCONTENT becomes the text of the record element.
            NOTE: noderecord is consumed - every used key is deleted from it.
        '''
        if 'BOTSID' not in noderecord:
            raise botslib.OutMessageError(_(u'No field "BOTSID" in xml-output in: "$record"'),record=noderecord)
        #first generate the xml-'record'
        attributedict = {}
        recordtag = noderecord['BOTSID']
        attributemarker = recordtag + self.ta_info['attributemarker']
        for key,value in noderecord.items(): #find the attributes for the xml-record, put these in attributedict
            if key.startswith(attributemarker):
                attributedict[key[len(attributemarker):]] = value
        xmlrecord = ET.Element(recordtag,attributedict) #make the xml ET node
        if 'BOTSCONTENT' in noderecord:   #BOTSCONTENT is the value/text of the xml-record itself
            xmlrecord.text = noderecord['BOTSCONTENT']
            del noderecord['BOTSCONTENT']
        for key in attributedict.keys(): #remove used fields
            del noderecord[attributemarker+key]
        del noderecord['BOTSID'] #remove 'record' tag
        #generate xml-'fields' in xml-'record'; not sorted
        noderecordcopy = noderecord.copy()   #iterate over a copy: noderecord is modified inside the loop
        for key,value in noderecordcopy.items():
            if key not in noderecord or self.ta_info['attributemarker'] in key: #if field not in outmessage: skip
                continue
            attributedict = {}
            attributemarker = key + self.ta_info['attributemarker']
            for key2,value2 in noderecord.items():   #collect attributes of this field
                if key2.startswith(attributemarker):
                    attributedict[key2[len(attributemarker):]] = value2
            ET.SubElement(xmlrecord, key,attributedict).text=value #add xml element to xml record
            for key2 in attributedict.keys(): #remove used fields
                del noderecord[attributemarker+key2]
            del noderecord[key] #remove xml entity tag
        return xmlrecord
class json(Outmessage):
    ''' Writer for json output: the node tree is converted to plain python
        objects and serialised with simplejson. When several messages go
        into one file they are wrapped in a json list.
    '''
    def _initwrite(self):
        #open the output file; start a json list when several messages share one file.
        super(json,self)._initwrite()
        if self.multiplewrite:
            self._outstream.write(u'[')
    def _write(self,node):
        ''' Serialise one message (node tree) to the open output stream. '''
        if self.nrmessagewritten:   #not the first message: separate from the previous one
            self._outstream.write(u',')
        payload = {node.record['BOTSID']: self._node2json(node)}
        indent = 2 if self.ta_info['indented'] else None
        simplejson.dump(payload, self._outstream, skipkeys=False, ensure_ascii=False, check_circular=False, indent=indent)
    def _closewrite(self):
        #close the json list (if used) before the stream itself is closed.
        if self.multiplewrite:
            self._outstream.write(u']')
        super(json,self)._closewrite()
    def _node2json(self,node):
        ''' Recursively convert a node tree to a python dict;
            children sharing a BOTSID are collected in one list.
        '''
        converted = node.record.copy()   #start from this node's own fields
        for child in node.children:
            converted.setdefault(child.record['BOTSID'], []).append(self._node2json(child))
        del converted['BOTSID']
        return converted
    def _node2jsonold(self,node):
        ''' Recursive conversion, older variant: a single child is stored
            as a dict, several children with the same BOTSID as a list.
        '''
        converted = node.record.copy()
        if node.children:   #only group when this node has child records
            grouped = {}
            for child in node.children:
                grouped.setdefault(child.record['BOTSID'], []).append(self._node2json(child))
            for tag,childlist in grouped.items():
                converted[tag] = childlist[0] if len(childlist) == 1 else childlist
        del converted['BOTSID']
        return converted
class jsonnocheck(json):
    ''' json output without checking the node tree against a grammar. '''
    def normalisetree(self,node):
        ''' Override: skip normalisation/checking of the node tree. '''
        pass
class template(Outmessage):
    ''' uses Kid library for templating.'''
    class TemplateData(object):
        #empty container object; mapping scripts set arbitrary attributes on it.
        pass
    def __init__(self,ta_info):
        self.data = template.TemplateData() #self.data is used by mapping script as container for content
        super(template,self).__init__(ta_info)
    def writeall(self):
        ''' Very different writeall:
            there is no tree of nodes; there is no grammar.structure/recorddefs; kid opens file by itself.
        '''
        try:
            import kid   #imported lazily: only needed for this editype
        except:   #NOTE(review): bare except also catches SystemExit/KeyboardInterrupt; kept as-is for file-wide consistency
            txt=botslib.txtexc()
            raise ImportError(_(u'Dependency failure: editype "template" requires python library "kid". Error:\n%s'%txt))
        #for template-grammar: only syntax is used. Section 'syntax' has to have 'template'
        self.outmessagegrammarread(self.ta_info['editype'],self.ta_info['messagetype'])
        templatefile = botslib.abspath(u'templates',self.ta_info['template'])
        try:
            botsglobal.logger.debug(u'Start writing to file "%s".',self.ta_info['filename'])
            ediprint = kid.Template(file=templatefile, data=self.data)
        except:
            txt=botslib.txtexc()
            raise botslib.OutMessageError(_(u'While templating "$editype.$messagetype", error:\n$txt'),editype=self.ta_info['editype'],messagetype=self.ta_info['messagetype'],txt=txt)
        try:
            f = botslib.opendata(self.ta_info['filename'],'wb')
            ediprint.write(f,
            #~ ediprint.write(botslib.abspathdata(self.ta_info['filename']),
                            encoding=self.ta_info['charset'],
                            output=self.ta_info['output'], #output is specific parameter for class; init from grammar.syntax
                            fragment=self.ta_info['merge'])
        except:
            txt=botslib.txtexc()
            raise botslib.OutMessageError(_(u'While templating "$editype.$messagetype", error:\n$txt'),editype=self.ta_info['editype'],messagetype=self.ta_info['messagetype'],txt=txt)
        botsglobal.logger.debug(_(u'End writing to file "%s".'),self.ta_info['filename'])
class templatehtml(Outmessage):
    ''' uses Genshi library for templating. Genshi is very similar to Kid, and is the fork/follow-up of Kid.
        Kid is not being deveolped further; in time Kid will not be in repositories etc.
        Templates for Genshi are like Kid templates. Changes:
        - other namespace: xmlns:py="http://genshi.edgewall.org/" instead of xmlns:py="http://purl.org/kid/ns#"
        - enveloping is different: <xi:include href="${message}" /> instead of <div py:replace="document(message)"/>
    '''
    class TemplateData(object):
        #empty container object; mapping scripts set arbitrary attributes on it.
        pass
    def __init__(self,ta_info):
        #bugfix: instantiate this class' own nested TemplateData; the original used
        #template.TemplateData (copy-paste from the kid-based class), leaving the
        #nested class above dead. Both are empty attribute bags, so this is behavior-compatible.
        self.data = templatehtml.TemplateData() #self.data is used by mapping script as container for content
        super(templatehtml,self).__init__(ta_info)
    def writeall(self):
        ''' Very different writeall:
            there is no tree of nodes; there is no grammar.structure/recorddefs; genshi reads the template file itself.
        '''
        try:
            from genshi.template import TemplateLoader   #imported lazily: only needed for this editype
        except:
            txt=botslib.txtexc()
            raise ImportError(_(u'Dependency failure: editype "template" requires python library "genshi". Error:\n%s'%txt))
        #for template-grammar: only syntax is used. Section 'syntax' has to have 'template'
        self.outmessagegrammarread(self.ta_info['editype'],self.ta_info['messagetype'])
        templatefile = botslib.abspath(u'templateshtml',self.ta_info['template'])
        try:
            botsglobal.logger.debug(u'Start writing to file "%s".',self.ta_info['filename'])
            loader = TemplateLoader(auto_reload=False)
            tmpl = loader.load(templatefile)
        except:
            txt=botslib.txtexc()
            raise botslib.OutMessageError(_(u'While templating "$editype.$messagetype", error:\n$txt'),editype=self.ta_info['editype'],messagetype=self.ta_info['messagetype'],txt=txt)
        try:
            f = botslib.opendata(self.ta_info['filename'],'wb')
            stream = tmpl.generate(data=self.data)
            stream.render(method='xhtml',encoding=self.ta_info['charset'],out=f)
        except:
            txt=botslib.txtexc()
            raise botslib.OutMessageError(_(u'While templating "$editype.$messagetype", error:\n$txt'),editype=self.ta_info['editype'],messagetype=self.ta_info['messagetype'],txt=txt)
        botsglobal.logger.debug(_(u'End writing to file "%s".'),self.ta_info['filename'])
class database(jsonnocheck):
    ''' 'database' editype: identical to jsonnocheck; kept as a separate name for configuration. '''
    pass
class db(Outmessage):
    ''' out.root is pickled, and saved.
        (here out.root is not a Node tree but an arbitrary python object set by the mapping script.)
    '''
    def __init__(self,ta_info):
        super(db,self).__init__(ta_info)
        self.root = None #make root None; root is not a Node-object anyway; None can easy be tested when writing.
    def writeall(self):
        ''' Pickle self.root to the output file.
            Raises OutMessageError when the mapping script never set a root object.
        '''
        if self.root is None:
            raise botslib.OutMessageError(_(u'No outgoing message')) #then there is nothing to write...
        botsglobal.logger.debug(u'Start writing to file "%s".',self.ta_info['filename'])
        self._outstream = botslib.opendata(self.ta_info['filename'],'wb')
        #fix: pickle.dump returns None - the former 'db_object =' binding was meaningless and is dropped.
        pickle.dump(self.root,self._outstream,2)   #protocol 2: compact binary format
        self._outstream.close()
        botsglobal.logger.debug(u'End writing to file "%s".',self.ta_info['filename'])
        self.ta_info['envelope'] = 'db' #use right enveloping for db: no coping etc, use same file.
class raw(Outmessage):
    ''' out.root is just saved.
        (here out.root is not a Node tree but the raw bytes/string set by the mapping script.)
    '''
    def __init__(self,ta_info):
        super(raw,self).__init__(ta_info)
        self.root = None #make root None; root is not a Node-object anyway; None can easy be tested when writing.
    def writeall(self):
        ''' Write self.root verbatim to the output file.
            Raises OutMessageError when the mapping script never set a root object.
        '''
        if self.root is None:
            raise botslib.OutMessageError(_(u'No outgoing message')) #then there is nothing to write...
        botsglobal.logger.debug(u'Start writing to file "%s".',self.ta_info['filename'])
        self._outstream = botslib.opendata(self.ta_info['filename'],'wb')
        self._outstream.write(self.root)
        self._outstream.close()
        botsglobal.logger.debug(u'End writing to file "%s".',self.ta_info['filename'])
        self.ta_info['envelope'] = 'raw' #use right enveloping for raw: no coping etc, use same file.
| [
[
1,
0,
0.0012,
0.0012,
0,
0.66,
0,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.0025,
0.0012,
0,
0.66,
0.0312,
509,
0,
1,
0,
0,
509,
0,
0
],
[
7,
0,
0.0056,
0.005,
0,
0... | [
"import time",
"import sys",
"try:\n import cPickle as pickle\nexcept:\n import pickle",
" import cPickle as pickle",
" import pickle",
"import decimal",
"NODECIMAL = decimal.Decimal(1)",
"try:\n import cElementTree as ET\n #~ print 'imported cElementTree'\nexcept ImportError:\n t... |
#URL configuration for the bots web interface (old-style Django urlconf).
from django.conf.urls.defaults import *
from django.contrib import admin,auth
from django.views.generic.simple import redirect_to
from django.contrib.auth.decorators import login_required,user_passes_test
from bots import views
admin.autodiscover()   #register all installed models with the django admin
#role-based access decorators for the views below
staff_required = user_passes_test(lambda u: u.is_staff)
superuser_required = user_passes_test(lambda u: u.is_superuser)
urlpatterns = patterns('',
    (r'^login.*', 'django.contrib.auth.views.login', {'template_name': 'admin/login.html'}),
    (r'^logout.*', 'django.contrib.auth.views.logout',{'next_page': '/'}),
    #login required
    (r'^home.*', login_required(views.home)),
    (r'^incoming.*', login_required(views.incoming)),
    (r'^detail.*', login_required(views.detail)),
    (r'^process.*', login_required(views.process)),
    (r'^outgoing.*', login_required(views.outgoing)),
    (r'^document.*', login_required(views.document)),
    (r'^reports.*', login_required(views.reports)),
    (r'^confirm.*', login_required(views.confirm)),
    (r'^filer.*', login_required(views.filer)),
    #only staff
    (r'^admin/$', login_required(views.home)), #do not show django admin root page
    (r'^admin/bots/$', login_required(views.home)), #do not show django admin root page
    (r'^admin/bots/uniek/.+$', redirect_to, {'url': '/admin/bots/uniek/'}), #hack. uniek counters can be changed (on main page), but never added. This rule disables the edit/add uniek pages.
    (r'^admin/', include(admin.site.urls)),
    (r'^runengine.+', staff_required(views.runengine)),
    #only superuser
    (r'^delete.*', superuser_required(views.delete)),
    (r'^plugin.*', superuser_required(views.plugin)),
    (r'^plugout.*', superuser_required(views.plugout)),
    (r'^unlock.*', superuser_required(views.unlock)),
    (r'^sendtestmail.*', superuser_required(views.sendtestmailmanagers)),
    #catch-all
    (r'^.*', 'bots.views.index'),
)
handler500='bots.views.server_error'
| [
[
1,
0,
0.025,
0.025,
0,
0.66,
0,
341,
0,
1,
0,
0,
341,
0,
0
],
[
1,
0,
0.05,
0.025,
0,
0.66,
0.1111,
302,
0,
2,
0,
0,
302,
0,
0
],
[
1,
0,
0.075,
0.025,
0,
0.66,
... | [
"from django.conf.urls.defaults import *",
"from django.contrib import admin,auth",
"from django.views.generic.simple import redirect_to",
"from django.contrib.auth.decorators import login_required,user_passes_test",
"from bots import views",
"admin.autodiscover()",
"staff_required = user_passes_test(la... |
from django import template
register = template.Library()
@register.filter
def url2path(value):
    ''' Template filter: reduce a request URL to a short page name.
        Strips the '/admin/bots/' or leading '/' prefix and any trailing
        slash; an empty result maps to 'home'.
    '''
    if value.startswith('/admin/bots/'):
        trimmed = value[12:]
    else:
        trimmed = value[1:]
    if not trimmed:
        return 'home'
    if trimmed.endswith('/'):
        trimmed = trimmed[:-1]
    return trimmed
| [
[
1,
0,
0.0588,
0.0588,
0,
0.66,
0,
294,
0,
1,
0,
0,
294,
0,
0
],
[
14,
0,
0.1765,
0.0588,
0,
0.66,
0.5,
276,
3,
0,
0,
0,
77,
10,
1
],
[
2,
0,
0.6471,
0.6471,
0,
0.... | [
"from django import template",
"register = template.Library()",
"def url2path(value):\n if value.startswith('/admin/bots/'):\n value = value[12:]\n else:\n value = value[1:]\n if value:\n if value[-1] == '/':\n value = value[:-1]",
" if value.startswith('/admin/bots... |
import sys
from django.utils.translation import ugettext as _
#bots-modules
import communication
import envelope
import transform
import botslib
import botsglobal
import preprocess
from botsconfig import *
@botslib.log_session
def prepareretransmit():
    ''' prepare the retransmittable files (marked via the GUI).
        Re-receive: filereports flagged retransmit are re-injected as new input.
        Resend: EXTERNOUT ta's flagged retransmit get their RAWOUT parent re-injected.
        Return: indication if files should be retransmitted.'''
    retransmit = False #indicate retransmit
    #for rereceive
    for row in botslib.query('''SELECT idta,reportidta
                                FROM filereport
                                WHERE retransmit=%(retransmit)s ''',
                                {'retransmit':True}):
        retransmit = True
        #reset the flag so the file is re-received only once
        botslib.change('''UPDATE filereport
                          SET retransmit=%(retransmit)s
                          WHERE idta=%(idta)s
                          AND reportidta=%(reportidta)s ''',
                          {'idta':row['idta'],'reportidta':row['reportidta'],'retransmit':False})
        for row2 in botslib.query('''SELECT idta
                                    FROM ta
                                    WHERE parent=%(parent)s
                                    AND status=%(status)s''',
                                    {'parent':row['idta'],
                                    'status':RAWIN}):
            ta_rereceive = botslib.OldTransaction(row2['idta'])
            ta_externin = ta_rereceive.copyta(status=EXTERNIN,statust=DONE,parent=0) #inject; status is DONE so this ta is not used further
            ta_raw = ta_externin.copyta(status=RAWIN,statust=OK) #reinjected file is ready as new input
    #for resend; this one is slow. Can be improved by having a separate list of idta to resend
    for row in botslib.query('''SELECT idta,parent
                                FROM ta
                                WHERE retransmit=%(retransmit)s
                                AND status=%(status)s''',
                                {'retransmit':True,
                                'status':EXTERNOUT}):
        retransmit = True
        ta_outgoing = botslib.OldTransaction(row['idta'])
        ta_outgoing.update(retransmit=False) #is reinjected; set retransmit back to False
        ta_resend = botslib.OldTransaction(row['parent']) #parent ta with status RAWOUT; this is where the outgoing file is kept
        ta_externin = ta_resend.copyta(status=EXTERNIN,statust=DONE,parent=0) #inject; status is DONE so this ta is not used further
        ta_raw = ta_externin.copyta(status=RAWOUT,statust=OK) #reinjected file is ready as new input
    return retransmit
@botslib.log_session
def preparerecommunication():
    ''' prepare re-communication for out-communication processes that failed
        and were flagged for retry: re-inject all RAWOUT files of the failed
        run for the same channel. Return: indication if anything was re-injected. '''
    #for each out-communication process that went wrong:
    retransmit = False #indicate retransmit
    for row in botslib.query('''SELECT idta,tochannel
                                FROM ta
                                WHERE statust!=%(statust)s
                                AND status=%(status)s
                                AND retransmit=%(retransmit)s ''',
                                {'status':PROCESS,'retransmit':True,'statust':DONE}):
        run_outgoing = botslib.OldTransaction(row['idta'])
        run_outgoing.update(retransmit=False) #set retransmit back to False
        #get rootidta of run where communication failed
        for row2 in botslib.query('''SELECT max(idta) as rootidta
                                    FROM ta
                                    WHERE script=%(script)s
                                    AND idta<%(thisidta)s ''',
                                    {'script':0,'thisidta':row['idta']}):
            rootidta = row2['rootidta']
        #get endidta of run where communication failed
        for row3 in botslib.query('''SELECT min(idta) as endidta
                                    FROM ta
                                    WHERE script=%(script)s
                                    AND idta>%(thisidta)s ''',
                                    {'script':0,'thisidta':row['idta']}):
            endidta = row3['endidta']
        if not endidta:   #failed run is the latest run: no upper bound exists
            endidta = sys.maxint - 1   #NOTE(review): sys.maxint is Python 2 only
        #reinject
        for row4 in botslib.query('''SELECT idta
                                    FROM ta
                                    WHERE idta<%(endidta)s
                                    AND idta>%(rootidta)s
                                    AND status=%(status)s
                                    AND statust=%(statust)s
                                    AND tochannel=%(tochannel)s ''',
                                    {'statust':OK,'status':RAWOUT,'rootidta':rootidta,'endidta':endidta,'tochannel':row['tochannel']}):
            retransmit = True
            ta_outgoing = botslib.OldTransaction(row4['idta'])
            ta_outgoing_copy = ta_outgoing.copyta(status=RAWOUT,statust=OK)   #fresh copy will be picked up by the next run
            ta_outgoing.update(statust=DONE)   #original is closed off
    return retransmit
@botslib.log_session
def prepareautomaticrecommunication():
    ''' reinjects all files for which communication failed (status = RAWOUT)
        since the last automatic retry (or the last error, whichever is later).
    '''
    retransmit = False #indicate retransmit
    #bots keeps track of last time automaticretrycommunication was done; reason is mainly performance
    startidta = max(botslib.keeptrackoflastretry('bots__automaticretrycommunication',botslib.getlastrun()),botslib.get_idta_last_error())
    #reinject
    for row4 in botslib.query('''SELECT idta
                                FROM ta
                                WHERE idta>%(startidta)s
                                AND status=%(status)s
                                AND statust=%(statust)s ''',
                                {'statust':OK,'status':RAWOUT,'startidta':startidta}):
        retransmit = True
        ta_outgoing = botslib.OldTransaction(row4['idta'])
        ta_outgoing_copy = ta_outgoing.copyta(status=RAWOUT,statust=OK)   #fresh copy will be picked up by the next run
        ta_outgoing.update(statust=DONE)   #original is closed off
    return retransmit
@botslib.log_session
def prepareretry():
    ''' reinjects ALL ta's (any status) left at statust=OK since the last
        retry / last error - i.e. everything that was not processed further.
    '''
    retransmit = False #indicate retransmit
    #bots keeps track of last time retry was done; reason is mainly performance
    startidta = max(botslib.keeptrackoflastretry('bots__retry',botslib.getlastrun()),botslib.get_idta_last_error())
    #reinject
    for row4 in botslib.query('''SELECT idta,status
                                FROM ta
                                WHERE idta>%(startidta)s
                                AND statust=%(statust)s ''',
                                {'statust':OK,'startidta':startidta}):
        retransmit = True
        ta_outgoing = botslib.OldTransaction(row4['idta'])
        ta_outgoing_copy = ta_outgoing.copyta(status=row4['status'],statust=OK)   #copy keeps the original status
        ta_outgoing.update(statust=DONE)   #original is closed off
    return retransmit
@botslib.log_session
def routedispatcher(routestorun,type=None):
    ''' run all route(s).
        routestorun: list of route ids to run.
        type: optional command line flag selecting a retransmit/retry mode;
              when the corresponding prepare step finds nothing, return 0.
        NOTE(review): parameter name 'type' shadows the builtin; kept because it
        is part of the public signature.
        Return: value of botslib.getlastrun() before the run (for evaluation), or 0. '''
    if type == '--retransmit':
        if not prepareretransmit():
            return 0
    elif type == '--retrycommunication':
        if not preparerecommunication():
            return 0
    elif type == '--automaticretrycommunication':
        if not prepareautomaticrecommunication():
            return 0
    elif type == '--retry':
        if not prepareretry():
            return 0
    stuff2evaluate = botslib.getlastrun()
    botslib.set_minta4query()
    for route in routestorun:
        foundroute=False
        botslib.setpreprocessnumber(SET_FOR_PROCESSING)
        #a route can consist of several parts, ordered by seq; each part is run in order
        for routedict in botslib.query('''SELECT idroute    ,
                                                 fromchannel_id as fromchannel,
                                                 tochannel_id as tochannel,
                                                 fromeditype,
                                                 frommessagetype,
                                                 alt,
                                                 frompartner_id as frompartner,
                                                 topartner_id as topartner,
                                                 toeditype,
                                                 tomessagetype,
                                                 seq,
                                                 frompartner_tochannel_id,
                                                 topartner_tochannel_id,
                                                 testindicator,
                                                 translateind,
                                                 defer
                                        FROM routes
                                        WHERE idroute=%(idroute)s
                                        AND active=%(active)s
                                        ORDER BY seq''',
                                        {'idroute':route,'active':True}):
            botsglobal.logger.info(_(u'running route %(idroute)s %(seq)s'),{'idroute':routedict['idroute'],'seq':routedict['seq']})
            botslib.setrouteid(routedict['idroute'])
            foundroute=True
            router(routedict)
            botslib.setrouteid('')
            botsglobal.logger.debug(u'finished route %s %s',routedict['idroute'],routedict['seq'])
        if not foundroute:
            botsglobal.logger.warning(_(u'there is no (active) route "%s".'),route)
    return stuff2evaluate
@botslib.log_session
def router(routedict):
    ''' communication.run one route. variants:
        - a route can be just script;
        - a route can do only incoming
        - a route can do only outgoing
        - a route can do both incoming and outgoing
        - at several points functions from a route script are called - if function is in route script
    '''
    #is there a user route script?
    try:
        userscript,scriptname = botslib.botsimport('routescripts',routedict['idroute'])
    except ImportError: #other errors, eg syntax errors are just passed
        userscript = scriptname = None
    #if user route script has function 'main': communication.run 'main' (and do nothing else)
    if botslib.tryrunscript(userscript,scriptname,'main',routedict=routedict):
        return  #so: if function ' main' : communication.run only the routescript, nothing else.
    if not (userscript or routedict['fromchannel'] or routedict['tochannel'] or routedict['translateind']):
        raise botslib.ScriptError(_(u'Route "$route" is empty: no script, not enough parameters.'),route=routedict['idroute'])
    botslib.tryrunscript(userscript,scriptname,'start',routedict=routedict)
    #communication.run incoming channel
    if routedict['fromchannel']:     #do incoming part of route: in-communication; set ready for translation; translate
        botslib.tryrunscript(userscript,scriptname,'preincommunication',routedict=routedict)
        communication.run(idchannel=routedict['fromchannel'],idroute=routedict['idroute'])  #communication.run incommunication
        #add attributes from route to the received files
        where={'status':FILEIN,'fromchannel':routedict['fromchannel'],'idroute':routedict['idroute']}
        change={'editype':routedict['fromeditype'],'messagetype':routedict['frommessagetype'],'frompartner':routedict['frompartner'],'topartner':routedict['topartner'],'alt':routedict['alt']}
        botslib.updateinfo(change=change,where=where)
        #all received files have status FILEIN
        botslib.tryrunscript(userscript,scriptname,'postincommunication',routedict=routedict)
        if routedict['fromeditype'] == 'mailbag':        #mailbag for the route; unpack mailbag into individual messages
            preprocess.preprocess(routedict,preprocess.mailbag)
    #communication.run translation
    if routedict['translateind']:
        botslib.tryrunscript(userscript,scriptname,'pretranslation',routedict=routedict)
        botslib.addinfo(change={'status':TRANSLATE},where={'status':FILEIN,'idroute':routedict['idroute']})
        transform.translate(idroute=routedict['idroute'])
        botslib.tryrunscript(userscript,scriptname,'posttranslation',routedict=routedict)
    #merge messages & communication.run outgoing channel
    if routedict['tochannel']:   #do outgoing part of route
        botslib.tryrunscript(userscript,scriptname,'premerge',routedict=routedict)
        envelope.mergemessages(idroute=routedict['idroute'])
        botslib.tryrunscript(userscript,scriptname,'postmerge',routedict=routedict)
        #communication.run outgoing channel
        #build for query: towhere (dict) and wherestring
        towhere=dict(status=MERGED,
                    idroute=routedict['idroute'],
                    editype=routedict['toeditype'],
                    messagetype=routedict['tomessagetype'],
                    testindicator=routedict['testindicator'])
        towhere=dict([(key, value) for (key, value) in towhere.iteritems() if value])  #remove nul-values from dict; NOTE(review): iteritems is Python 2 only
        wherestring = ' AND '.join([key+'=%('+key+')s' for key in towhere])
        if routedict['frompartner_tochannel_id']:   #use frompartner_tochannel in where-clause of query (partner/group dependent outchannel
            towhere['frompartner_tochannel_id']=routedict['frompartner_tochannel_id']
            wherestring += ''' AND (frompartner=%(frompartner_tochannel_id)s
                                    OR frompartner in (SELECT from_partner_id
                                                        FROM partnergroup
                                                        WHERE to_partner_id =%(frompartner_tochannel_id)s ))'''
        if routedict['topartner_tochannel_id']:   #use topartner_tochannel in where-clause of query (partner/group dependent outchannel
            towhere['topartner_tochannel_id']=routedict['topartner_tochannel_id']
            wherestring += ''' AND (topartner=%(topartner_tochannel_id)s
                                    OR topartner in (SELECT from_partner_id
                                                        FROM partnergroup
                                                        WHERE to_partner_id=%(topartner_tochannel_id)s ))'''
        toset={'tochannel':routedict['tochannel'],'status':FILEOUT}
        botslib.addinfocore(change=toset,where=towhere,wherestring=wherestring)
        if not routedict['defer']:   #do outgoing part of route
            botslib.tryrunscript(userscript,scriptname,'preoutcommunication',routedict=routedict)
            communication.run(idchannel=routedict['tochannel'],idroute=routedict['idroute'])    #communication.run outcommunication
            botslib.tryrunscript(userscript,scriptname,'postoutcommunication',routedict=routedict)
    botslib.tryrunscript(userscript,scriptname,'end',routedict=routedict)
| [
[
1,
0,
0.0037,
0.0037,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0075,
0.0037,
0,
0.66,
0.0714,
389,
0,
1,
0,
0,
389,
0,
0
],
[
1,
0,
0.0149,
0.0037,
0,
... | [
"import sys",
"from django.utils.translation import ugettext as _",
"import communication",
"import envelope",
"import transform",
"import botslib",
"import botsglobal",
"import preprocess",
"from botsconfig import *",
"def prepareretransmit():\n ''' prepare the retransmittable files. Return: i... |
import sys
import os
import botsinit
import botslib
import grammar
def showusage():
    ''' Print command line usage to stdout and exit(0). (Python 2 print statements.) '''
    print
    print " Usage: %s -c<directory> <editype> <messagetype>"%os.path.basename(sys.argv[0])
    print
    print " Checks a Bots grammar."
    print " Same checks are used as in translations with bots-engine."
    print " Searches for grammar in regular place: bots/usersys/grammars/<editype>/<messagetype>.py"
    print " Options:"
    print " -c<directory> directory for configuration files (default: config)."
    print " Example:"
    print " %s -cconfig edifact ORDERSD96AUNEAN008"%os.path.basename(sys.argv[0])
    print
    sys.exit(0)
def startmulti(grammardir,editype):
    ''' used in seperate tool for bulk checking of gramamrs while developing edifact->botsgramamrs.
        grammardir: glob pattern of grammar files to check; editype: editype for all of them.
        NOTE(review): calls botslib.generalinit/initenginelogging, while start() below uses
        botsinit for the same purpose - confirm which module actually provides these. '''
    import glob
    botslib.generalinit('config')
    botslib.initenginelogging()
    for g in glob.glob(grammardir):
        g1 = os.path.basename(g)
        g2 = os.path.splitext(g1)[0]    #module name without extension
        if g1 in ['__init__.py']:
            continue
        if g1.startswith('edifact'):
            continue
        #skip shared record-definition modules; they are not stand-alone grammars
        if g1.startswith('records') or g1.endswith('records.py'):
            continue
        try:
            grammar.grammarread(editype,g2)
        except:
            #~ print 'Found error in grammar:',g
            print botslib.txtexc()
            print '\n'
        else:
            print 'OK - no error found in grammar',g,'\n'
def start():
    ''' Entry point: parse command line (config dir, editype, messagetype),
        initialise bots, and check the indicated grammar. '''
    #********command line arguments**************************
    editype =''
    messagetype = ''
    configdir = 'config'
    for arg in sys.argv[1:]:
        if not arg:
            continue
        if arg.startswith('-c'):
            configdir = arg[2:]
            if not configdir:
                print ' !!Indicated Bots should use specific .ini file but no file name was given.'
                showusage()
        elif arg in ["?", "/?"] or arg.startswith('-'):
            showusage()
        else:
            #positional arguments: first is editype, second is messagetype
            if not editype:
                editype = arg
            else:
                messagetype = arg
    if not (editype and messagetype):
        print ' !!Both editype and messagetype are required.'
        showusage()
    #********end handling command line arguments**************************
    try:
        botsinit.generalinit(configdir)
        botsinit.initenginelogging()
        grammar.grammarread(editype,messagetype)
    except:
        print 'Found error in grammar:'
        print botslib.txtexc()
    else:
        print 'OK - no error found in grammar'
if __name__=='__main__':    #script entry point when run directly
    start()
| [
[
1,
0,
0.0132,
0.0132,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0263,
0.0132,
0,
0.66,
0.125,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0395,
0.0132,
0,
0... | [
"import sys",
"import os",
"import botsinit",
"import botslib",
"import grammar",
"def showusage():\n print(\" Same checks are used as in translations with bots-engine.\")\n print(\" Searches for grammar in regular place: bots/usersys/grammars/<editype>/<messagetype>.py\")\n print(\" Opt... |
import os
import sys
import atexit
import traceback
import logging
#import bots-modules
import bots.botslib as botslib
import bots.botsglobal as botsglobal
def showusage():
    ''' Print command line usage to stdout. (Python 2 print statements.) '''
    print ' Update existing bots database for new release 1.6.0'
    print ' Options:'
    print " -c<directory> directory for configuration files (default: config)."
def start(configdir = 'config'):
    ''' Update an existing bots database to the 1.6.0 schema:
        adds new columns and the confirmrule table, widens ta.errortext.
        Exits non-zero on any failure; rolls back on database errors.
        NOTE(review): 'botsinit' is used below but only botslib/botsglobal are
        imported at module level, and 'botsinifile' is never defined - both
        look like NameErrors waiting to happen; confirm against the repo. '''
    #********command line arguments**************************
    for arg in sys.argv[1:]:
        if not arg:
            continue
        if arg.startswith('-c'):
            configdir = arg[2:]
            if not configdir:
                print 'Indicated Bots should use specific .ini file but no file name was given.'
                sys.exit(1)
        elif arg in ["?", "/?"] or arg.startswith('-'):
            showusage()
            sys.exit(0)
        else: #pick up names of routes to run
            showusage()
    #**************initialise configuration file******************************
    try:
        botsinit.generalinit(configdir)
        botslib.settimeout(botsglobal.ini.getint('settings','globaltimeout',10)) #
    except:
        traceback.print_exc()
        print 'Error in reading/initializing ini-file.'
        sys.exit(1)
    #**************initialise logging******************************
    try:
        botsinit.initenginelogging()
    except:
        traceback.print_exc()
        print 'Error in initialising logging system.'
        sys.exit(1)
    else:
        atexit.register(logging.shutdown)   #flush log handlers at interpreter exit
        botsglobal.logger.info('Python version: "%s".',sys.version)
        botsglobal.logger.info('Bots configuration file: "%s".',botsinifile)
        botsglobal.logger.info('Bots database configuration file: "%s".',botslib.join('config',os.path.basename(botsglobal.ini.get('directories','tgconfig','botstg.cfg'))))
    #**************connect to database**********************************
    try:
        botslib.connect()
    except:
        traceback.print_exc()
        print 'Error connecting to database.'
        sys.exit(1)
    else:
        atexit.register(botsglobal.db.close)   #close db connection at interpreter exit
    try:
        cursor = botsglobal.db.cursor()
        #new columns for release 1.6.0
        cursor.execute('''ALTER TABLE routes ADD COLUMN notindefaultrun BOOLEAN''',None)
        cursor.execute('''ALTER TABLE channel ADD COLUMN archivepath VARCHAR(256)''',None)
        cursor.execute('''ALTER TABLE partner ADD COLUMN mail VARCHAR(256)''',None)
        cursor.execute('''ALTER TABLE partner ADD COLUMN cc VARCHAR(256)''',None)
        cursor.execute('''ALTER TABLE chanpar ADD COLUMN cc VARCHAR(256)''',None)
        cursor.execute('''ALTER TABLE ta ADD COLUMN confirmasked BOOLEAN''',None)
        cursor.execute('''ALTER TABLE ta ADD COLUMN confirmed BOOLEAN''',None)
        cursor.execute('''ALTER TABLE ta ADD COLUMN confirmtype VARCHAR(35) DEFAULT '' ''',None)
        cursor.execute('''ALTER TABLE ta ADD COLUMN confirmidta INTEGER DEFAULT 0''',None)
        cursor.execute('''ALTER TABLE ta ADD COLUMN envelope VARCHAR(35) DEFAULT '' ''',None)
        cursor.execute('''ALTER TABLE ta ADD COLUMN botskey VARCHAR(35) DEFAULT '' ''',None)
        cursor.execute('''ALTER TABLE ta ADD COLUMN cc VARCHAR(512) DEFAULT '' ''',None)
        #widen errortext; syntax differs per database backend
        if botsglobal.dbinfo.drivername == 'mysql':
            cursor.execute('''ALTER TABLE ta MODIFY errortext VARCHAR(2048)''',None)
        elif botsglobal.dbinfo.drivername == 'postgres':
            cursor.execute('''ALTER TABLE ta ALTER COLUMN errortext type VARCHAR(2048)''',None)
        #else: #sqlite does not allow modifying existing field, but does not check lengths either so this works.
        cursor.execute('''CREATE TABLE confirmrule (
                            id INTEGER PRIMARY KEY,
                            active BOOLEAN,
                            confirmtype VARCHAR(35),
                            ruletype VARCHAR(35),
                            negativerule BOOLEAN,
                            frompartner VARCHAR(35),
                            topartner VARCHAR(35),
                            idchannel VARCHAR(35),
                            idroute VARCHAR(35),
                            editype VARCHAR(35),
                            messagetype VARCHAR(35) )
                            ''',None)
    except:
        traceback.print_exc()
        print 'Error while updating the database. Database is not updated.'
        botsglobal.db.rollback()   #undo any partial schema change
        sys.exit(1)
    botsglobal.db.commit()
    cursor.close()
    print 'Database is updated.'
    sys.exit(0)
| [
[
1,
0,
0.0096,
0.0096,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0192,
0.0096,
0,
0.66,
0.125,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0288,
0.0096,
0,
0... | [
"import os",
"import sys",
"import atexit",
"import traceback",
"import logging",
"import bots.botslib as botslib",
"import bots.botsglobal as botsglobal",
"def showusage():\n print(' Update existing bots database for new release 1.6.0')\n print(' Options:')\n print(\" -c<directo... |
import decimal
import copy
from django.utils.translation import ugettext as _
import botslib
import botsglobal
from botsconfig import *
comparekey=None   #module-level key (mpath tuple) used by nodecompare; set by the caller before sorting
def nodecompare(node):
    ''' Sort key function: return the value of the field indicated by the
        module-level comparekey (must be set first) for the given node. '''
    global comparekey
    return node.get(*comparekey)
class Node(object):
    ''' Node class for building trees in inmessage and outmessage.
        A node holds one record (dict of fields) and a list of child nodes.
    '''
    def __init__(self,record=None,BOTSIDnr=None):
        self.record = record    #record is a dict with fields
        if self.record is not None:
            #every record carries a BOTSIDnr; default to '1' when not supplied
            if BOTSIDnr is None:
                if not 'BOTSIDnr' in self.record:
                    self.record['BOTSIDnr'] = '1'
            else:
                self.record['BOTSIDnr'] = BOTSIDnr
        self.children = []      #child Node objects, in order
        self._queries = None    #lazily-created dict of query values (see queries property)
def getquerie(self):
''' get queries of a node '''
if self._queries:
return self._queries
else:
return {}
    def updatequerie(self,updatequeries):
        ''' set/update queries of a node with dict updatequeries;
            the first call copies the dict, later calls merge into it.
        '''
        if updatequeries:
            if self._queries is None:
                self._queries = updatequeries.copy()
            else:
                self._queries.update(updatequeries)
    queries = property(getquerie,updatequerie)   #node.queries: read via getquerie, assignment merges via updatequerie
    def processqueries(self,queries,maxlevel):
        ''' copies values for queries 'down the tree' untill right level.
            So when edi file is split up in messages,
            querie-info from higher levels is copied to message.
            queries: dict to merge into this node; maxlevel: remaining depth. '''
        self.queries = queries   #merges via the queries property (updatequerie)
        if self.record and not maxlevel:   #reached the target level: stop descending
            return
        for child in self.children:
            child.processqueries(self.queries,maxlevel-1)
def append(self,node):
'''append child to node'''
self.children += [node]
    def display(self,level=0):
        '''for debugging: print this node's record and all descendants, indented per level.
           usage: in mapping script: inn.root.display()
        '''
        if level==0:
            print 'displaying all nodes in node tree:'
        print '    '*level,self.record
        for child in self.children:
            child.display(level+1)
    def displayqueries(self,level=0):
        '''for debugging: print BOTSID and queries of this node and all descendants.
           usage: in mapping script: inn.root.displayqueries()
        '''
        if level==0:
            print 'displaying queries for nodes in tree'
        print '    '*level,'node:',
        if self.record:
            print self.record['BOTSID'],
        else:
            print 'None',
        print '',
        print self.queries
        for child in self.children:
            child.displayqueries(level+1)
def enhancedget(self,mpaths,replace=False):
''' to get QUERIES or SUBTRANSLATION while parsing edifile;
mpath can be
- dict: do get(mpath); can not be a mpath with multiple
- tuple: do get(mpath); can be multiple dicts in mapth
- list: for each listmembr do a get(); append the results
Used by:
- QUERIES
- SUBTRANSLATION
'''
if isinstance(mpaths,dict):
return self.get(mpaths)
elif isinstance(mpaths,tuple):
return self.get(*mpaths)
elif isinstance(mpaths,list):
collect = u''
for mpath in mpaths:
found = self.get(mpath)
if found:
if replace:
found = found.replace('.','_')
collect += found
return collect
else:
raise botslib.MappingFormatError(_(u'must be dict, list or tuple: enhancedget($mpath)'),mpath=mpaths)
def change(self,where,change):
''' '''
#find first matching node using 'where'. Do not look at other matching nodes (is a feature)
#prohibit change of BOTSID?
mpaths = where #diff from getcore
if not mpaths or not isinstance(mpaths,tuple):
raise botslib.MappingFormatError(_(u'parameter "where" must be tuple: change(where=$where,change=$change)'),where=where,change=change)
#check: 'BOTSID' is required
#check: all values should be strings
for part in mpaths:
if not isinstance(part,dict):
raise botslib.MappingFormatError(_(u'parameter "where" must be dicts in a tuple: change(where=$where,change=$change)'),where=where,change=change)
if not 'BOTSID' in part:
raise botslib.MappingFormatError(_(u'section without "BOTSID": change(where=$where,change=$change)'),where=where,change=change)
for key,value in part.iteritems():
if not isinstance(key,basestring):
raise botslib.MappingFormatError(_(u'keys must be strings: change(where=$where,change=$change)'),where=where,change=change)
if not isinstance(value,basestring):
raise botslib.MappingFormatError(_(u'values must be strings: change(where=$where,change=$change)'),where=where,change=change)
#check change parameter
if not change or not isinstance(change,dict):
raise botslib.MappingFormatError(_(u'parameter "change" must be dict: change(where=$where,change=$change)'),where=where,change=change)
#remove 'BOTSID' from change.
#check: all values should be strings
change.pop('BOTSID','nep')
for key,value in change.iteritems():
if not isinstance(key,basestring):
raise botslib.MappingFormatError(_(u'keys in "change" must be strings: change(where=$where,change=$change)'),where=where,change=change)
if not isinstance(value,basestring) and value is not None:
raise botslib.MappingFormatError(_(u'values in "change" must be strings or "None": change(where=$where,change=$change)'),where=where,change=change)
#go get it!
terug = self._changecore(where,change)
botsglobal.logmap.debug(u'"%s" for change(where=%s,change=%s)',terug,str(where),str(change))
return terug
def _changecore(self,where,change): #diff from getcore
mpaths = where #diff from getcore
mpath = mpaths[0]
if self.record['BOTSID'] == mpath['BOTSID']: #is record-id equal to mpath-botsid? Not strictly needed, but gives much beter performance...
for part in mpath: #check all mpath-parts;
if part in self.record:
if mpath[part] == self.record[part]:
continue
else: #content of record-field and mpath-part do not match
return False
else: #not all parts of mpath are in record, so no match:
return False
else: #all parts are matched, and OK.
if len(mpaths) == 1: #mpath is exhausted; so we are there!!! #replace values with values in 'change'; delete if None
for key,value in change.iteritems():
if value is None:
self.record.pop(key,'dummy for pop')
else:
self.record[key]=value
return True
else:
for childnode in self.children:
terug = childnode._changecore(mpaths[1:],change) #search recursive for rest of mpaths #diff from getcore
if terug:
return terug
else: #no child has given a valid return
return False
else: #record-id is not equal to mpath-botsid, so no match
return False
def delete(self,*mpaths):
''' delete the last record of mpath if found (first: find/identify, than delete. '''
if not mpaths or not isinstance(mpaths,tuple):
raise botslib.MappingFormatError(_(u'must be dicts in tuple: delete($mpath)'),mpath=mpaths)
if len(mpaths) ==1:
raise botslib.MappingFormatError(_(u'only one dict: not allowed. Use different solution: delete($mpath)'),mpath=mpaths)
#check: None only allowed in last section of Mpath (check firsts sections)
#check: 'BOTSID' is required
#check: all values should be strings
for part in mpaths:
if not isinstance(part,dict):
raise botslib.MappingFormatError(_(u'must be dicts in tuple: delete($mpath)'),mpath=mpaths)
if not 'BOTSID' in part:
raise botslib.MappingFormatError(_(u'section without "BOTSID": delete($mpath)'),mpath=mpaths)
for key,value in part.iteritems():
if not isinstance(key,basestring):
raise botslib.MappingFormatError(_(u'keys must be strings: delete($mpath)'),mpath=mpaths)
if not isinstance(value,basestring):
raise botslib.MappingFormatError(_(u'values must be strings: delete($mpath)'),mpath=mpaths)
#go get it!
terug = bool(self._deletecore(*mpaths))
botsglobal.logmap.debug(u'"%s" for delete%s',terug,str(mpaths))
return terug #return False if not removed, return True if removed
def _deletecore(self,*mpaths):
mpath = mpaths[0]
if self.record['BOTSID'] == mpath['BOTSID']: #is record-id equal to mpath-botsid? Not strictly needed, but gives much beter performance...
for part in mpath: #check all mpath-parts;
if part in self.record:
if mpath[part] == self.record[part]:
continue
else: #content of record-field and mpath-part do not match
return 0
else: #not all parts of mpath are in record, so no match:
return 0
else: #all parts are matched, and OK.
if len(mpaths) == 1: #mpath is exhausted; so we are there!!!
return 2
else:
for i, childnode in enumerate(self.children):
terug = childnode._deletecore(*mpaths[1:]) #search recursive for rest of mpaths
if terug == 2: #indicates node should be removed
del self.children[i] #remove node
return 1 #this indicates: deleted successfull, do not remove anymore (no removal of parents)
if terug:
return terug
else: #no child has given a valid return
return 0
else: #record-id is not equal to mpath-botsid, so no match
return 0
def get(self,*mpaths):
''' get value of a field in a record from a edi-message
mpath is xpath-alike query to identify the record/field
function returns 1 value; return None if nothing found.
if more than one value can be found: first one is returned
starts searching in current node, then deeper
'''
if not mpaths or not isinstance(mpaths,tuple):
raise botslib.MappingFormatError(_(u'must be dicts in tuple: get($mpath)'),mpath=mpaths)
for part in mpaths:
if not isinstance(part,dict):
raise botslib.MappingFormatError(_(u'must be dicts in tuple: get($mpath)'),mpath=mpaths)
#check: None only allowed in last section of Mpath (check firsts sections)
#check: 'BOTSID' is required
#check: all values should be strings
for part in mpaths[:-1]:
if not 'BOTSID' in part:
raise botslib.MappingFormatError(_(u'section without "BOTSID": get($mpath)'),mpath=mpaths)
if not 'BOTSIDnr' in part:
part['BOTSIDnr'] = '1'
for key,value in part.iteritems():
if not isinstance(key,basestring):
raise botslib.MappingFormatError(_(u'keys must be strings: get($mpath)'),mpath=mpaths)
if not isinstance(value,basestring):
raise botslib.MappingFormatError(_(u'values must be strings: get($mpath)'),mpath=mpaths)
#check: None only allowed in last section of Mpath (check last section)
#check: 'BOTSID' is required
#check: all values should be strings
if not 'BOTSID' in mpaths[-1]:
raise botslib.MappingFormatError(_(u'last section without "BOTSID": get($mpath)'),mpath=mpaths)
if not 'BOTSIDnr' in mpaths[-1]:
mpaths[-1]['BOTSIDnr'] = '1'
count = 0
for key,value in mpaths[-1].iteritems():
if not isinstance(key,basestring):
raise botslib.MappingFormatError(_(u'keys must be strings in last section: get($mpath)'),mpath=mpaths)
if value is None:
count += 1
elif not isinstance(value,basestring):
raise botslib.MappingFormatError(_(u'values must be strings (or none) in last section: get($mpath)'),mpath=mpaths)
if count > 1:
raise botslib.MappingFormatError(_(u'max one "None" in last section: get($mpath)'),mpath=mpaths)
#go get it!
terug = self._getcore(*mpaths)
botsglobal.logmap.debug(u'"%s" for get%s',terug,str(mpaths))
return terug
def _getcore(self,*mpaths):
mpath = mpaths[0]
terug = 1 #if there is no 'None' in the mpath, but everything is matched, 1 is returned (like True)
if self.record['BOTSID'] == mpath['BOTSID']: #is record-id equal to mpath-botsid? Not strictly needed, but gives much beter performance...
for part in mpath: #check all mpath-parts;
if part in self.record:
if mpath[part] is None: #this is the field we are looking for; but not all matches have been made so remember value
terug = self.record[part][:] #copy to avoid memory problems
else: #compare values of mpath-part and recordfield
if mpath[part] == self.record[part]:
continue
else: #content of record-field and mpath-part do not match
return None
else: #not all parts of mpath are in record, so no match:
return None
else: #all parts are matched, and OK.
if len(mpaths) == 1: #mpath is exhausted; so we are there!!!
return terug
else:
for childnode in self.children:
terug = childnode._getcore(*mpaths[1:]) #search recursive for rest of mpaths
if terug:
return terug
else: #no child has given a valid return
return None
else: #record-id is not equal to mpath-botsid, so no match
return None
def getcount(self):
'''count the number of nodes/records uner the node/in whole tree'''
count = 0
if self.record:
count += 1 #count itself
for child in self.children:
count += child.getcount()
return count
def getcountoccurrences(self,*mpaths):
''' count number of occurences of mpath. Eg count nr of LIN's'''
count = 0
for value in self.getloop(*mpaths):
count += 1
return count
def getcountsum(self,*mpaths):
''' return the sum for all values found in mpath. Eg total number of ordered quantities.'''
count = decimal.Decimal(0)
mpathscopy = copy.deepcopy(mpaths)
for key,value in mpaths[-1].items():
if value is None:
del mpathscopy[-1][key]
for i in self.getloop(*mpathscopy):
value = i.get(mpaths[-1])
if value:
count += decimal.Decimal(value)
return unicode(count)
def getloop(self,*mpaths):
''' generator. Returns one by one the nodes as indicated in mpath
'''
#check validity mpaths
if not mpaths or not isinstance(mpaths,tuple):
raise botslib.MappingFormatError(_(u'must be dicts in tuple: getloop($mpath)'),mpath=mpaths)
for part in mpaths:
if not isinstance(part,dict):
raise botslib.MappingFormatError(_(u'must be dicts in tuple: getloop($mpath)'),mpath=mpaths)
if not 'BOTSID' in part:
raise botslib.MappingFormatError(_(u'section without "BOTSID": getloop($mpath)'),mpath=mpaths)
if not 'BOTSIDnr' in part:
part['BOTSIDnr'] = '1'
for key,value in part.iteritems():
if not isinstance(key,basestring):
raise botslib.MappingFormatError(_(u'keys must be strings: getloop($mpath)'),mpath=mpaths)
if not isinstance(value,basestring):
raise botslib.MappingFormatError(_(u'values must be strings: getloop($mpath)'),mpath=mpaths)
for terug in self._getloopcore(*mpaths):
botsglobal.logmap.debug(u'getloop %s returns "%s".',mpaths,terug.record)
yield terug
def _getloopcore(self,*mpaths):
''' recursive part of getloop()
'''
mpath = mpaths[0]
if self.record['BOTSID'] == mpath['BOTSID']: #found right record
for part in mpath:
if not part in self.record or mpath[part] != self.record[part]:
return
else: #all parts are checked, and OK.
if len(mpaths) == 1:
yield self
else:
for childnode in self.children:
for terug in childnode._getloopcore(*mpaths[1:]): #search recursive for rest of mpaths
yield terug
return
def getnozero(self,*mpaths):
terug = self.get(*mpaths)
try:
value = float(terug)
except TypeError:
return None
except ValueError:
return None
if value == 0:
return None
return terug
def put(self,*mpaths,**kwargs):
if not mpaths or not isinstance(mpaths,tuple):
raise botslib.MappingFormatError(_(u'must be dicts in tuple: put($mpath)'),mpath=mpaths)
for part in mpaths:
if not isinstance(part,dict):
raise botslib.MappingFormatError(_(u'must be dicts in tuple: put($mpath)'),mpath=mpaths)
#check: 'BOTSID' is required
#check: all values should be strings
for part in mpaths:
if not 'BOTSID' in part:
raise botslib.MappingFormatError(_(u'section without "BOTSID": put($mpath)'),mpath=mpaths)
if not 'BOTSIDnr' in part:
part['BOTSIDnr'] = '1'
for key,value in part.iteritems():
if value is None:
botsglobal.logmap.debug(u'"None" in put %s.',str(mpaths))
return False
if not isinstance(key,basestring):
raise botslib.MappingFormatError(_(u'keys must be strings: put($mpath)'),mpath=mpaths)
if kwargs and 'strip' in kwargs and kwargs['strip'] == False:
part[key] = unicode(value) #used for fixed ISA header of x12
else:
part[key] = unicode(value).strip() #leading and trailing spaces are stripped from the values
if self.sameoccurence(mpaths[0]):
self._putcore(*mpaths[1:])
else:
raise botslib.MappingRootError(_(u'error in root put "$mpath".'),mpath=mpaths[0])
botsglobal.logmap.debug(u'"True" for put %s',str(mpaths))
return True
def _putcore(self,*mpaths):
if not mpaths: #newmpath is exhausted, stop searching.
return True
for node in self.children:
if node.record['BOTSID']==mpaths[0]['BOTSID'] and node.sameoccurence(mpaths[0]):
node._putcore(*mpaths[1:])
return
else: #is not present in children, so append
self.append(Node(mpaths[0]))
self.children[-1]._putcore(*mpaths[1:])
def putloop(self,*mpaths):
if not mpaths or not isinstance(mpaths,tuple):
raise botslib.MappingFormatError(_(u'must be dicts in tuple: putloop($mpath)'),mpath=mpaths)
for part in mpaths:
if not isinstance(part,dict):
raise botslib.MappingFormatError(_(u'must be dicts in tuple: putloop($mpath)'),mpath=mpaths)
#check: 'BOTSID' is required
#check: all values should be strings
for part in mpaths:
if not 'BOTSID' in part:
raise botslib.MappingFormatError(_(u'section without "BOTSID": putloop($mpath)'),mpath=mpaths)
if not 'BOTSIDnr' in part:
part['BOTSIDnr'] = '1'
for key,value in part.iteritems():
if not isinstance(key,basestring):
raise botslib.MappingFormatError(_(u'keys must be strings: putloop($mpath)'),mpath=mpaths)
if value is None:
return False
#~ if not isinstance(value,basestring):
#~ raise botslib.MappingFormatError(_(u'values must be strings in putloop%s'%(str(mpaths)))
part[key] = unicode(value).strip()
if self.sameoccurence(mpaths[0]):
if len(mpaths)==1:
return self
return self._putloopcore(*mpaths[1:])
else:
raise botslib.MappingRootError(_(u'error in root putloop "$mpath".'),mpath=mpaths[0])
def _putloopcore(self,*mpaths):
if len(mpaths) ==1: #end of mpath reached; always make new child-node
self.append(Node(mpaths[0]))
return self.children[-1]
for node in self.children: #if first part of mpaths exists already in children go recursive
if node.record['BOTSID']==mpaths[0]['BOTSID'] and node.record['BOTSIDnr']==mpaths[0]['BOTSIDnr'] and node.sameoccurence(mpaths[0]):
return node._putloopcore(*mpaths[1:])
else: #is not present in children, so append a child, and go recursive
self.append(Node(mpaths[0]))
return self.children[-1]._putloopcore(*mpaths[1:])
def sameoccurence(self, mpath):
for key,value in self.record.iteritems():
if (key in mpath) and (mpath[key]!=value):
return False
else: #all equal keys have same values, thus both are 'equal'.
self.record.update(mpath)
return True
def sort(self,*mpaths):
global comparekey
comparekey = mpaths[1:]
self.children.sort(key=nodecompare)
| [
[
1,
0,
0.0021,
0.0021,
0,
0.66,
0,
349,
0,
1,
0,
0,
349,
0,
0
],
[
1,
0,
0.0042,
0.0021,
0,
0.66,
0.125,
739,
0,
1,
0,
0,
739,
0,
0
],
[
1,
0,
0.0063,
0.0021,
0,
0... | [
"import decimal",
"import copy",
"from django.utils.translation import ugettext as _",
"import botslib",
"import botsglobal",
"from botsconfig import *",
"comparekey=None",
"def nodecompare(node):\n global comparekey\n return node.get(*comparekey)",
" return node.get(*comparekey)",
"class... |
import django
from django.contrib import admin
from django.utils.translation import ugettext as _
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.contrib.admin.util import unquote, flatten_fieldsets, get_deleted_objects, model_ngettext, model_format_dict
from django.core.exceptions import PermissionDenied
from django.utils.encoding import force_unicode
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.text import capfirst, get_text_list
#***********
import models
import botsglobal
admin.site.disable_action('delete_selected') #use bots' own 'bulk_delete' action instead of django's default (which cascades and asks confirmation)
class BotsAdmin(admin.ModelAdmin):
    ''' Base ModelAdmin for all bots admin screens: shared page size, save-as,
        a simplified delete view and the activate/bulk_delete admin actions.
    '''
    #page size from bots.ini ('adminlimit', falling back to 'limit', then 30)
    list_per_page = botsglobal.ini.getint('settings','adminlimit',botsglobal.ini.getint('settings','limit',30))
    save_as = True
    def delete_view(self, request, object_id, extra_context=None):
        ''' copy from admin.ModelAdmin; adapted: do not check references: no cascading deletes; no confirmation.'''
        opts = self.model._meta
        app_label = opts.app_label
        try:
            obj = self.queryset(request).get(pk=unquote(object_id))
        except self.model.DoesNotExist:
            obj = None
        if not self.has_delete_permission(request, obj):
            raise PermissionDenied(_(u'Permission denied'))
        if obj is None:
            raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {'name': force_unicode(opts.verbose_name), 'key': escape(object_id)})
        obj_display = force_unicode(obj)
        self.log_deletion(request, obj, obj_display)
        obj.delete()
        self.message_user(request, _('The %(name)s "%(obj)s" was deleted successfully.') % {'name': force_unicode(opts.verbose_name), 'obj': force_unicode(obj_display)})
        if not self.has_change_permission(request, None):
            return HttpResponseRedirect("../../../../")
        return HttpResponseRedirect("../../")
    def activate(self, request, queryset):
        ''' admin action.'''
        #toggles the 'active' flag of each selected object
        for obj in queryset:
            obj.active = not obj.active
            obj.save()
    activate.short_description = _(u'activate/de-activate')
    def bulk_delete(self, request, queryset):
        ''' admin action.'''
        #deletes each selected object individually (no confirmation page)
        for obj in queryset:
            obj.delete()
    bulk_delete.short_description = _(u'delete selected')
#*****************************************************************************************************
class CcodeAdmin(BotsAdmin):
    ''' Admin screen for user code conversions (ccode). '''
    actions = ('bulk_delete',)
    list_display = ('ccodeid','leftcode','rightcode','attr1','attr2','attr3','attr4','attr5','attr6','attr7','attr8')
    list_display_links = ('ccodeid',)
    list_filter = ('ccodeid',)
    ordering = ('ccodeid','leftcode')
    search_fields = ('ccodeid__ccodeid','leftcode','rightcode','attr1','attr2','attr3','attr4','attr5','attr6','attr7','attr8')
    def lookup_allowed(self, lookup, *args, **kwargs):
        ''' permit filtering on the ccodeid foreign key (blocked by default in newer django). '''
        if lookup.startswith('ccodeid'):
            return True
        return super(CcodeAdmin, self).lookup_allowed(lookup, *args, **kwargs)
admin.site.register(models.ccode,CcodeAdmin)
class CcodetriggerAdmin(BotsAdmin):
    ''' Admin screen for code-conversion types (ccodetrigger). '''
    actions = ('bulk_delete',)
    list_display = ('ccodeid','ccodeid_desc',)
    list_display_links = ('ccodeid',)
    ordering = ('ccodeid',)
    search_fields = ('ccodeid','ccodeid_desc')
admin.site.register(models.ccodetrigger,CcodetriggerAdmin)
class ChannelAdmin(BotsAdmin):
    ''' Admin screen for communication channels. '''
    actions = ('bulk_delete',)
    list_display = ('idchannel', 'inorout', 'type','host', 'port', 'username', 'secret', 'path', 'filename', 'remove', 'charset', 'archivepath','rsrv2','ftpactive', 'ftpbinary','askmdn', 'syslock', 'starttls','apop')
    list_filter = ('inorout','type')
    ordering = ('idchannel',)
    search_fields = ('idchannel', 'inorout', 'type','host', 'username', 'path', 'filename', 'archivepath', 'charset')
    #edit form: general fields first, FTP and advanced options in collapsed sections
    fieldsets = (
        (None, {'fields': ('idchannel', ('inorout','type'), ('host','port'), ('username', 'secret'), ('path', 'filename'), 'remove', 'archivepath', 'charset','desc')
                }),
        (_(u'FTP specific data'),{'fields': ('ftpactive', 'ftpbinary', 'ftpaccount' ),
                 'classes': ('collapse',)
                }),
        (_(u'Advanced'),{'fields': (('lockname', 'syslock'), 'parameters', 'starttls','apop','askmdn','rsrv2'),
                 'classes': ('collapse',)
                }),
        )
admin.site.register(models.channel,ChannelAdmin)
class ConfirmruleAdmin(BotsAdmin):
    ''' Admin screen for confirm/MDN rules. '''
    actions = ('activate','bulk_delete')
    list_display = ('active','negativerule','confirmtype','ruletype', 'frompartner', 'topartner','idroute','idchannel','editype','messagetype')
    list_display_links = ('confirmtype',)
    list_filter = ('active','confirmtype','ruletype')
    search_fields = ('confirmtype','ruletype', 'frompartner__idpartner', 'topartner__idpartner', 'idroute', 'idchannel__idchannel', 'editype', 'messagetype')
    ordering = ('confirmtype','ruletype')
    fieldsets = (
        (None, {'fields': ('active','negativerule','confirmtype','ruletype','frompartner', 'topartner','idroute','idchannel',('editype','messagetype'))}),
        )
admin.site.register(models.confirmrule,ConfirmruleAdmin)
class MailInline(admin.TabularInline):
    ''' Inline on the partner screen: per-channel mail/cc addresses (chanpar). '''
    model = models.chanpar
    fields = ('idchannel','mail', 'cc')
    extra = 1
class MyPartnerAdminForm(django.forms.ModelForm):
    ''' customs form for partners to check if group has groups'''
    class Meta:
        model = models.partner
    def clean(self):
        ''' reject a partner that is itself a group while also being member of a group. '''
        super(MyPartnerAdminForm, self).clean()
        if self.cleaned_data['isgroup'] and self.cleaned_data['group']:
            raise django.forms.util.ValidationError(_(u'A group can not be part of a group.'))
        return self.cleaned_data
class PartnerAdmin(BotsAdmin):
    ''' Admin screen for partners and partner groups. '''
    actions = ('bulk_delete','activate')
    form = MyPartnerAdminForm #validates that a group is not member of a group
    fields = ('active', 'isgroup', 'idpartner', 'name','mail','cc','group')
    filter_horizontal = ('group',)
    inlines = (MailInline,)
    list_display = ('active','isgroup','idpartner', 'name','mail','cc')
    list_display_links = ('idpartner',)
    list_filter = ('active','isgroup')
    ordering = ('idpartner',)
    search_fields = ('idpartner','name','mail','cc')
admin.site.register(models.partner,PartnerAdmin)
class RoutesAdmin(BotsAdmin):
    ''' Admin screen for routes. '''
    actions = ('bulk_delete','activate')
    list_display = ('active', 'idroute', 'seq', 'fromchannel', 'fromeditype', 'frommessagetype', 'alt', 'frompartner', 'topartner', 'translateind', 'tochannel', 'defer', 'toeditype', 'tomessagetype', 'frompartner_tochannel', 'topartner_tochannel', 'testindicator', 'notindefaultrun')
    list_display_links = ('idroute',)
    list_filter = ('idroute','active','fromeditype')
    ordering = ('idroute','seq')
    search_fields = ('idroute', 'fromchannel__idchannel','fromeditype', 'frommessagetype', 'alt', 'tochannel__idchannel','toeditype', 'tomessagetype')
    #edit form: outchannel-filtering and advanced options in collapsed sections
    fieldsets = (
        (None, {'fields': ('active',('idroute', 'seq'),'fromchannel', ('fromeditype', 'frommessagetype'),'translateind','tochannel','desc')}),
        (_(u'Filtering for outchannel'),{'fields':('toeditype', 'tomessagetype','frompartner_tochannel', 'topartner_tochannel', 'testindicator'),
                 'classes': ('collapse',)
                }),
        (_(u'Advanced'),{'fields': ('alt', 'frompartner', 'topartner', 'notindefaultrun','defer'),
                 'classes': ('collapse',)
                }),
        )
admin.site.register(models.routes,RoutesAdmin)
class MyTranslateAdminForm(django.forms.ModelForm):
    ''' customs form for translations to check if entry exists (unique_together not validated right (because of null values in partner fields))'''
    class Meta:
        model = models.translate
    def clean(self):
        ''' manual uniqueness check on (fromeditype,frommessagetype,alt,frompartner,topartner);
            allows saving the row itself (same pk) but rejects duplicates. '''
        super(MyTranslateAdminForm, self).clean()
        b = models.translate.objects.filter(fromeditype=self.cleaned_data['fromeditype'],
                                            frommessagetype=self.cleaned_data['frommessagetype'],
                                            alt=self.cleaned_data['alt'],
                                            frompartner=self.cleaned_data['frompartner'],
                                            topartner=self.cleaned_data['topartner'])
        if b and (self.instance.pk is None or self.instance.pk != b[0].id):
            raise django.forms.util.ValidationError(_(u'Combination of fromeditype,frommessagetype,alt,frompartner,topartner already exists.'))
        return self.cleaned_data
class TranslateAdmin(BotsAdmin):
    ''' Admin screen for translation rules. '''
    actions = ('bulk_delete','activate')
    form = MyTranslateAdminForm #manual uniqueness validation
    list_display = ('active', 'fromeditype', 'frommessagetype', 'alt', 'frompartner', 'topartner', 'tscript', 'toeditype', 'tomessagetype')
    list_display_links = ('fromeditype',)
    list_filter = ('active','fromeditype','toeditype')
    ordering = ('fromeditype','frommessagetype')
    search_fields = ('fromeditype', 'frommessagetype', 'alt', 'frompartner__idpartner', 'topartner__idpartner', 'tscript', 'toeditype', 'tomessagetype')
    fieldsets = (
        (None, {'fields': ('active', ('fromeditype', 'frommessagetype'),'tscript', ('toeditype', 'tomessagetype','desc'))
                }),
        (_(u'Advanced - multiple translations per editype/messagetype'),{'fields': ('alt', 'frompartner', 'topartner'),
                 'classes': ('collapse',)
                }),
        )
admin.site.register(models.translate,TranslateAdmin)
class UniekAdmin(BotsAdmin):     #AKA counters
    ''' Admin screen for counters (uniek); counter values are editable in the list view. '''
    actions = None
    list_display = ('domein', 'nummer')
    list_editable = ('nummer',)
    ordering = ('domein',)
    search_fields = ('domein',)
admin.site.register(models.uniek,UniekAdmin)
#User - change the default display of user screen
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
#widen the user list view, then re-register User with the adapted admin
UserAdmin.list_display = ('username', 'first_name', 'last_name','email', 'is_active', 'is_staff', 'is_superuser', 'date_joined','last_login')
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
| [
[
1,
0,
0.005,
0.005,
0,
0.66,
0,
294,
0,
1,
0,
0,
294,
0,
0
],
[
1,
0,
0.0101,
0.005,
0,
0.66,
0.027,
302,
0,
1,
0,
0,
302,
0,
0
],
[
1,
0,
0.0151,
0.005,
0,
0.66,... | [
"import django",
"from django.contrib import admin",
"from django.utils.translation import ugettext as _",
"from django.http import Http404, HttpResponse, HttpResponseRedirect",
"from django.contrib.admin.util import unquote, flatten_fieldsets, get_deleted_objects, model_ngettext, model_format_dict",
"fro... |
import sys
import os
import encodings
import codecs
import ConfigParser
import logging, logging.handlers
from django.utils.translation import ugettext as _
#Bots-modules
from botsconfig import *
import botsglobal
import botslib
class BotsConfig(ConfigParser.SafeConfigParser):
    ''' See SafeConfigParser.
        Adds default-value handling to get/getint/getboolean for bots.ini lookups.
    '''
    def get(self,section, option, default=''):
        ''' like SafeConfigParser.get, but returns 'default' for a missing entry;
            with the sentinel default '' a missing entry raises BotsError instead.
        '''
        try:
            return ConfigParser.SafeConfigParser.get(self,section,option)
        except: #if there is no such section,option
            if default == '':
                raise botslib.BotsError(_(u'No entry "$entry" in section "$section" in "bots.ini".'),entry=option,section=section)
            return default
    def getint(self,section, option, default):
        ''' like SafeConfigParser.getint, but returns 'default' on any error (missing entry or non-integer value). '''
        try:
            return ConfigParser.SafeConfigParser.getint(self,section,option)
        except:
            return default
    def getboolean(self,section, option, default):
        ''' like SafeConfigParser.getboolean, but returns 'default' on any error (missing entry or non-boolean value). '''
        try:
            return ConfigParser.SafeConfigParser.getboolean(self,section,option)
        except:
            return default
def generalinit(configdir):
    ''' Bots startup initialisation:
        - import settings.py from 'configdir' (trying several import roots),
        - read bots.ini and fill botsglobal.ini,
        - import usersys and set all directory entries in botsglobal.ini,
        - set DEBUG flags in settings, register bots charsets,
        - point django at the imported settings module and set the global timeout.
        configdir: path to the configuration directory, relative to PYTHONPATH.
    '''
    #Set Configdir
    #Configdir MUST be importable. So configdir is relative to PYTHONPATH. Try several options for this import.
    try: #configdir outside bots-directory: import configdir.settings.py
        importnameforsettings = os.path.normpath(os.path.join(configdir,'settings')).replace(os.sep,'.')
        settings = botslib.botsbaseimport(importnameforsettings)
    except ImportError: #configdir is in bots directory: import bots.configdir.settings.py
        try:
            importnameforsettings = os.path.normpath(os.path.join('bots',configdir,'settings')).replace(os.sep,'.')
            settings = botslib.botsbaseimport(importnameforsettings)
        except ImportError: #set pythonpath to config directory first
            if not os.path.exists(configdir): #check if configdir exists.
                raise botslib.BotsError(_(u'In initilisation: path to configuration does not exists: "$path".'),path=configdir)
            addtopythonpath = os.path.abspath(os.path.dirname(configdir))
            moduletoimport = os.path.basename(configdir)
            sys.path.append(addtopythonpath)
            importnameforsettings = os.path.normpath(os.path.join(moduletoimport,'settings')).replace(os.sep,'.')
            settings = botslib.botsbaseimport(importnameforsettings)
    #settings are accessed using botsglobal
    botsglobal.settings = settings
    #Find pathname configdir using imported settings.py.
    configdirectory = os.path.abspath(os.path.dirname(settings.__file__))
    #Read configuration-file bots.ini.
    botsglobal.ini = BotsConfig()
    cfgfile = open(os.path.join(configdirectory,'bots.ini'), 'r')
    try:
        botsglobal.ini.readfp(cfgfile)
    finally: #make sure the ini-file is closed even when parsing raises
        cfgfile.close()
    #Set usersys.
    #usersys MUST be importable. So usersys is relative to PYTHONPATH. Try several options for this import.
    usersys = botsglobal.ini.get('directories','usersys','usersys')
    try: #usersys outside bots-directory: import usersys
        importnameforusersys = os.path.normpath(usersys).replace(os.sep,'.')
        importedusersys = botslib.botsbaseimport(importnameforusersys)
    except ImportError: #usersys is in bots directory: import bots.usersys
        try:
            importnameforusersys = os.path.normpath(os.path.join('bots',usersys)).replace(os.sep,'.')
            importedusersys = botslib.botsbaseimport(importnameforusersys)
        except ImportError: #set pythonpath to usersys directory first
            if not os.path.exists(usersys): #check if configdir exists.
                raise botslib.BotsError(_(u'In initilisation: path to configuration does not exists: "$path".'),path=usersys)
            addtopythonpath = os.path.abspath(os.path.dirname(usersys))
            moduletoimport = os.path.basename(usersys)
            sys.path.append(addtopythonpath)
            importnameforusersys = os.path.normpath(usersys).replace(os.sep,'.')
            importedusersys = botslib.botsbaseimport(importnameforusersys)
    #set directory settings in bots.ini************************************************************
    botsglobal.ini.set('directories','botspath',botsglobal.settings.PROJECT_PATH)
    botsglobal.ini.set('directories','config',configdirectory)
    botsglobal.ini.set('directories','usersysabs',os.path.abspath(os.path.dirname(importedusersys.__file__))) #find pathname usersys using imported usersys
    botsglobal.usersysimportpath = importnameforusersys
    botssys = botsglobal.ini.get('directories','botssys','botssys')
    botsglobal.ini.set('directories','botssys',botslib.join(botssys))
    botsglobal.ini.set('directories','data',botslib.join(botssys,'data'))
    botslib.dirshouldbethere(botsglobal.ini.get('directories','data'))
    botsglobal.ini.set('directories','logging',botslib.join(botssys,'logging'))
    botslib.dirshouldbethere(botsglobal.ini.get('directories','logging'))
    botsglobal.ini.set('directories','templates',botslib.join(botsglobal.ini.get('directories','usersysabs'),'grammars/template/templates'))
    botsglobal.ini.set('directories','templateshtml',botslib.join(botsglobal.ini.get('directories','usersysabs'),'grammars/templatehtml/templates'))
    #set values in setting.py**********************************************************************
    if botsglobal.ini.get('webserver','environment','development') == 'development': #values in bots.ini are also used in setting up cherrypy
        settings.DEBUG = True
    else:
        settings.DEBUG = False
    settings.TEMPLATE_DEBUG = settings.DEBUG
    #start initializing bots charsets
    #NOTE: this was called twice (once more after setting DJANGO_SETTINGS_MODULE);
    #the second call only re-registered the same codec hooks, so it was removed.
    initbotscharsets()
    #set environment for django to start***************************************************************************************************
    os.environ['DJANGO_SETTINGS_MODULE'] = importnameforsettings
    botslib.settimeout(botsglobal.ini.getint('settings','globaltimeout',10))
def initbotscharsets():
    '''set up right charset handling for specific charsets (UNOA, UNOB, UNOC, etc).'''
    codecs.register(codec_search_function) #tell python how to search a codec defined by bots. These are the codecs in usersys/charset
    #character used by the 'botsreplace' error handler below for chars outside a charset
    botsglobal.botsreplacechar = unicode(botsglobal.ini.get('settings','botsreplacechar',u' '))
    codecs.register_error('botsreplace', botscharsetreplace) #define the ' botsreplace' error handling for codecs/charsets.
    for key, value in botsglobal.ini.items('charsets'): #set aliases for charsets in bots.ini
        encodings.aliases.aliases[key]=value
def codec_search_function(encoding):
    ''' codec search function registered with the codecs module: tries to import
        a bots charset module ('charsets' in usersys) for 'encoding' and returns
        its codec registry entry; returns None so python continues searching the
        other registered codecs.
    '''
    try:
        module,filename = botslib.botsimport('charsets',encoding)
    #was a bare 'except:', which also swallowed KeyboardInterrupt/SystemExit;
    #narrowed to Exception: any import problem just means 'not a bots charset'.
    except Exception:
        return None
    else:
        if hasattr(module,'getregentry'):
            return module.getregentry()
        else:
            return None
def botscharsetreplace(info):
    '''replaces an char outside a charset by a user defined char. Useful eg for fixed records: recordlength does not change. Do not know if this works for eg UTF-8...'''
    #info is the UnicodeError; return (replacement, position to continue at)
    return (botsglobal.botsreplacechar, info.start+1)
def initenginelogging():
    '''Set up the main 'bots' logger: rotating engine.log file handler,
    the 'bots.map' mapping-trace logger, and (optionally) a console handler.
    Levels for file/console come from bots.ini.
    '''
    convertini2logger={'DEBUG':logging.DEBUG,'INFO':logging.INFO,'WARNING':logging.WARNING,'ERROR':logging.ERROR,'CRITICAL':logging.CRITICAL}
    # create main logger 'bots'
    botsglobal.logger = logging.getLogger('bots')
    botsglobal.logger.setLevel(logging.DEBUG)   #logger itself passes everything; handlers filter
    # create rotating file handler
    log_file = botslib.join(botsglobal.ini.get('directories','logging'),'engine.log')
    rotatingfile = logging.handlers.RotatingFileHandler(log_file,backupCount=botsglobal.ini.getint('settings','log_file_number',10))
    rotatingfile.setLevel(convertini2logger[botsglobal.ini.get('settings','log_file_level','ERROR')])
    fileformat = logging.Formatter("%(asctime)s %(levelname)-8s %(name)s : %(message)s",'%Y%m%d %H:%M:%S')
    rotatingfile.setFormatter(fileformat)
    rotatingfile.doRollover() #each run a new log file is used; old one is rotated
    # add rotating file handler to main logger
    botsglobal.logger.addHandler(rotatingfile)
    #logger for trace of mapping; tried to use filters but got this not to work.....
    botsglobal.logmap = logging.getLogger('bots.map')
    if not botsglobal.ini.getboolean('settings','mappingdebug',False):
        botsglobal.logmap.setLevel(logging.CRITICAL)    #effectively silence mapping trace
    #logger for reading edifile. is now used only very limited (1 place); is done with 'if'
    #~ botsglobal.ini.getboolean('settings','readrecorddebug',False)
    # create console handler
    if botsglobal.ini.getboolean('settings','log_console',True):
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        consuleformat = logging.Formatter("%(levelname)-8s %(message)s")
        console.setFormatter(consuleformat) # add formatter to console
        botsglobal.logger.addHandler(console) # add console to logger
def connect():
    '''Open the database connection (stored in botsglobal.db).

    Different connect code per type of database, selected via
    botsglobal.settings.DATABASE_ENGINE.
    NOTE(review): this chunk may be truncated — the full source may handle
    additional engines after the postgresql branch; verify against the file.
    '''
    #different connect code per type of database
    if botsglobal.settings.DATABASE_ENGINE == 'sqlite3':
        #sqlite has some more fiddling; in separate file. Mainly because of some other method of parameter passing.
        if not os.path.isfile(botsglobal.settings.DATABASE_NAME):
            raise botslib.PanicError(_(u'Could not find database file for SQLite'))
        import botssqlite
        botsglobal.db = botssqlite.connect(database = botsglobal.settings.DATABASE_NAME)
    elif botsglobal.settings.DATABASE_ENGINE == 'mysql':
        import MySQLdb
        from MySQLdb import cursors
        #DictCursor: rows come back as dicts, matching how botslib.query is used elsewhere
        botsglobal.db = MySQLdb.connect(host=botsglobal.settings.DATABASE_HOST,
                                        port=int(botsglobal.settings.DATABASE_PORT),
                                        db=botsglobal.settings.DATABASE_NAME,
                                        user=botsglobal.settings.DATABASE_USER,
                                        passwd=botsglobal.settings.DATABASE_PASSWORD,
                                        cursorclass=cursors.DictCursor,
                                        **botsglobal.settings.DATABASE_OPTIONS)
    elif botsglobal.settings.DATABASE_ENGINE == 'postgresql_psycopg2':
        import psycopg2
        import psycopg2.extensions
        import psycopg2.extras
        psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)  #return text columns as unicode
        botsglobal.db = psycopg2.connect( 'host=%s dbname=%s user=%s password=%s'%( botsglobal.settings.DATABASE_HOST,
                                                                                    botsglobal.settings.DATABASE_NAME,
                                                                                    botsglobal.settings.DATABASE_USER,
                                                                                    botsglobal.settings.DATABASE_PASSWORD),connection_factory=psycopg2.extras.DictConnection)
        botsglobal.db.set_client_encoding('UNICODE')
| [
[
1,
0,
0.0051,
0.0051,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0102,
0.0051,
0,
0.66,
0.0625,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0153,
0.0051,
0,
... | [
"import sys",
"import os",
"import encodings",
"import codecs",
"import ConfigParser",
"import logging, logging.handlers",
"from django.utils.translation import ugettext as _",
"from botsconfig import *",
"import botsglobal",
"import botslib",
"class BotsConfig(ConfigParser.SafeConfigParser):\n ... |
import shutil
import time
from django.utils.translation import ugettext as _
#bots-modules
import botslib
import botsglobal
import grammar
import outmessage
from botsconfig import *
@botslib.log_session
def mergemessages(startstatus=TRANSLATED,endstatus=MERGED,idroute=''):
    ''' Merges en envelopes several messages to one file;
        In db-ta: attribute 'merge' indicates message should be merged with similar messages; 'merge' is generated in translation from messagetype-grammar
        If merge==False: 1 message per envelope - no merging, else append all similar messages to one file
        Implementation as separate loops: one for merge&envelope, another for enveloping only
        db-ta status TRANSLATED---->MERGED
    '''
    outerqueryparameters = {'status':startstatus,'statust':OK,'idroute':idroute,'rootidta':botslib.get_minta4query(),'merge':False}
    #**********for messages only to envelope (no merging)
    for row in botslib.query(u'''SELECT editype,messagetype,frompartner,topartner,testindicator,charset,contenttype,tochannel,envelope,nrmessages,idta,filename,idroute,merge
                                FROM ta
                                WHERE idta>%(rootidta)s
                                AND status=%(status)s
                                AND statust=%(statust)s
                                AND idroute=%(idroute)s
                                AND merge=%(merge)s
                                ''',
                                outerqueryparameters):
        try:
            #copy db-row into a plain dict; serves as ta_info for the envelope
            ta_info = dict([(key,row[key]) for key in row.keys()])
            #~ ta_info={'merge':False,'idroute':idroute}
            #~ for key in row.keys():
                #~ ta_info[key] = row[key]
            ta_fromfile = botslib.OldTransaction(row['idta'])     #edi message to envelope
            ta_tofile=ta_fromfile.copyta(status=endstatus)  #edifile for enveloped message; attributes of not-enveloped message are copied...
            #~ ta_fromfile.update(child=ta_tofile.idta)    #??there is already a parent-child relation (1-1)...
            ta_info['filename'] = str(ta_tofile.idta)   #create filename for enveloped message
            botsglobal.logger.debug(u'Envelope 1 message editype: %s, messagetype: %s.',ta_info['editype'],ta_info['messagetype'])
            envelope(ta_info,[row['filename']])
        except:
            #any error: mark target file as in-error; source file keeps its status
            txt=botslib.txtexc()
            ta_tofile.update(statust=ERROR,errortext=txt)
        else:
            ta_fromfile.update(statust=DONE)
            ta_tofile.update(statust=OK,**ta_info)  #selection is used to update enveloped message;
    #**********for messages to merge & envelope
    #all GROUP BY fields must be used in SELECT!
    #as files get merged: can not copy idta; must extract relevant attributes.
    outerqueryparameters['merge']=True
    for row in botslib.query(u'''SELECT editype,messagetype,frompartner,topartner,tochannel,testindicator,charset,contenttype,envelope,sum(nrmessages) as nrmessages
                                FROM ta
                                WHERE idta>%(rootidta)s
                                AND status=%(status)s
                                AND statust=%(statust)s
                                AND idroute=%(idroute)s
                                AND merge=%(merge)s
                                GROUP BY editype,messagetype,frompartner,topartner,tochannel,testindicator,charset,contenttype,envelope
                                ''',
                                outerqueryparameters):
        try:
            ta_info = dict([(key,row[key]) for key in row.keys()])
            ta_info.update({'merge':False,'idroute':idroute})
            #~ for key in row.keys():
                #~ ta_info[key] = row[key]
            ta_tofile=botslib.NewTransaction(status=endstatus,idroute=idroute)  #edifile for enveloped messages
            ta_info['filename'] = str(ta_tofile.idta)   #create filename for enveloped message
            innerqueryparameters = ta_info.copy()
            innerqueryparameters.update(outerqueryparameters)
            ta_list=[]
            #gather individual idta and filenames
            #explicitly allow formpartner/topartner to be None/NULL
            for row2 in botslib.query(u'''SELECT idta, filename
                                        FROM ta
                                        WHERE idta>%(rootidta)s
                                        AND status=%(status)s
                                        AND statust=%(statust)s
                                        AND merge=%(merge)s
                                        AND editype=%(editype)s
                                        AND messagetype=%(messagetype)s
                                        AND (frompartner=%(frompartner)s OR frompartner IS NULL)
                                        AND (topartner=%(topartner)s OR topartner IS NULL)
                                        AND tochannel=%(tochannel)s
                                        AND testindicator=%(testindicator)s
                                        AND charset=%(charset)s
                                        AND idroute=%(idroute)s
                                        ''',
                                        innerqueryparameters):
                ta_fromfile = botslib.OldTransaction(row2['idta'])     #edi message to envelope
                ta_fromfile.update(statust=DONE,child=ta_tofile.idta)  #st child because of n->1 relation
                ta_list.append(row2['filename'])
            botsglobal.logger.debug(u'Merge and envelope: editype: %s, messagetype: %s, %s messages',ta_info['editype'],ta_info['messagetype'],ta_info['nrmessages'])
            envelope(ta_info,ta_list)
        except:
            txt=botslib.txtexc()
            ta_tofile.mergefailure()
            ta_tofile.update(statust=ERROR,errortext=txt)
        else:
            ta_tofile.update(statust=OK,**ta_info)
def envelope(ta_info,ta_list):
    ''' dispatch function for class Envelope and subclasses.
        editype, edimessage and envelope essential for enveloping.
        determine the class for enveloping:
        1. empty string: no enveloping (class noenvelope); file(s) is/are just copied. No user scripting for envelope.
        2. if envelope is a class in this module, use it
        3. if editype is a class in this module, use it
        4. if user defined enveloping in usersys/envelope/<editype>/<envelope>.<envelope>, use it (user defined scripting overrides)
        Always check if user envelope script. user exits extends/replaces default enveloping.
    '''
    #determine which class to use for enveloping
    userscript = scriptname = None
    if not ta_info['envelope']:     #used when enveloping is just appending files.
        classtocall = noenvelope
    else:
        try:    #see if the is user scripted enveloping
            userscript,scriptname = botslib.botsimport('envelopescripts',ta_info['editype'] + '.' + ta_info['envelope'])
        except ImportError:     #other errors, eg syntax errors are just passed
            pass
        #first: check if there is a class with name ta_info['envelope'] in the user scripting
        #this allows complete enveloping in user scripting
        if userscript and hasattr(userscript,ta_info['envelope']):
            classtocall = getattr(userscript,ta_info['envelope'])
        else:
            try:    #check if there is a envelope class with name ta_info['envelope'] in this file (envelope.py)
                classtocall = globals()[ta_info['envelope']]
            except KeyError:
                try:    #check if there is a envelope class with name ta_info['editype'] in this file (envelope.py).
                    #20110919: this should disappear in the long run....use this now for orders2printenvelope and myxmlenvelop
                    #reason to disappear: confusing when setting up.
                    classtocall = globals()[ta_info['editype']]
                except KeyError:
                    #bugfix: report the envelope that was not found (message template uses $envelope);
                    #previously ta_info['editype'] was passed here, producing a misleading error.
                    raise botslib.OutMessageError(_(u'Not found envelope "$envelope".'),envelope=ta_info['envelope'])
    env = classtocall(ta_info,ta_list,userscript,scriptname)
    env.run()
class Envelope(object):
    ''' Abstract base for all envelope writers; instantiate a subclass.

    Carries the transaction attributes (ta_info), the list of edi-files to
    envelope (ta_list) and the optional user envelope script module.
    '''
    def __init__(self,ta_info,ta_list,userscript,scriptname):
        self.ta_info = ta_info          #attributes of the (to be) enveloped file: editype, charset, filename, ...
        self.ta_list = ta_list          #filenames of the edi messages to put in the envelope
        self.userscript = userscript    #imported user envelope script module (or None)
        self.scriptname = scriptname    #name of that script; used when running user exits
    def _openoutenvelope(self,editype, messagetype_or_envelope):
        ''' Build the outmessage object and read the envelope grammar for it.'''
        #self.ta_info contains editype, messagetype, testindicator, charset, envelope, contenttype
        self.out = outmessage.outmessage_init(**self.ta_info)   #make outmessage object; init with self.ta_info
        #read grammar for envelope syntax. Remark: self.ta_info itself is not updated here
        self.out.outmessagegrammarread(editype, messagetype_or_envelope)
        #self.out.ta_info may now carry partner dependent parameters overriding message/envelope values
    def writefilelist(self,tofile):
        ''' Append the content of every file in self.ta_list to tofile.'''
        for edifilename in self.ta_list:
            source = botslib.opendata(edifilename, 'rb',self.ta_info['charset'])
            shutil.copyfileobj(source,tofile)
            source.close()
    def filelist2absolutepaths(self):
        ''' Return the files of self.ta_list as absolute paths (eg for xml-including).'''
        return [botslib.abspathdata(edifilename) for edifilename in self.ta_list]
class noenvelope(Envelope):
    ''' Enveloping without an envelope: the input files are concatenated into
        one output file unchanged; the user script hook is still honoured.
    '''
    def run(self):
        #user exit: may adapt ta_info before the output file is written
        botslib.tryrunscript(self.userscript,self.scriptname,'ta_infocontent',ta_info=self.ta_info)
        outfile = botslib.opendata(self.ta_info['filename'],'wb',self.ta_info['charset'])
        self.writefilelist(outfile)
        outfile.close()
class fixed(noenvelope):
    ''' editype 'fixed': no enveloping, output files are just concatenated (see noenvelope).'''
    pass
class csv(noenvelope):
    ''' editype 'csv': no enveloping, output files are just concatenated (see noenvelope).
        NOTE: class name deliberately matches the editype (envelope() looks classes up
        by name in globals()); it shadows the stdlib csv module name in this module.
    '''
    pass
class csvheader(Envelope):
    ''' csv enveloping that writes one header record (column names from the grammar)
        followed by the concatenated message files.
    '''
    def run(self):
        self._openoutenvelope(self.ta_info['editype'],self.ta_info['messagetype'])
        botslib.tryrunscript(self.userscript,self.scriptname,'ta_infocontent',ta_info=self.ta_info)
        #self.ta_info is not overwritten
        tofile = botslib.opendata(self.ta_info['filename'],'wb',self.ta_info['charset'])
        #header record: every field name from the first record of the grammar, mapped to itself
        headers = dict([(field[ID],field[ID]) for field in self.out.defmessage.structure[0][FIELDS]])
        self.out.put(headers)
        self.out.tree2records(self.out.root)
        tofile.write(self.out._record2string(self.out.records[0]))     #write header line
        self.writefilelist(tofile)  #then append the message files
        tofile.close()
class edifact(Envelope):
    ''' Generate UNB and UNZ segment; fill with data, write to interchange-file.
        Also writes a UNA service string advice when needed (charset/syntax version dependent).
    '''
    def run(self):
        if not self.ta_info['topartner'] or not self.ta_info['frompartner']:
            raise botslib.OutMessageError(_(u'In enveloping "frompartner" or "topartner" unknown: "$ta_info".'),ta_info=self.ta_info)
        self._openoutenvelope(self.ta_info['editype'],self.ta_info['envelope'])
        self.ta_info.update(self.out.ta_info)
        botslib.tryrunscript(self.userscript,self.scriptname,'ta_infocontent',ta_info=self.ta_info)
        #version dependent enveloping: syntax version <4 uses 2-digit year and always a blank 'reserve'
        writeUNA = False
        if self.ta_info['version']<'4':
            date = time.strftime('%y%m%d')
            reserve = ' '
            if self.ta_info['charset'] != 'UNOA':
                writeUNA = True
        else:
            date = time.strftime('%Y%m%d')
            reserve = self.ta_info['reserve']
            if self.ta_info['charset'] not in ['UNOA','UNOB']:
                writeUNA = True
        #UNB counter is per sender or receiver
        if botsglobal.ini.getboolean('settings','interchangecontrolperpartner',False):
            self.ta_info['reference'] = str(botslib.unique('unbcounter_' + self.ta_info['topartner']))
        else:
            self.ta_info['reference'] = str(botslib.unique('unbcounter_' + self.ta_info['frompartner']))
        #testindicator is more complex:
        if self.ta_info['testindicator'] and self.ta_info['testindicator']!='0':    #first check value from ta; do not use default
            testindicator = '1'
        elif self.ta_info['UNB.0035'] != '0':   #then check values from grammar
            testindicator = '1'
        else:
            testindicator = ''
        #build the envelope segments (that is, the tree from which the segments will be generated)
        self.out.put({'BOTSID':'UNB',
                        'S001.0001':self.ta_info['charset'],
                        'S001.0002':self.ta_info['version'],
                        'S002.0004':self.ta_info['frompartner'],
                        'S003.0010':self.ta_info['topartner'],
                        'S004.0017':date,
                        'S004.0019':time.strftime('%H%M'),
                        '0020':self.ta_info['reference']})
        #the following fields are conditional; do not write these when empty string (separator compression does take empty strings into account)
        if self.ta_info['UNB.S002.0007']:
            self.out.put({'BOTSID':'UNB','S002.0007': self.ta_info['UNB.S002.0007']})
        if self.ta_info['UNB.S003.0007']:
            self.out.put({'BOTSID':'UNB','S003.0007': self.ta_info['UNB.S003.0007']})
        if self.ta_info['UNB.0026']:
            self.out.put({'BOTSID':'UNB','0026': self.ta_info['UNB.0026']})
        if testindicator:
            self.out.put({'BOTSID':'UNB','0035': testindicator})
        self.out.put({'BOTSID':'UNB'},{'BOTSID':'UNZ','0036':self.ta_info['nrmessages'],'0020':self.ta_info['reference']})  #dummy segment; is not used
        #user exit
        botslib.tryrunscript(self.userscript,self.scriptname,'envelopecontent',ta_info=self.ta_info,out=self.out)
        #convert the tree into segments; here only the UNB is written (first segment)
        self.out.normalisetree(self.out.root)
        self.out.tree2records(self.out.root)
        #start doing the actual writing: UNA (optional), UNB, the message files, UNZ
        tofile = botslib.opendata(self.ta_info['filename'],'wb',self.ta_info['charset'])
        if writeUNA or self.ta_info['forceUNA']:
            tofile.write('UNA'+self.ta_info['sfield_sep']+self.ta_info['field_sep']+self.ta_info['decimaal']+self.ta_info['escape']+ reserve +self.ta_info['record_sep']+self.ta_info['add_crlfafterrecord_sep'])
        tofile.write(self.out._record2string(self.out.records[0]))
        self.writefilelist(tofile)
        tofile.write(self.out._record2string(self.out.records[-1]))
        tofile.close()
        #ask a CONTRL acknowledgement if confirm-rules say so (never for CONTRL itself)
        if self.ta_info['messagetype'][:6]!='CONTRL' and botslib.checkconfirmrules('ask-edifact-CONTRL',idroute=self.ta_info['idroute'],idchannel=self.ta_info['tochannel'],
                                                                                    topartner=self.ta_info['topartner'],frompartner=self.ta_info['frompartner'],
                                                                                    editype=self.ta_info['editype'],messagetype=self.ta_info['messagetype']):
            self.ta_info['confirmtype'] = u'ask-edifact-CONTRL'
            self.ta_info['confirmasked'] = True
class tradacoms(Envelope):
    ''' Generate STX and END segment; fill with appropriate data, write to interchange file.'''
    def run(self):
        if not self.ta_info['topartner'] or not self.ta_info['frompartner']:
            raise botslib.OutMessageError(_(u'In enveloping "frompartner" or "topartner" unknown: "$ta_info".'),ta_info=self.ta_info)
        self._openoutenvelope(self.ta_info['editype'],self.ta_info['envelope'])
        self.ta_info.update(self.out.ta_info)
        botslib.tryrunscript(self.userscript,self.scriptname,'ta_infocontent',ta_info=self.ta_info)
        #prepare data for envelope: STX counter is per sender or receiver
        if botsglobal.ini.getboolean('settings','interchangecontrolperpartner',False):
            self.ta_info['reference'] = str(botslib.unique('stxcounter_' + self.ta_info['topartner']))
        else:
            self.ta_info['reference'] = str(botslib.unique('stxcounter_' + self.ta_info['frompartner']))
        #build the envelope segments (that is, the tree from which the segments will be generated)
        self.out.put({'BOTSID':'STX',
                        'STDS1':self.ta_info['STX.STDS1'],
                        'STDS2':self.ta_info['STX.STDS2'],
                        'FROM.01':self.ta_info['frompartner'],
                        'UNTO.01':self.ta_info['topartner'],
                        'TRDT.01':time.strftime('%y%m%d'),
                        'TRDT.02':time.strftime('%H%M%S'),
                        'SNRF':self.ta_info['reference']})
        #optional STX fields; only written when filled
        if self.ta_info['STX.FROM.02']:
            self.out.put({'BOTSID':'STX','FROM.02':self.ta_info['STX.FROM.02']})
        if self.ta_info['STX.UNTO.02']:
            self.out.put({'BOTSID':'STX','UNTO.02':self.ta_info['STX.UNTO.02']})
        if self.ta_info['STX.APRF']:
            self.out.put({'BOTSID':'STX','APRF':self.ta_info['STX.APRF']})
        if self.ta_info['STX.PRCD']:
            self.out.put({'BOTSID':'STX','PRCD':self.ta_info['STX.PRCD']})
        self.out.put({'BOTSID':'STX'},{'BOTSID':'END','NMST':self.ta_info['nrmessages']})  #dummy segment; is not used
        #user exit
        botslib.tryrunscript(self.userscript,self.scriptname,'envelopecontent',ta_info=self.ta_info,out=self.out)
        #convert the tree into segments; here only the STX is written (first segment)
        self.out.normalisetree(self.out.root)
        self.out.tree2records(self.out.root)
        #start doing the actual writing: STX, the message files, END
        tofile = botslib.opendata(self.ta_info['filename'],'wb',self.ta_info['charset'])
        tofile.write(self.out._record2string(self.out.records[0]))
        self.writefilelist(tofile)
        tofile.write(self.out._record2string(self.out.records[-1]))
        tofile.close()
class template(Envelope):
    ''' Enveloping/merging via a kid template (eg for (test) orderprint as html).'''
    def run(self):
        ''' class for (test) orderprint; delivers a valid html-file.
            Uses a kid-template for the enveloping/merging.
            use kid to write; no envelope grammar is used
        '''
        try:
            import kid
        except:
            txt=botslib.txtexc()
            raise ImportError(_(u'Dependency failure: editype "template" requires python library "kid". Error:\n%s'%txt))
        defmessage = grammar.grammarread(self.ta_info['editype'],self.ta_info['messagetype'])    #needed because we do not know envelope; read syntax for editype/messagetype
        self.ta_info.update(defmessage.syntax)
        botslib.tryrunscript(self.userscript,self.scriptname,'ta_infocontent',ta_info=self.ta_info)
        if not self.ta_info['envelope-template']:
            raise botslib.OutMessageError(_(u'While enveloping in "$editype.$messagetype": syntax option "envelope-template" not filled; is required.'),editype=self.ta_info['editype'],messagetype=self.ta_info['messagetype'])
        templatefile = botslib.abspath('templates',self.ta_info['envelope-template'])
        ta_list = self.filelist2absolutepaths()     #template receives the absolute paths of the message files
        try:
            botsglobal.logger.debug(u'Start writing envelope to file "%s".',self.ta_info['filename'])
            ediprint = kid.Template(file=templatefile, data=ta_list)  #init template; pass list with filenames
        except:
            txt=botslib.txtexc()
            raise botslib.OutMessageError(_(u'While enveloping in "$editype.$messagetype", error:\n$txt'),editype=self.ta_info['editype'],messagetype=self.ta_info['messagetype'],txt=txt)
        try:
            f = botslib.opendata(self.ta_info['filename'],'wb')
            ediprint.write(f,
                            encoding=self.ta_info['charset'],
                            output=self.ta_info['output'])
        except:
            txt=botslib.txtexc()
            raise botslib.OutMessageError(_(u'While enveloping in "$editype.$messagetype", error:\n$txt'),editype=self.ta_info['editype'],messagetype=self.ta_info['messagetype'],txt=txt)
class orders2printenvelope(template):
    ''' Kept for backward compatibility: same behaviour as class template.'''
    pass
class templatehtml(Envelope):
    ''' Enveloping/merging via a genshi template (eg for (test) orderprint as xhtml).'''
    def run(self):
        ''' class for (test) orderprint; delivers a valid html-file.
            Uses a genshi-template for the enveloping/merging.
            use genshi to write; no envelope grammar is used
        '''
        try:
            from genshi.template import TemplateLoader
        except:
            txt=botslib.txtexc()
            raise ImportError(_(u'Dependency failure: editype "template" requires python library "genshi". Error:\n%s'%txt))
        defmessage = grammar.grammarread(self.ta_info['editype'],self.ta_info['messagetype'])    #needed because we do not know envelope; read syntax for editype/messagetype
        self.ta_info.update(defmessage.syntax)
        botslib.tryrunscript(self.userscript,self.scriptname,'ta_infocontent',ta_info=self.ta_info)
        if not self.ta_info['envelope-template']:
            raise botslib.OutMessageError(_(u'While enveloping in "$editype.$messagetype": syntax option "envelope-template" not filled; is required.'),editype=self.ta_info['editype'],messagetype=self.ta_info['messagetype'])
        templatefile = botslib.abspath('templateshtml',self.ta_info['envelope-template'])
        ta_list = self.filelist2absolutepaths()     #template receives the absolute paths of the message files
        try:
            botsglobal.logger.debug(u'Start writing envelope to file "%s".',self.ta_info['filename'])
            loader = TemplateLoader(auto_reload=False)
            tmpl = loader.load(templatefile)
        except:
            txt=botslib.txtexc()
            raise botslib.OutMessageError(_(u'While enveloping in "$editype.$messagetype", error:\n$txt'),editype=self.ta_info['editype'],messagetype=self.ta_info['messagetype'],txt=txt)
        try:
            f = botslib.opendata(self.ta_info['filename'],'wb')
            stream = tmpl.generate(data=ta_list)
            stream.render(method='xhtml',encoding=self.ta_info['charset'],out=f)
        except:
            txt=botslib.txtexc()
            raise botslib.OutMessageError(_(u'While enveloping in "$editype.$messagetype", error:\n$txt'),editype=self.ta_info['editype'],messagetype=self.ta_info['messagetype'],txt=txt)
class x12(Envelope):
    ''' Generate envelope segments (ISA/GS ... GE/IEA); fill with appropriate data, write to interchange-file.'''
    def run(self):
        if not self.ta_info['topartner'] or not self.ta_info['frompartner']:
            raise botslib.OutMessageError(_(u'In enveloping "frompartner" or "topartner" unknown: "$ta_info".'),ta_info=self.ta_info)
        self._openoutenvelope(self.ta_info['editype'],self.ta_info['envelope'])
        self.ta_info.update(self.out.ta_info)
        #need to know the functionalgroup code (GS01):
        defmessage = grammar.grammarread(self.ta_info['editype'],self.ta_info['messagetype'])
        self.ta_info['functionalgroup'] =  defmessage.syntax['functionalgroup']
        botslib.tryrunscript(self.userscript,self.scriptname,'ta_infocontent',ta_info=self.ta_info)
        #prepare data for envelope
        ISA09date = time.strftime('%y%m%d')
        #test indicator can either be from configuration (self.ta_info['ISA15']) or by mapping (self.ta_info['testindicator'])
        #mapping overrules.
        if self.ta_info['testindicator'] and self.ta_info['testindicator']!='0':    #'0' is default value (in db)
            testindicator = self.ta_info['testindicator']
        else:
            testindicator = self.ta_info['ISA15']
        #~ print self.ta_info['messagetype'], 'grammar:',self.ta_info['ISA15'],'ta:',self.ta_info['testindicator'],'out:',testindicator
        #ISA counter is per sender or receiver
        if botsglobal.ini.getboolean('settings','interchangecontrolperpartner',False):
            self.ta_info['reference'] = str(botslib.unique('isacounter_' + self.ta_info['topartner']))
        else:
            self.ta_info['reference'] = str(botslib.unique('isacounter_' + self.ta_info['frompartner']))
        #ISA06 and GS02 can be different; eg ISA06 is a service provider.
        #ISA06 and GS02 can be in the syntax....
        ISA06 = self.ta_info.get('ISA06',self.ta_info['frompartner'])
        ISA06 = ISA06.ljust(15)     #add spaces; is fixed length
        GS02 = self.ta_info.get('GS02',self.ta_info['frompartner'])
        #also for ISA08 and GS03
        ISA08 = self.ta_info.get('ISA08',self.ta_info['topartner'])
        ISA08 = ISA08.ljust(15)     #add spaces; is fixed length
        GS03 = self.ta_info.get('GS03',self.ta_info['topartner'])
        #build the envelope segments (that is, the tree from which the segments will be generated)
        self.out.put({'BOTSID':'ISA',
                        'ISA01':self.ta_info['ISA01'],
                        'ISA02':self.ta_info['ISA02'],
                        'ISA03':self.ta_info['ISA03'],
                        'ISA04':self.ta_info['ISA04'],
                        'ISA05':self.ta_info['ISA05'],
                        'ISA06':ISA06,
                        'ISA07':self.ta_info['ISA07'],
                        'ISA08':ISA08,
                        'ISA09':ISA09date,
                        'ISA10':time.strftime('%H%M'),
                        'ISA11':self.ta_info['ISA11'],      #if ISA version > 00403, replaced by repetition separator
                        'ISA12':self.ta_info['version'],
                        'ISA13':self.ta_info['reference'],
                        'ISA14':self.ta_info['ISA14'],
                        'ISA15':testindicator},strip=False)         #MIND: strip=False: ISA fields should not be stripped as it is somewhat like fixed-length
        self.out.put({'BOTSID':'ISA'},{'BOTSID':'IEA','IEA01':'1','IEA02':self.ta_info['reference']})
        GS08 = self.ta_info['messagetype'][3:]
        if GS08[:6]<'004010':       #version dependent date format for GS04
            GS04date = time.strftime('%y%m%d')
        else:
            GS04date = time.strftime('%Y%m%d')
        self.out.put({'BOTSID':'ISA'},{'BOTSID':'GS',
                                        'GS01':self.ta_info['functionalgroup'],
                                        'GS02':GS02,
                                        'GS03':GS03,
                                        'GS04':GS04date,
                                        'GS05':time.strftime('%H%M'),
                                        'GS06':self.ta_info['reference'],
                                        'GS07':self.ta_info['GS07'],
                                        'GS08':GS08})
        self.out.put({'BOTSID':'ISA'},{'BOTSID':'GS'},{'BOTSID':'GE','GE01':self.ta_info['nrmessages'],'GE02':self.ta_info['reference']})  #dummy segment; is not used
        #user exit
        botslib.tryrunscript(self.userscript,self.scriptname,'envelopecontent',ta_info=self.ta_info,out=self.out)
        #convert the tree into segments; here only the ISA/GS are written (first segments)
        self.out.normalisetree(self.out.root)
        self.out.tree2records(self.out.root)
        #start doing the actual writing: ISA, GS, the message files, GE, IEA
        tofile = botslib.opendata(self.ta_info['filename'],'wb',self.ta_info['charset'])
        ISAstring = self.out._record2string(self.out.records[0])
        if self.ta_info['version']<'00403':
            ISAstring = ISAstring[:103] + self.ta_info['field_sep']+ self.ta_info['sfield_sep'] + ISAstring[103:]  #hack for strange characters at end of ISA; hardcoded
        else:
            ISAstring = ISAstring[:82] +self.ta_info['reserve'] + ISAstring[83:103] + self.ta_info['field_sep']+ self.ta_info['sfield_sep'] + ISAstring[103:]  #hack for strange characters at end of ISA; hardcoded
        tofile.write(ISAstring)                                         #write ISA
        tofile.write(self.out._record2string(self.out.records[1]))      #write GS
        self.writefilelist(tofile)
        tofile.write(self.out._record2string(self.out.records[-2]))     #write GE
        tofile.write(self.out._record2string(self.out.records[-1]))     #write IEA
        tofile.close()
        #ask a 997 acknowledgement if confirm-rules say so (never for functional group FA itself)
        if self.ta_info['functionalgroup']!='FA' and botslib.checkconfirmrules('ask-x12-997',idroute=self.ta_info['idroute'],idchannel=self.ta_info['tochannel'],
                                                                                topartner=self.ta_info['topartner'],frompartner=self.ta_info['frompartner'],
                                                                                editype=self.ta_info['editype'],messagetype=self.ta_info['messagetype']):
            self.ta_info['confirmtype'] = u'ask-x12-997'
            self.ta_info['confirmasked'] = True
class jsonnocheck(noenvelope):
    ''' editype 'jsonnocheck': no enveloping, output files are just concatenated (see noenvelope).'''
    pass
class json(noenvelope):
    ''' editype 'json': no enveloping, output files are just concatenated (see noenvelope).
        NOTE: class name deliberately matches the editype (envelope() looks classes up
        by name in globals()); it shadows the stdlib json module name in this module.
    '''
    pass
class xmlnocheck(noenvelope):
    ''' editype 'xmlnocheck': no enveloping, output files are just concatenated (see noenvelope).'''
    pass
class xml(noenvelope):
    ''' editype 'xml': no enveloping, output files are just concatenated (see noenvelope).'''
    pass
class myxmlenvelop(xml):
    ''' old xml enveloping; name is kept for upward comp. & as example for xml enveloping'''
    def run(self):
        ''' class for (test) xml envelope. There is no standardised XML-envelope!
            writes a new XML-tree; uses place-holders for XML-files to include; real enveloping is done by ElementTree's include'''
        include = '{http://www.w3.org/2001/XInclude}include'
        self._openoutenvelope(self.ta_info['editype'],self.ta_info['envelope'])
        botslib.tryrunscript(self.userscript,self.scriptname,'ta_infocontent',ta_info=self.ta_info)
        #~ self.out.put({'BOTSID':'root','xmlns:xi':"http://www.w3.org/2001/XInclude"})  #works, but attribute is not removed by ETI.include
        self.out.put({'BOTSID':'root'})     #start filling out-tree
        ta_list = self.filelist2absolutepaths()     #xi:include needs absolute paths
        for filename in ta_list:
            #add one xi:include place-holder per message file
            self.out.put({'BOTSID':'root'},{'BOTSID':include,include + '__parse':'xml',include + '__href':filename})
        self.out.envelopewrite(self.out.root)   #'resolves' the included xml files
class db(Envelope):
    ''' Pass-through enveloping: no output file is written; the first input
        file is reused as the 'enveloped' file. Only the user script hook runs.
    '''
    def run(self):
        botslib.tryrunscript(self.userscript,self.scriptname,'ta_infocontent',ta_info=self.ta_info)
        self.ta_info['filename'] = self.ta_list[0]
class raw(Envelope):
    ''' Pass-through enveloping: no output file is written; the first input
        file is reused as the 'enveloped' file. Only the user script hook runs.
    '''
    def run(self):
        botslib.tryrunscript(self.userscript,self.scriptname,'ta_infocontent',ta_info=self.ta_info)
        self.ta_info['filename'] = self.ta_list[0]
| [
[
1,
0,
0.0019,
0.0019,
0,
0.66,
0,
614,
0,
1,
0,
0,
614,
0,
0
],
[
1,
0,
0.0039,
0.0019,
0,
0.66,
0.037,
654,
0,
1,
0,
0,
654,
0,
0
],
[
1,
0,
0.0058,
0.0019,
0,
0... | [
"import shutil",
"import time",
"from django.utils.translation import ugettext as _",
"import botslib",
"import botsglobal",
"import grammar",
"import outmessage",
"from botsconfig import *",
"def mergemessages(startstatus=TRANSLATED,endstatus=MERGED,idroute=''):\n ''' Merges en envelopes several... |
# Django settings for bots project.
import os
import bots
#*******settings for bots error reports**********************************
#*******settings for bots error reports**********************************
MANAGERS = (    #bots will send error reports to the MANAGERS
    ('name_manager', 'manager@domain.org'),
    )
#~ EMAIL_HOST = 'smtp.gmail.com'             #Default: 'localhost'
#~ EMAIL_PORT = '587'                        #Default: 25
#~ EMAIL_USE_TLS = True                      #Default: False
#~ EMAIL_HOST_USER = 'user@gmail.com'        #Default: ''. Username to use for the SMTP server defined in EMAIL_HOST. If empty, Django won't attempt authentication.
#~ EMAIL_HOST_PASSWORD = ''                  #Default: ''. PASSWORD to use for the SMTP server defined in EMAIL_HOST. If empty, Django won't attempt authentication.
#~ SERVER_EMAIL = 'user@gmail.com'           #Sender of bots error reports. Default: 'root@localhost'
#~ EMAIL_SUBJECT_PREFIX = ''                 #This is prepended on email subject.
#*********path settings*************************advised is not to change these values!!
PROJECT_PATH = os.path.abspath(os.path.dirname(bots.__file__))  #directory of the installed bots package
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = PROJECT_PATH + '/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
#~ FILE_UPLOAD_TEMP_DIR = os.path.join(PROJECT_PATH, 'botssys/pluginsuploaded')     #set in bots.ini
ROOT_URLCONF = 'bots.urls'
LOGIN_URL = '/login/'
LOGIN_REDIRECT_URL = '/'
LOGOUT_URL = '/logout/'
#~ LOGOUT_REDIRECT_URL =  #??not such parameter; is set in urls
TEMPLATE_DIRS = (
    os.path.join(PROJECT_PATH, 'templates'),
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    )
#*********database settings*************************
#django-admin syncdb --pythonpath='/home/hje/botsup' --settings='bots.config.settings'
#SQLITE:
DATABASE_ENGINE = 'sqlite3'          # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = os.path.join(PROJECT_PATH, 'botssys/sqlitedb/botsdb')    #path to database; if relative path: interpreted relative to bots root directory
DATABASE_USER = ''                   # Not used with sqlite3.
DATABASE_PASSWORD = ''               # Not used with sqlite3.
DATABASE_HOST = ''                   # Not used with sqlite3.
DATABASE_PORT = ''                   # Not used with sqlite3.
DATABASE_OPTIONS = {}
#~ #MySQL:
#~ DATABASE_ENGINE = 'mysql'
#~ DATABASE_NAME = 'botsdb'
#~ DATABASE_USER = 'bots'
#~ DATABASE_PASSWORD = 'botsbots'
#~ DATABASE_HOST = '192.168.0.7'
#~ DATABASE_PORT = '3306'
#~ DATABASE_OPTIONS = {'use_unicode':True,'charset':'utf8',"init_command": 'SET storage_engine=INNODB'}
#PostgreSQL:
#~ DATABASE_ENGINE = 'postgresql_psycopg2'
#~ DATABASE_NAME = 'botsdb'
#~ DATABASE_USER = 'bots'
#~ DATABASE_PASSWORD = 'botsbots'
#~ DATABASE_HOST = '192.168.0.7'
#~ DATABASE_PORT = '5432'
#~ DATABASE_OPTIONS = {}
#*********sessions, cookies, log out time*************************
SESSION_EXPIRE_AT_BROWSER_CLOSE = True      #True: always log in when browser is closed
SESSION_COOKIE_AGE = 3600                   #seconds a user needs to login when no activity
SESSION_SAVE_EVERY_REQUEST = True           #if True: SESSION_COOKIE_AGE is interpreted as: since last activity
#*********localization*************************
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Amsterdam'
DATE_FORMAT = "Y-m-d"
DATETIME_FORMAT = "Y-m-d G:i"
TIME_FORMAT = "G:i"
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
#~ LANGUAGE_CODE = 'en-us'
LANGUAGE_CODE = 'en'
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
#*************************************************************************
#*********other django setting. please consult django docs.***************
#set in bots.ini
#~ DEBUG = True
#~ TEMPLATE_DEBUG = DEBUG
SITE_ID = 1
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'm@-u37qiujmeqfbu$daaaaz)sp^7an4u@h=wfx9dd$$$zl2i*x9#awojdc'
ADMINS = (
    ('bots', 'your_email@domain.com'),
    )
#save uploaded file (=plugin) always to file. no path for temp storage is used, so system default is used.
FILE_UPLOAD_HANDLERS = (
    "django.core.files.uploadhandler.TemporaryFileUploadHandler",
    )
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.load_template_source',
    'django.template.loaders.app_directories.load_template_source',
    #'django.template.loaders.eggs.load_template_source',
    )
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'bots.persistfilters.FilterPersistMiddleware',
    )
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.admin',
    'bots',
    )
TEMPLATE_CONTEXT_PROCESSORS = (
    "django.core.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.request",
    )
| [
[
1,
0,
0.0146,
0.0073,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0219,
0.0073,
0,
0.66,
0.0286,
261,
0,
1,
0,
0,
261,
0,
0
],
[
14,
0,
0.0511,
0.0219,
0,
... | [
"import os",
"import bots",
"MANAGERS = ( #bots will send error reports to the MANAGERS\n ('name_manager', 'manager@domain.org'),\n )",
"PROJECT_PATH = os.path.abspath(os.path.dirname(bots.__file__))",
"MEDIA_ROOT = PROJECT_PATH + '/'",
"MEDIA_URL = ''",
"ADMIN_MEDIA_PREFIX = '/media/'",
"ROO... |
'''
code found at code.djangoproject.com/ticket/3777
'''
from django import http
class FilterPersistMiddleware(object):
    ''' Remembers the last-used django-admin changelist filters (the query
        string) per admin page in the session, and restores them when the
        page is entered again without an explicit query string.
        Based on code found at code.djangoproject.com/ticket/3777
    '''
    def _get_default(self, key):
        """ Gets any set default filters for the admin. Returns None if no
            default is set. """
        default = None
        #~ default = settings.ADMIN_DEFAULT_FILTERS.get(key, None)
        # Filters are allowed to be functions. If this key is one, call it.
        if hasattr(default, '__call__'):
            default = default()
        return default

    def process_request(self, request):
        # Only admin GET requests are handled; POSTs must never be redirected.
        if '/admin/' not in request.path or request.method == 'POST':
            return None
        if 'HTTP_REFERER' in request.META: #'in' instead of dict.has_key(): works on python 2 and 3
            referrer = request.META['HTTP_REFERER'].split('?')[0]
            referrer = referrer[referrer.find('/admin'):len(referrer)]
        else:
            referrer = u''
        query_string = request.META.get('QUERY_STRING', '') #QUERY_STRING may be absent from the environ
        popup = 'pop=1' in query_string
        path = request.path
        session = request.session
        if session.get('redirected', False): #so that we dont loop once redirected
            del session['redirected']
            return None
        key = 'key'+path.replace('/','_')
        if popup:
            key = 'popup'+key
        if path == referrer:
            """ We are in the same page as before. We assume that filters were
                changed and update them. """
            if query_string == '': #Filter is empty, delete it
                if key in session: #'in' instead of dict.has_key()
                    del session[key]
                return None
            else:
                request.session[key] = query_string
        else:
            """ We are coming from another page. Set querystring to
                saved or default value. """
            query_string = session.get(key, self._get_default(key))
            if query_string is not None:
                redirect_to = path+'?'+query_string
                request.session['redirected'] = True #mark so the redirected request is not redirected again
                return http.HttpResponseRedirect(redirect_to)
            else:
                return None
'''
Sample default filters:
from datetime import date
def _today():
return 'starttime__gte=' + date.today().isoformat()
# Default filters. Format: 'key_$url', where $url has slashes replaced
# with underscores
# value can either be a function or a string
ADMIN_DEFAULT_FILTERS= {
# display only events starting today
'key_admin_event_calendar_event_': _today,
# display active members
'key_admin_users_member_': 'is_active__exact=1',
# only show new suggestions
'key_admin_suggestions_suggestion_': 'status__exact=new',
}
''' | [
[
8,
0,
0.025,
0.0375,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.05,
0.0125,
0,
0.66,
0.3333,
294,
0,
1,
0,
0,
294,
0,
0
],
[
3,
0,
0.4188,
0.675,
0,
0.66,
... | [
"'''\ncode found at code.djangoproject.com/ticket/3777\n'''",
"from django import http",
"class FilterPersistMiddleware(object):\n\n def _get_default(self, key):\n \"\"\" Gets any set default filters for the admin. Returns None if no \n default is set. \"\"\"\n default = None\n ... |
try:
from pysqlite2 import dbapi2 as sqlite #prefer external modules for pylite
except ImportError:
import sqlite3 as sqlite #works OK for python26
#~ #bots engine uses:
#~ ''' SELECT *
#~ FROM ta
#~ WHERE idta=%(idta)s ''',
#~ {'idta':12345})
#~ #SQLite wants:
#~ ''' SELECT *
#~ FROM ta
#~ WHERE idta=:idta ''',
#~ {'idta': 12345}
import re
# Matches pyformat-style parameter markers: %(name)s (as used by bots engine).
# Backslashes are doubled: '\(' / '\)' are invalid string escapes and raise
# SyntaxWarning (eventually SyntaxError) on modern Pythons; the compiled
# pattern value is unchanged.
reformatparamstyle = re.compile(u'%\\((?P<name>[^)]+)\\)s')
def adapter4bool(boolfrompython):
    ''' sqlite adapter: store a truthy Python value as '1', a falsy one as '0'
        (SQLite expects a string here). '''
    return '1' if boolfrompython else '0'
def converter4bool(strfromdb):
    ''' sqlite converter: map the stored string back to a Python boolean
        (True only for '1'; anything else is False). '''
    return strfromdb == '1'
# Hook the bool handling into sqlite: adapt Python bools to '1'/'0' on the way
# in, convert stored BOOLEAN column values back to Python bools on the way out.
sqlite.register_adapter(bool,adapter4bool)
sqlite.register_converter('BOOLEAN',converter4bool)
def connect(database):
    ''' Open the bots SQLite database file and return the connection.
        Uses BotsConnection (and thus BotsCursor) so bots' pyformat parameter
        style works; PARSE_DECLTYPES enables the registered BOOLEAN converter.
        Long timeout (99s) and IMMEDIATE isolation; synchronous=OFF trades
        crash-safety for speed.
    '''
    connection = sqlite.connect(database,
                                factory=BotsConnection,
                                detect_types=sqlite.PARSE_DECLTYPES,
                                timeout=99.0,
                                isolation_level='IMMEDIATE')
    connection.row_factory = sqlite.Row
    connection.execute('''PRAGMA synchronous=OFF''')
    return connection
class BotsConnection(sqlite.Connection):
    ''' Connection subclass whose cursor() hands out BotsCursor instances. '''
    def cursor(self):
        return super(BotsConnection, self).cursor(factory=BotsCursor)
class BotsCursor(sqlite.Cursor):
    ''' Cursor that rewrites bots' pyformat parameter markers (%(name)s)
        into SQLite's named style (:name) before executing. '''
    def execute(self, string, parameters=None):
        if parameters is None:
            sqlite.Cursor.execute(self, string)
        else:
            converted_query = reformatparamstyle.sub(u''':\g<name>''', string)
            sqlite.Cursor.execute(self, converted_query, parameters)
| [
[
7,
0,
0.0472,
0.0755,
0,
0.66,
0,
0,
0,
1,
0,
0,
0,
0,
0
],
[
1,
1,
0.0377,
0.0189,
1,
0.11,
0,
987,
0,
1,
0,
0,
987,
0,
0
],
[
1,
1,
0.0755,
0.0189,
1,
0.11,
... | [
"try:\n from pysqlite2 import dbapi2 as sqlite #prefer external modules for pylite\nexcept ImportError:\n import sqlite3 as sqlite #works OK for python26",
" from pysqlite2 import dbapi2 as sqlite #prefer external modules for pylite",
" import sqlite3 as sqlite #works OK for python26",
"im... |
''' Reading/lexing/parsing/splitting an edifile.'''
import StringIO
import time
import sys
try:
import cPickle as pickle
except:
import pickle
try:
import cElementTree as ET
except ImportError:
try:
import elementtree.ElementTree as ET
except ImportError:
try:
from xml.etree import cElementTree as ET
except ImportError:
from xml.etree import ElementTree as ET
try:
import json as simplejson
except ImportError:
import simplejson
from django.utils.translation import ugettext as _
import botslib
import botsglobal
import outmessage
import message
import node
import grammar
from botsconfig import *
def edifromfile(**ta_info):
    ''' Read, lex and parse an edi file.
        Dispatch function: instantiates the Inmessage subclass named by
        ta_info['editype'] and lets it initialise itself from the file.
    '''
    editype = ta_info['editype']
    try:
        handlerclass = globals()[editype]    #inmessage class to call (subclass of Inmessage)
    except KeyError:
        raise botslib.InMessageError(_(u'Unknown editype for incoming message: $editype'),editype=ta_info['editype'])
    ediobject = handlerclass(ta_info)
    ediobject.initfromfile()
    return ediobject
def _edifromparsed(editype,inode,ta_info):
    ''' Build an edi-message (inmessage-object) from a node in the tree;
        used when splitting an edi file into separate messages. '''
    handlerclass = globals()[editype]
    ediobject = handlerclass(ta_info)
    ediobject.initfromparsed(inode)
    return ediobject
#*****************************************************************************
class Inmessage(message.Message):
    ''' abstract class for incoming ediobject (file or message).
        Can be initialised from a file or a tree.
        Subclasses (fixed, var, csv, edifact, ...) provide _lex and may
        override _sniff/checkenvelope for protocol-specific handling.
    '''
    def __init__(self,ta_info):
        super(Inmessage,self).__init__()
        self.records = [] #init list of records
        self.confirminfo = {}
        self.ta_info = ta_info #here ta_info is only filled with parameters from db-ta
    def initfromfile(self):
        ''' initialisation from a edi file: read grammar, read file, sniff,
            lex and parse into a tree of nodes rooted at self.root. '''
        self.defmessage = grammar.grammarread(self.ta_info['editype'],self.ta_info['messagetype']) #read grammar, after sniffing. Information from sniffing can be used (eg name editype for edifact, using version info from UNB)
        botslib.updateunlessset(self.ta_info,self.defmessage.syntax) #write values from grammar to self.ta_info - unless these values are already set
        self.ta_info['charset'] =self.defmessage.syntax['charset'] #always use charset of edi file.
        self._readcontent_edifile()
        self._sniff() #some hard-coded parsing of edi file; eg ta_info can be overruled by syntax-parameters in edi-file
        #start lexing and parsing
        self._lex()
        del self.rawinput
        #~ self.display(self.records) #show lexed records (for protocol debugging)
        self.root = node.Node() #make root Node None.
        result = self._parse(self.defmessage.structure,self._nextrecord(self.records),self.root)
        if result:
            raise botslib.InMessageError(_(u'Unknown data beyond end of message; mostly problem with separators or message structure: "$content"'),content=result)
        del self.records
        #end parsing; self.root is root of a tree (of nodes).
        self.checkenvelope()
        #~ self.root.display() #show tree of nodes (for protocol debugging)
        #~ self.root.displayqueries() #show queries in tree of nodes (for protocol debugging)
    def initfromparsed(self,node):
        ''' initialisation from a tree (node is passed).
            to initialise message in an envelope
        '''
        self.root = node
    def handleconfirm(self,ta_fromfile,error):
        ''' end of edi file handling.
            eg writing of confirmations etc.
            No-op here; hook for subclasses.
        '''
        pass
    def _formatfield(self,value,grammarfield,record):
        ''' Format of a field is checked and converted if needed.
            Input: value (string), field definition.
            Output: the formatted value (string)
            Parameters of self.ta_info are used: triad, decimaal
            for fixed field: same handling; length is not checked.
        '''
        if grammarfield[BFORMAT] in ['A','D','T']:
            if isinstance(self,var): #check length fields in variable records
                valuelength=len(value)
                if valuelength > grammarfield[LENGTH]:
                    raise botslib.InMessageFieldError(_(u'Record "$record" field "$field" too big (max $max): "$content".'),record=record,field=grammarfield[ID],content=value,max=grammarfield[LENGTH])
                if valuelength < grammarfield[MINLENGTH]:
                    raise botslib.InMessageFieldError(_(u'Record "$record" field "$field" too small (min $min): "$content".'),record=record,field=grammarfield[ID],content=value,min=grammarfield[MINLENGTH])
            value = value.strip()
            if not value:
                pass
            elif grammarfield[BFORMAT] == 'A':
                pass
            elif grammarfield[BFORMAT] == 'D':
                #date: accept YYMMDD (6) or YYYYMMDD (8)
                try:
                    lenght = len(value)
                    if lenght==6:
                        time.strptime(value,'%y%m%d')
                    elif lenght==8:
                        time.strptime(value,'%Y%m%d')
                    else:
                        raise ValueError(u'To be catched')
                except ValueError:
                    raise botslib.InMessageFieldError(_(u'Record "$record" date field "$field" not a valid date: "$content".'),record=record,field=grammarfield[ID],content=value)
            elif grammarfield[BFORMAT] == 'T':
                #time: HHMM, HHMMSS or HHMMSS plus 1-2 digit fraction
                try:
                    lenght = len(value)
                    if lenght==4:
                        time.strptime(value,'%H%M')
                    elif lenght==6:
                        time.strptime(value,'%H%M%S')
                    elif lenght==7 or lenght==8:
                        time.strptime(value[0:6],'%H%M%S')
                        if not value[6:].isdigit():
                            raise ValueError(u'To be catched')
                    else:
                        raise ValueError(u'To be catched')
                except ValueError:
                    raise botslib.InMessageFieldError(_(u'Record "$record" time field "$field" not a valid time: "$content".'),record=record,field=grammarfield[ID],content=value)
        else: #numerics (R, N, I)
            value = value.strip()
            if not value:
                if self.ta_info['acceptspaceinnumfield']:
                    value='0'
                else:
                    raise botslib.InMessageFieldError(_(u'Record "$record" field "$field" has numeric format but contains only space.'),record=record,field=grammarfield[ID])
                #~ return '' #when num field has spaces as content, spaces are stripped. Field should be numeric.
            if value[-1] == u'-': #if minus-sign at the end, put it in front.
                value = value[-1] + value[:-1]
            value = value.replace(self.ta_info['triad'],u'') #strip triad-separators
            value = value.replace(self.ta_info['decimaal'],u'.',1) #replace decimal sign by canonical decimal sign
            if 'E' in value or 'e' in value:
                raise botslib.InMessageFieldError(_(u'Record "$record" field "$field" format "$format" contains exponent: "$content".'),record=record,field=grammarfield[ID],content=value,format=grammarfield[BFORMAT])
            if isinstance(self,var): #check length num fields in variable records
                if self.ta_info['lengthnumericbare']:
                    length = botslib.countunripchars(value,'-+.')
                else:
                    length = len(value)
                if length > grammarfield[LENGTH]:
                    raise botslib.InMessageFieldError(_(u'Record "$record" field "$field" too big (max $max): "$content".'),record=record,field=grammarfield[ID],content=value,max=grammarfield[LENGTH])
                if length < grammarfield[MINLENGTH]:
                    raise botslib.InMessageFieldError(_(u'Record "$record" field "$field" too small (min $min): "$content".'),record=record,field=grammarfield[ID],content=value,min=grammarfield[MINLENGTH])
            if grammarfield[BFORMAT] == 'I':
                #implied-decimal integer: shift DECIMALS places
                if '.' in value:
                    raise botslib.InMessageFieldError(_(u'Record "$record" field "$field" has format "I" but contains decimal sign: "$content".'),record=record,field=grammarfield[ID],content=value)
                try: #convert to decimal in order to check validity
                    valuedecimal = float(value)
                    valuedecimal = valuedecimal / 10**grammarfield[DECIMALS]
                    value = '%.*F'%(grammarfield[DECIMALS],valuedecimal)
                except:
                    raise botslib.InMessageFieldError(_(u'Record "$record" numeric field "$field" has non-numerical content: "$content".'),record=record,field=grammarfield[ID],content=value)
            elif grammarfield[BFORMAT] == 'N':
                #fixed number of decimals required
                lendecimal = len(value[value.find('.'):])-1
                if lendecimal != grammarfield[DECIMALS]:
                    raise botslib.InMessageFieldError(_(u'Record "$record" numeric field "$field" has invalid nr of decimals: "$content".'),record=record,field=grammarfield[ID],content=value)
                try: #convert to decimal in order to check validity
                    valuedecimal = float(value)
                    value = '%.*F'%(lendecimal,valuedecimal)
                except:
                    raise botslib.InMessageFieldError(_(u'Record "$record" numeric field "$field" has non-numerical content: "$content".'),record=record,field=grammarfield[ID],content=value)
            elif grammarfield[BFORMAT] == 'R':
                #free number of decimals
                lendecimal = len(value[value.find('.'):])-1
                try: #convert to decimal in order to check validity
                    valuedecimal = float(value)
                    value = '%.*F'%(lendecimal,valuedecimal)
                except:
                    raise botslib.InMessageFieldError(_(u'Record "$record" numeric field "$field" has non-numerical content: "$content".'),record=record,field=grammarfield[ID],content=value)
        return value
    def _parse(self,tab,_nextrecord,inode,rec2parse=None,argmessagetype=None,argnewnode=None):
        ''' parse the lexed records. validate message against grammar.
            add grammar-info to records in self.records: field-tag,mpath.
            Tab: current grammar/segmentgroup of the grammar-structure.
            Read the records one by one.
            Lookup record in tab.
            if found:
                if headersegment (tabrecord has own tab):
                    go recursive.
            if not found:
                if trailer:
                    jump back recursive, returning the unparsed record.
        '''
        for tabrec in tab: #clear counts for tab-records (start fresh).
            tabrec[COUNT] = 0
        tabindex = 0
        tabmax = len(tab)
        if rec2parse is None:
            parsenext = True
            subparse=False
        else: #only for subparsing
            parsenext = False
            subparse=True
        while 1:
            if parsenext:
                try:
                    rec2parse = _nextrecord.next()
                except StopIteration: #catch when no more rec2parse.
                    rec2parse = None
                parsenext = False
            if rec2parse is None or tab[tabindex][ID] != rec2parse[ID][VALUE]:
                #for StopIteration(loop rest of grammar) or when rec2parse
                if tab[tabindex][COUNT] < tab[tabindex][MIN]:
                    #mandatory record missing; TypeError below: rec2parse is None (end of input)
                    try:
                        raise botslib.InMessageError(_(u'line:$line pos:$pos; record:"$record" not in grammar; looked in grammar until mandatory record: "$looked".'),record=rec2parse[ID][VALUE],line=rec2parse[ID][LIN],pos=rec2parse[ID][POS],looked=tab[tabindex][MPATH])
                    except TypeError:
                        raise botslib.InMessageError(_(u'missing mandatory record at message-level: "$record"'),record=tab[tabindex][MPATH])
                #TODO: line/pos of original file in error...when this is possible, XML?
                tabindex += 1
                if tabindex >= tabmax: #rec2parse is not in this level. Go level up
                    return rec2parse #return either None (for StopIteration) or the last record2parse (not found in this level)
                #continue while-loop (parsenext is false)
            else: #if found in grammar
                tab[tabindex][COUNT] += 1
                if tab[tabindex][COUNT] > tab[tabindex][MAX]:
                    raise botslib.InMessageError(_(u'line:$line pos:$pos; too many repeats record "$record".'),line=rec2parse[ID][LIN],pos=rec2parse[ID][POS],record=tab[tabindex][ID])
                if argmessagetype: #that is, header segment of subtranslation
                    newnode = argnewnode #use old node that is already parsed
                    newnode.queries = {'messagetype':argmessagetype} #copy messagetype into 1st segment of subtranslation (eg UNH, ST)
                    argmessagetype=None
                else:
                    newnode = node.Node(record=self._parsefields(rec2parse,tab[tabindex][FIELDS]),BOTSIDnr=tab[tabindex][BOTSIDnr]) #make new node
                    if botsglobal.ini.getboolean('settings','readrecorddebug',False):
                        botsglobal.logger.debug(u'read record "%s" (line %s pos %s):',tab[tabindex][ID],rec2parse[ID][LIN],rec2parse[ID][POS])
                        for key,value in newnode.record.items():
                            botsglobal.logger.debug(u'    "%s" : "%s"',key,value)
                if SUBTRANSLATION in tab[tabindex]: # subparse starts here: tree is build for this messagetype; the messagetype is read from the edifile
                    messagetype = self._getmessagetype(newnode.enhancedget(tab[tabindex][SUBTRANSLATION],replace=True),inode)
                    if not messagetype:
                        raise botslib.InMessageError(_(u'could not find SUBTRANSLATION "$sub" in (sub)message.'),sub=tab[tabindex][SUBTRANSLATION])
                    defmessage = grammar.grammarread(self.__class__.__name__,messagetype)
                    rec2parse = self._parse(defmessage.structure,_nextrecord,inode,rec2parse=rec2parse,argmessagetype=messagetype,argnewnode=newnode)
                    #~ end subparse for messagetype
                else:
                    inode.append(newnode) #append new node to current node
                if LEVEL in tab[tabindex]: #if header, go to subgroup
                    rec2parse = self._parse(tab[tabindex][LEVEL],_nextrecord,newnode)
                    if subparse: #back in top level of subparse: return (to motherparse)
                        return rec2parse
                else:
                    parsenext = True
                self.get_queries_from_edi(inode.children[-1],tab[tabindex])
    def _getmessagetype(self,messagetypefromsubtranslation,inode):
        #hook: subclasses may adapt the messagetype found via SUBTRANSLATION
        return messagetypefromsubtranslation
    def get_queries_from_edi(self,node,trecord):
        ''' extract information from edifile using QUERIES in grammar.structure; information will be placed in ta_info and in db-ta
        '''
        if QUERIES in trecord:
            #~ print 'Print QUERIES'
            tmpdict = {}
            #~ print trecord[QUERIES]
            for key,value in trecord[QUERIES].items():
                found = node.enhancedget(value) #search in last added node
                if found:
                    #~ print '    found',found,value
                    tmpdict[key] = found #copy key to avoid memory problems
                #~ else:
                    #~ print '    not found',value
            node.queries = tmpdict
    def _readcontent_edifile(self):
        ''' read content of edi file to memory.
        '''
        #TODO test, catch exceptions
        botsglobal.logger.debug(u'read edi file "%s".',self.ta_info['filename'])
        self.rawinput = botslib.readdata(filename=self.ta_info['filename'],charset=self.ta_info['charset'],errors=self.ta_info['checkcharsetin'])
    def _sniff(self):
        ''' sniffing: hard coded parsing of edi file.
            method is specified in subclasses.
        '''
        pass
    def checkenvelope(self):
        #hook: no-op here; subclasses with an envelope may override
        pass
    @staticmethod
    def _nextrecord(records):
        ''' generator for records that are lexed.'''
        for record in records:
            yield record
    def nextmessage(self):
        ''' Generates each message as a separate Inmessage.
            Three split strategies, depending on the grammar:
            nextmessage, nextmessageblock, or no split at all.
        '''
        #~ self.root.display()
        if self.defmessage.nextmessage is not None: #if nextmessage defined in grammar: split up messages
            first = True
            for message in self.getloop(*self.defmessage.nextmessage): #get node of each message
                if first:
                    self.root.processqueries({},len(self.defmessage.nextmessage))
                    first = False
                ta_info = self.ta_info.copy()
                ta_info.update(message.queries)
                #~ ta_info['botsroot']=self.root
                yield _edifromparsed(self.__class__.__name__,message,ta_info)
            if self.defmessage.nextmessage2 is not None: #edifact needs nextmessage2...OK
                first = True
                for message in self.getloop(*self.defmessage.nextmessage2):
                    if first:
                        self.root.processqueries({},len(self.defmessage.nextmessage2))
                        first = False
                    ta_info = self.ta_info.copy()
                    ta_info.update(message.queries)
                    #~ ta_info['botsroot']=self.root
                    yield _edifromparsed(self.__class__.__name__,message,ta_info)
        elif self.defmessage.nextmessageblock is not None: #for csv/fixed: nextmessageblock indicates which field determines a message (as long as the field is the same, it is one message)
            #there is only one recordtype (this is checked in grammar.py).
            first = True
            for line in self.root.children:
                kriterium = line.get(self.defmessage.nextmessageblock)
                if first:
                    first = False
                    newroot = node.Node() #make new empty root node.
                    oldkriterium = kriterium
                elif kriterium != oldkriterium:
                    ta_info = self.ta_info.copy()
                    ta_info.update(oldline.queries) #update ta_info with information (from previous line) 20100905
                    #~ ta_info['botsroot']=self.root #give mapping script access to all information in edi file: all records
                    yield _edifromparsed(self.__class__.__name__,newroot,ta_info)
                    newroot = node.Node() #make new empty root node.
                    oldkriterium = kriterium
                else:
                    pass #if kriterium is the same
                newroot.append(line)
                oldline = line #save line 20100905
            else:
                #for/else: after the loop, flush the last collected block
                if not first:
                    ta_info = self.ta_info.copy()
                    ta_info.update(line.queries) #update ta_info with information (from last line) 20100904
                    #~ ta_info['botsroot']=self.root
                    yield _edifromparsed(self.__class__.__name__,newroot,ta_info)
        else: #no split up indicated in grammar;
            if self.root.record or self.ta_info['pass_all']: #if contains root-record or explicitly indicated (csv): pass whole tree
                ta_info = self.ta_info.copy()
                ta_info.update(self.root.queries)
                #~ ta_info['botsroot']=None #??is the same as self.root, so I use None??.
                yield _edifromparsed(self.__class__.__name__,self.root,ta_info)
            else: #pass nodes under root one by one
                for child in self.root.children:
                    ta_info = self.ta_info.copy()
                    ta_info.update(child.queries)
                    #~ ta_info['botsroot']=self.root #give mapping script access to all information in edi file: all roots
                    yield _edifromparsed(self.__class__.__name__,child,ta_info)
class fixed(Inmessage):
    ''' class for record of fixed length.'''
    def _lex(self):
        ''' lexes file with fixed records to list of records (self.records).
            Each line is one record; the record name is cut out of the line
            at [startrecordID:endrecordID].
        '''
        linenr = 0
        startrecordID = self.ta_info['startrecordID']
        endrecordID = self.ta_info['endrecordID']
        self.rawinputfile = StringIO.StringIO(self.rawinput) #self.rawinputfile is an iterator
        for line in self.rawinputfile:
            linenr += 1
            line=line.rstrip('\r\n')
            self.records += [ [{VALUE:line[startrecordID:endrecordID].strip(),LIN:linenr,POS:0,FIXEDLINE:line}] ] #append record to recordlist
        self.rawinputfile.close()
    def _parsefields(self,recordEdiFile,trecord):
        ''' Parse fields from one fixed message-record (from recordEdiFile[ID][FIXEDLINE] using positions.
            fields are placed in dict, where key=field-info from grammar and value is from fixedrecord.'''
        recorddict = {} #start with empty dict
        fixedrecord = recordEdiFile[ID][FIXEDLINE] #shortcut to fixed record we are parsing
        lenfixed = len(fixedrecord)
        recordlength = 0
        for field in trecord: #calculate total length of record from field lengths
            recordlength += field[LENGTH]
        #record length checks are switchable via ta_info flags
        if recordlength > lenfixed and self.ta_info['checkfixedrecordtooshort']:
            raise botslib.InMessageError(_(u'line $line record "$record" too short; is $pos pos, defined is $defpos pos: "$content".'),line=recordEdiFile[ID][LIN],record=recordEdiFile[ID][VALUE],pos=lenfixed,defpos=recordlength,content=fixedrecord)
        if recordlength < lenfixed and self.ta_info['checkfixedrecordtoolong']:
            raise botslib.InMessageError(_(u'line $line record "$record" too long; is $pos pos, defined is $defpos pos: "$content".'),line=recordEdiFile[ID][LIN],record=recordEdiFile[ID][VALUE],pos=lenfixed,defpos=recordlength,content=fixedrecord)
        pos = 0
        for field in trecord: #for fields in this record
            value = fixedrecord[pos:pos+field[LENGTH]] #slice the field out of the line by position
            try:
                value = self._formatfield(value,field,fixedrecord)
            except botslib.InMessageFieldError:
                txt=botslib.txtexc()
                raise botslib.InMessageFieldError(_(u'line:$line pos:$pos. Error:\n$txt'),line=recordEdiFile[ID][LIN],pos=pos,txt=txt)
            if value:
                recorddict[field[ID][:]] = value #copy id string to avoid memory problem ; value is already a copy
            else:
                if field[MANDATORY]==u'M':
                    raise botslib.InMessageFieldError(_(u'line:$line pos:$pos; mandatory field "$field" not in record "$record".'),line=recordEdiFile[ID][LIN],pos=pos,field=field[ID],record=recordEdiFile[ID][VALUE])
            pos += field[LENGTH]
            #~ if pos > lenfixed:
                #~ break
        return recorddict
class idoc(fixed):
    ''' SAP idoc flavour of fixed-length records.
        Incoming handling is identical to fixed; SAP's stripping of empty
        trailing fields per record is catered for in grammar.defaultsyntax.
    '''
    def _sniff(self):
        ''' hard-coded check of the file start: skip leading whitespace and
            verify the interchange opens with the EDI_DC control record.
        '''
        for position, char in enumerate(self.rawinput):
            if char.isspace():
                continue
            self.rawinput = self.rawinput[position:]    #interchange starts at first non-whitespace char
            break
        else:
            raise botslib.InMessageError(_(u'edi file only contains whitespace.'))
        if self.rawinput[:6] != 'EDI_DC':
            raise botslib.InMessageError(_(u'expect "EDI_DC", found "$content". Probably no SAP idoc.'),content=self.rawinput[:6])
class var(Inmessage):
    ''' abstract class for ediobjects with records of variable length.
        _lex is a character-level state machine handling quoting, escaping,
        field/subfield/record separators and skip characters.
    '''
    def _lex(self):
        ''' lexes file with variable records to list of records, fields and subfields (self.records).'''
        quote_char = self.ta_info['quote_char']
        skip_char = self.ta_info['skip_char'] #skip char (ignore);
        escape = self.ta_info['escape'] #char after escape-char is not interpreted as seperator
        field_sep = self.ta_info['field_sep'] + self.ta_info['record_tag_sep'] #for tradacoms; field_sep and record_tag_sep have same function.
        sfield_sep = self.ta_info['sfield_sep']
        record_sep = self.ta_info['record_sep']
        mode_escape = 0 #0=not escaping, 1=escaping
        mode_quote = 0 #0=not in quote, 1=in quote
        mode_2quote = 0 #0=not escaping quote, 1=escaping quote.
        mode_inrecord = 0 #indicates if lexing a record. If mode_inrecord==0: skip whitespace
        sfield = False #True: is a subfield, False: not a subfield
        value = u'' #the value of the current token
        record = []
        valueline = 1 #starting line of token
        valuepos = 1 #starting position of token
        countline = 1
        countpos = 0
        #charset, separators etc were determined earlier (eg from UNA/UNOB)
        for c in self.rawinput: #get next char
            if c == u'\n': #line within file
                countline += 1
                countpos = 0 #new line, pos back to 0
                #no continue, because \n can be record separator. In edifact: catched with skip_char
            else:
                countpos += 1 #position within line
            if mode_quote: #within a quote: quote-char is also escape-char
                if mode_2quote and c == quote_char: #thus we were escaping quote_char
                    mode_2quote = 0
                    value += c #append quote_char
                    continue
                elif mode_escape: #tricky: escaping a quote char
                    mode_escape = 0
                    value += c
                    continue
                elif mode_2quote: #thus is was a end-quote
                    mode_2quote = 0
                    mode_quote= 0
                    #go on parsing
                elif c==quote_char: #either end-quote or escaping quote_char,we do not know yet
                    mode_2quote = 1
                    continue
                elif c == escape:
                    mode_escape = 1
                    continue
                else:
                    value += c
                    continue
            if mode_inrecord:
                pass #do nothing, is already in mode_inrecord
            else:
                if c.isspace():
                    continue #not in mode_inrecord, and a space: ignore space between records.
                else:
                    mode_inrecord = 1
            if c in skip_char: #after mode_quote, but before mode_escape!!
                continue
            if mode_escape: #always append in escaped_mode
                mode_escape = 0
                value += c
                continue
            if not value: #if no char in token: this is a new token, get line and pos for (new) token
                valueline = countline
                valuepos = countpos
            if c == quote_char:
                mode_quote = 1
                continue
            if c == escape:
                mode_escape = 1
                continue
            if c in field_sep: #for tradacoms: record_tag_sep is appended to field_sep; in lexing they have the same function
                record += [{VALUE:value,SFIELD:sfield,LIN:valueline,POS:valuepos}] #append element in record
                value = u''
                sfield = False
                continue
            if c == sfield_sep:
                record += [{VALUE:value,SFIELD:sfield,LIN:valueline,POS:valuepos}] #append element in record
                value = u''
                sfield = True
                continue
            if c in record_sep:
                record += [{VALUE:value,SFIELD:sfield,LIN:valueline,POS:valuepos}] #append element in record
                self.records += [record] #write record to recordlist
                record=[]
                value = u''
                sfield = False
                mode_inrecord=0
                continue
            value += c #just a char: append char to value
        #end of for-loop. all characters have been processed.
        #in a perfect world, value should always be empty now, but:
        #it appears a csv record is not always closed properly, so force the closing of the last record of csv file:
        if mode_inrecord and isinstance(self,csv) and self.ta_info['allow_lastrecordnotclosedproperly']:
            record += [{VALUE:value,SFIELD:sfield,LIN:valueline,POS:valuepos}] #append element in record
            self.records += [record] #write record to recordlist
        elif value.strip('\x00\x1a'):
            raise botslib.InMessageError(_(u'translation problem with lexing; probably a seperator-problem, or extra characters after interchange'))
    def _striprecord(self,recordEdiFile):
        #build a short (max 35 chars + marker) preview of a lexed record for error messages
        #~ return [field[VALUE] for field in recordEdiFile]
        terug = ''
        for field in recordEdiFile:
            terug += field[VALUE] + ' '
        if len(terug) > 35:
            terug = terug[:35] + ' (etc)'
        return terug
    def _parsefields(self,recordEdiFile,trecord):
        ''' Check all fields in message-record with field-info in grammar
            Build a dictionary of fields (field-IDs are unique within record), and return this.
        '''
        recorddict = {}
        #****************** first: identify fields: assign field id to lexed fields
        tindex = -1 #elementcounter; composites count as one
        tsubindex=0 #sub-element counter (within composite)
        for rfield in recordEdiFile: #handle both fields and sub-fields
            if rfield[SFIELD]:
                tsubindex += 1
                try:
                    field = trecord[tindex][SUBFIELDS][tsubindex]
                except TypeError: #field has no SUBFIELDS
                    raise botslib.InMessageFieldError(_(u'line:$line pos:$pos; expect field, is a subfield; record "$record".'),line=rfield[LIN],pos=rfield[POS],record=self._striprecord(recordEdiFile))
                except IndexError: #tsubindex is not in the subfields
                    raise botslib.InMessageFieldError(_(u'line:$line pos:$pos; too many subfields; record "$record".'),line=rfield[LIN],pos=rfield[POS],record=self._striprecord(recordEdiFile))
            else:
                tindex += 1
                try:
                    field = trecord[tindex]
                except IndexError:
                    raise botslib.InMessageFieldError(_(u'line:$line pos:$pos; too many fields; record "$record".'),line=rfield[LIN],pos=rfield[POS],record=self._striprecord(recordEdiFile))
                if not field[ISFIELD]: #if field is subfield
                    tsubindex = 0
                    field = trecord[tindex][SUBFIELDS][tsubindex]
            #*********if field has content: check format and add to recorddictionary
            if rfield[VALUE]:
                try:
                    rfield[VALUE] = self._formatfield(rfield[VALUE],field,recordEdiFile[0][VALUE])
                except botslib.InMessageFieldError:
                    txt=botslib.txtexc()
                    raise botslib.InMessageFieldError(_(u'line:$line pos:$pos. Error:\n$txt'),line=rfield[LIN],pos=rfield[POS],txt=txt)
                recorddict[field[ID][:]]=rfield[VALUE][:] #copy string to avoid memory problems
        #****************** then: check M/C
        for tfield in trecord:
            if tfield[ISFIELD]: #tfield is normal field (not a composite)
                if tfield[MANDATORY]==u'M' and tfield[ID] not in recorddict:
                    raise botslib.InMessageError(_(u'line:$line mandatory field "$field" not in record "$record".'),line=recordEdiFile[0][LIN],field=tfield[ID],record=self._striprecord(recordEdiFile))
            else:
                compositefilled = False
                for sfield in tfield[SUBFIELDS]: #t[2]: subfields in grammar
                    if sfield[ID] in recorddict:
                        compositefilled = True
                        break
                if compositefilled:
                    for sfield in tfield[SUBFIELDS]: #t[2]: subfields in grammar
                        if sfield[MANDATORY]==u'M' and sfield[ID] not in recorddict:
                            raise botslib.InMessageError(_(u'line:$line mandatory subfield "$field" not in composite, record "$record".'),line=recordEdiFile[0][LIN],field=sfield[ID],record=self._striprecord(recordEdiFile))
                if not compositefilled and tfield[MANDATORY]==u'M':
                    raise botslib.InMessageError(_(u'line:$line mandatory composite "$field" not in record "$record".'),line=recordEdiFile[0][LIN],field=tfield[ID],record=self._striprecord(recordEdiFile))
        return recorddict
class csv(var):
    ''' variable-length records with separators: comma separated values.'''
    def _lex(self):
        ''' lex as a normal variable-record file, then apply csv specifics:
            optionally drop the header line and/or prepend a BOTSID field.
        '''
        var._lex(self)
        if self.ta_info['skip_firstline']:    #first line contains field names, not data
            self.records.pop(0)
        if self.ta_info['noBOTSID']:    #records in the file carry no BOTSID: insert the grammar's record name
            botsid = self.defmessage.structure[0][ID]
            for lexedrecord in self.records:
                lexedrecord.insert(0, {VALUE: botsid, POS: 0, LIN: 0, SFIELD: False})
class edifact(var):
    ''' class for edifact inmessage objects.'''
    def _readcontent_edifile(self):
        ''' read content of edi file in memory.
            For edifact: not unicode. after sniffing unicode is used to check charset (UNOA etc)
            In sniff: determine charset; then decode according to charset
        '''
        botsglobal.logger.debug(u'read edi file "%s".',self.ta_info['filename'])
        self.rawinput = botslib.readdata(filename=self.ta_info['filename'],errors=self.ta_info['checkcharsetin'])
    def _sniff(self):
        ''' examine a read file for syntax parameters and correctness of protocol
            eg parse UNA, find UNB, get charset and version
        '''
        #goto char that is alphanumeric
        for count,c in enumerate(self.rawinput):
            if c.isalnum():
                break
        else:
            raise botslib.InMessageError(_(u'edi file only contains whitespace.'))
        #an (optional) UNA-segment explicitly defines the separators for this interchange
        if self.rawinput[count:count+3] == 'UNA':
            unacharset=True
            self.ta_info['sfield_sep'] = self.rawinput[count+3]
            self.ta_info['field_sep'] = self.rawinput[count+4]
            self.ta_info['decimaal'] = self.rawinput[count+5]
            self.ta_info['escape'] = self.rawinput[count+6]
            self.ta_info['reserve'] = '' #self.rawinput[count+7] #for now: no support of repeating dataelements
            self.ta_info['record_sep'] = self.rawinput[count+8]
            #goto char that is alphanumeric
            for count2,c in enumerate(self.rawinput[count+9:]):
                if c.isalnum():
                    break
            self.rawinput = self.rawinput[count+count2+9:] #here the interchange should start; UNA is no longer needed
        else:
            unacharset=False
            self.rawinput = self.rawinput[count:] #here the interchange should start
        if self.rawinput[:3] != 'UNB':
            raise botslib.InMessageError(_(u'No "UNB" at the start of file. Maybe not edifact.'))
        self.ta_info['charset'] = self.rawinput[4:8]
        self.ta_info['version'] = self.rawinput[9:10]
        if not unacharset:
            #no UNA-segment was present: deduce the separators from the UNB-segment itself
            if self.rawinput[3:4]=='+' and self.rawinput[8:9]==':': #assume standard separators.
                self.ta_info['sfield_sep'] = ':'
                self.ta_info['field_sep'] = '+'
                self.ta_info['decimaal'] = '.'
                self.ta_info['escape'] = '?'
                self.ta_info['reserve'] = '' #for now: no support of repeating dataelements
                self.ta_info['record_sep'] = "'"
            elif self.rawinput[3:4]=='\x1D' and self.rawinput[8:9]=='\x1F': #check if UNOB separators are used
                self.ta_info['sfield_sep'] = '\x1F'
                self.ta_info['field_sep'] = '\x1D'
                self.ta_info['decimaal'] = '.'
                self.ta_info['escape'] = ''
                self.ta_info['reserve'] = '' #for now: no support of repeating dataelements
                self.ta_info['record_sep'] = '\x1C'
            else:
                raise botslib.InMessageError(_(u'Incoming edi file uses non-standard separators - should use UNA.'))
        #charset is now known: decode the raw input accordingly
        try:
            self.rawinput = self.rawinput.decode(self.ta_info['charset'],self.ta_info['checkcharsetin'])
        except LookupError:
            raise botslib.InMessageError(_(u'Incoming edi file has unknown charset "$charset".'),charset=self.ta_info['charset'])
        except UnicodeDecodeError, flup:
            raise botslib.InMessageError(_(u'not allowed chars in incoming edi file (for translation) at/after filepos: $content'),content=flup[2])
    def checkenvelope(self):
        ''' check envelopes of an edifact interchange: UNB-UNZ, UNG-UNE and UNH-UNT
            references and counts must match; gathers information used later to
            generate CONTRL confirmations.
        '''
        self.confirmationlist = [] #information about the edifact file for confirmation/CONTRL; for edifact this is done per interchange (UNB-UNZ)
        for nodeunb in self.getloop({'BOTSID':'UNB'}):
            botsglobal.logmap.debug(u'Start parsing edifact envelopes')
            sender = nodeunb.get({'BOTSID':'UNB','S002.0004':None})
            receiver = nodeunb.get({'BOTSID':'UNB','S003.0010':None})
            UNBreference = nodeunb.get({'BOTSID':'UNB','0020':None})
            UNZreference = nodeunb.get({'BOTSID':'UNB'},{'BOTSID':'UNZ','0020':None})
            if UNBreference != UNZreference:
                raise botslib.InMessageError(_(u'UNB-reference is "$UNBreference"; should be equal to UNZ-reference "$UNZreference".'),UNBreference=UNBreference,UNZreference=UNZreference)
            UNZcount = nodeunb.get({'BOTSID':'UNB'},{'BOTSID':'UNZ','0036':None})
            messagecount = len(nodeunb.children) - 1
            if int(UNZcount) != messagecount:
                raise botslib.InMessageError(_(u'Count in messages in UNZ is $UNZcount; should be equal to number of messages $messagecount.'),UNZcount=UNZcount,messagecount=messagecount)
            self.confirmationlist.append({'UNBreference':UNBreference,'UNZcount':UNZcount,'sender':sender,'receiver':receiver,'UNHlist':[]}) #gather information about functional group (GS-GE)
            for nodeunh in nodeunb.getloop({'BOTSID':'UNB'},{'BOTSID':'UNH'}):
                UNHtype = nodeunh.get({'BOTSID':'UNH','S009.0065':None})
                UNHversion = nodeunh.get({'BOTSID':'UNH','S009.0052':None})
                UNHrelease = nodeunh.get({'BOTSID':'UNH','S009.0054':None})
                UNHcontrollingagency = nodeunh.get({'BOTSID':'UNH','S009.0051':None})
                UNHassociationassigned = nodeunh.get({'BOTSID':'UNH','S009.0057':None})
                UNHreference = nodeunh.get({'BOTSID':'UNH','0062':None})
                UNTreference = nodeunh.get({'BOTSID':'UNH'},{'BOTSID':'UNT','0062':None})
                if UNHreference != UNTreference:
                    raise botslib.InMessageError(_(u'UNH-reference is "$UNHreference"; should be equal to UNT-reference "$UNTreference".'),UNHreference=UNHreference,UNTreference=UNTreference)
                UNTcount = nodeunh.get({'BOTSID':'UNH'},{'BOTSID':'UNT','0074':None})
                segmentcount = nodeunh.getcount()
                if int(UNTcount) != segmentcount:
                    raise botslib.InMessageError(_(u'Segmentcount in UNT is $UNTcount; should be equal to number of segments $segmentcount.'),UNTcount=UNTcount,segmentcount=segmentcount)
                self.confirmationlist[-1]['UNHlist'].append({'UNHreference':UNHreference,'UNHtype':UNHtype,'UNHversion':UNHversion,'UNHrelease':UNHrelease,'UNHcontrollingagency':UNHcontrollingagency,'UNHassociationassigned':UNHassociationassigned}) #add info per message to interchange
            for nodeung in nodeunb.getloop({'BOTSID':'UNB'},{'BOTSID':'UNG'}):
                UNGreference = nodeung.get({'BOTSID':'UNG','0048':None})
                UNEreference = nodeung.get({'BOTSID':'UNG'},{'BOTSID':'UNE','0048':None})
                if UNGreference != UNEreference:
                    raise botslib.InMessageError(_(u'UNG-reference is "$UNGreference"; should be equal to UNE-reference "$UNEreference".'),UNGreference=UNGreference,UNEreference=UNEreference)
                UNEcount = nodeung.get({'BOTSID':'UNG'},{'BOTSID':'UNE','0060':None})
                groupcount = len(nodeung.children) - 1
                if int(UNEcount) != groupcount:
                    raise botslib.InMessageError(_(u'Groupcount in UNE is $UNEcount; should be equal to number of groups $groupcount.'),UNEcount=UNEcount,groupcount=groupcount)
                for nodeunh in nodeung.getloop({'BOTSID':'UNG'},{'BOTSID':'UNH'}):
                    UNHreference = nodeunh.get({'BOTSID':'UNH','0062':None})
                    UNTreference = nodeunh.get({'BOTSID':'UNH'},{'BOTSID':'UNT','0062':None})
                    if UNHreference != UNTreference:
                        raise botslib.InMessageError(_(u'UNH-reference is "$UNHreference"; should be equal to UNT-reference "$UNTreference".'),UNHreference=UNHreference,UNTreference=UNTreference)
                    UNTcount = nodeunh.get({'BOTSID':'UNH'},{'BOTSID':'UNT','0074':None})
                    segmentcount = nodeunh.getcount()
                    if int(UNTcount) != segmentcount:
                        raise botslib.InMessageError(_(u'Segmentcount in UNT is $UNTcount; should be equal to number of segments $segmentcount.'),UNTcount=UNTcount,segmentcount=segmentcount)
            botsglobal.logmap.debug(u'Parsing edifact envelopes is OK')
    def handleconfirm(self,ta_fromfile,error):
        ''' end of edi file handling.
            eg writing of confirmations etc.
            send CONTRL messages
            parameter 'error' is not used
        '''
        #filter the confirmationlist
        tmpconfirmationlist = []
        for confirmation in self.confirmationlist:
            tmpmessagelist = []
            for message in confirmation['UNHlist']:
                if message['UNHtype'] == 'CONTRL': #do not generate CONTRL for a CONTRL message
                    continue
                if botslib.checkconfirmrules('send-edifact-CONTRL',idroute=self.ta_info['idroute'],idchannel=self.ta_info['fromchannel'],
                                                topartner=confirmation['sender'],frompartner=confirmation['receiver'],
                                                editype='edifact',messagetype=message['UNHtype']):
                    tmpmessagelist.append(message)
            confirmation['UNHlist'] = tmpmessagelist
            if not tmpmessagelist: #if no messages/transactions in interchange
                continue
            tmpconfirmationlist.append(confirmation)
        self.confirmationlist = tmpconfirmationlist
        #generate one CONTRL-message per interchange that is left after filtering
        for confirmation in self.confirmationlist:
            reference=str(botslib.unique('messagecounter'))
            ta_confirmation = ta_fromfile.copyta(status=TRANSLATED,reference=reference)
            filename = str(ta_confirmation.idta)
            out = outmessage.outmessage_init(editype='edifact',messagetype='CONTRL22UNEAN002',filename=filename) #make outmessage object
            out.ta_info['frompartner']=confirmation['receiver']
            out.ta_info['topartner']=confirmation['sender']
            out.put({'BOTSID':'UNH','0062':reference,'S009.0065':'CONTRL','S009.0052':'2','S009.0054':'2','S009.0051':'UN','S009.0057':'EAN002'})
            out.put({'BOTSID':'UNH'},{'BOTSID':'UCI','0083':'8','S002.0004':confirmation['sender'],'S003.0010':confirmation['sender'],'0020':confirmation['UNBreference']}) #8: interchange received
            for message in confirmation['UNHlist']:
                lou = out.putloop({'BOTSID':'UNH'},{'BOTSID':'UCM'})
                lou.put({'BOTSID':'UCM','0083':'7','S009.0065':message['UNHtype'],'S009.0052':message['UNHversion'],'S009.0054':message['UNHrelease'],'S009.0051':message['UNHcontrollingagency'],'0062':message['UNHreference']})
                lou.put({'BOTSID':'UCM','S009.0057':message['UNHassociationassigned']})
            out.put({'BOTSID':'UNH'},{'BOTSID':'UNT','0074':out.getcount()+1,'0062':reference}) #last line (counts the segments produced in out-message)
            out.writeall() #write tomessage (result of translation)
            botsglobal.logger.debug(u'Send edifact confirmation (CONTRL) route "%s" fromchannel "%s" frompartner "%s" topartner "%s".',
                self.ta_info['idroute'],self.ta_info['fromchannel'],confirmation['receiver'],confirmation['sender'])
            self.confirminfo = dict(confirmtype='send-edifact-CONTRL',confirmed=True,confirmasked = True,confirmidta=ta_confirmation.idta) #this info is used in transform.py to update the ta.....ugly...
            ta_confirmation.update(statust=OK,**out.ta_info) #update ta for confirmation
class x12(var):
    ''' class for x12 inmessage objects.'''
    def _getmessagetype(self,messagetypefromsubtranslation,inode):
        ''' append the GS08-version to the messagetype found by subtranslation.'''
        if messagetypefromsubtranslation is None:
            return None
        return messagetypefromsubtranslation + inode.record['GS08']
    def _sniff(self):
        ''' examine a file for syntax parameters and correctness of protocol
            eg parse ISA, get charset and version
        '''
        #goto char that is not whitespace
        for count,c in enumerate(self.rawinput):
            if not c.isspace():
                self.rawinput = self.rawinput[count:] #here the interchange should start
                break
        else:
            raise botslib.InMessageError(_(u'edifile only contains whitespace.'))
        if self.rawinput[:3] != 'ISA':
            raise botslib.InMessageError(_(u'expect "ISA", found "$content". Probably no x12?'),content=self.rawinput[:7])
        #ISA-segment is fixed length: char 4 is the field separator,
        #char 105 the subfield separator, char 106 the segment terminator.
        count = 0
        for c in self.rawinput[:120]:
            if c in '\r\n' and count!=105: #skip CR/LF within ISA; only the segment terminator (after char 105) may itself be a CR/LF
                continue
            count +=1
            if count==4:
                self.ta_info['field_sep'] = c
            elif count==105:
                self.ta_info['sfield_sep'] = c
            elif count==106:
                self.ta_info['record_sep'] = c
                break
        # ISA-version: if <004030: SHOULD use repeating element?
        self.ta_info['reserve']=''
        self.ta_info['skip_char'] = self.ta_info['skip_char'].replace(self.ta_info['record_sep'],'') #if <CR> is segment terminator: cannot be in the skip_char-string!
        #more ISA's in file: find IEA+
    def checkenvelope(self):
        ''' check envelopes, gather information to generate 997 '''
        self.confirmationlist = [] #information about the x12 file for confirmation/997; for x12 this is done per functional group
        #~ self.root.display()
        for nodeisa in self.getloop({'BOTSID':'ISA'}):
            botsglobal.logmap.debug(u'Start parsing X12 envelopes')
            sender = nodeisa.get({'BOTSID':'ISA','ISA06':None})
            receiver = nodeisa.get({'BOTSID':'ISA','ISA08':None})
            ISAreference = nodeisa.get({'BOTSID':'ISA','ISA13':None})
            IEAreference = nodeisa.get({'BOTSID':'ISA'},{'BOTSID':'IEA','IEA02':None})
            if ISAreference != IEAreference:
                raise botslib.InMessageError(_(u'ISA-reference is "$ISAreference"; should be equal to IEA-reference "$IEAreference".'),ISAreference=ISAreference,IEAreference=IEAreference)
            IEAcount = nodeisa.get({'BOTSID':'ISA'},{'BOTSID':'IEA','IEA01':None})
            groupcount = nodeisa.getcountoccurrences({'BOTSID':'ISA'},{'BOTSID':'GS'})
            if int(IEAcount) != groupcount:
                raise botslib.InMessageError(_(u'Count in IEA-IEA01 is $IEAcount; should be equal to number of groups $groupcount.'),IEAcount=IEAcount,groupcount=groupcount)
            for nodegs in nodeisa.getloop({'BOTSID':'ISA'},{'BOTSID':'GS'}):
                GSqualifier = nodegs.get({'BOTSID':'GS','GS01':None})
                GSreference = nodegs.get({'BOTSID':'GS','GS06':None})
                GEreference = nodegs.get({'BOTSID':'GS'},{'BOTSID':'GE','GE02':None})
                if GSreference != GEreference:
                    raise botslib.InMessageError(_(u'GS-reference is "$GSreference"; should be equal to GE-reference "$GEreference".'),GSreference=GSreference,GEreference=GEreference)
                GEcount = nodegs.get({'BOTSID':'GS'},{'BOTSID':'GE','GE01':None})
                messagecount = len(nodegs.children) - 1
                if int(GEcount) != messagecount:
                    raise botslib.InMessageError(_(u'Count in GE-GE01 is $GEcount; should be equal to number of transactions: $messagecount.'),GEcount=GEcount,messagecount=messagecount)
                self.confirmationlist.append({'GSqualifier':GSqualifier,'GSreference':GSreference,'GEcount':GEcount,'sender':sender,'receiver':receiver,'STlist':[]}) #gather information about functional group (GS-GE)
                for nodest in nodegs.getloop({'BOTSID':'GS'},{'BOTSID':'ST'}):
                    STqualifier = nodest.get({'BOTSID':'ST','ST01':None})
                    STreference = nodest.get({'BOTSID':'ST','ST02':None})
                    SEreference = nodest.get({'BOTSID':'ST'},{'BOTSID':'SE','SE02':None})
                    #referencefields are numerical; should I compare values??
                    if STreference != SEreference:
                        raise botslib.InMessageError(_(u'ST-reference is "$STreference"; should be equal to SE-reference "$SEreference".'),STreference=STreference,SEreference=SEreference)
                    SEcount = nodest.get({'BOTSID':'ST'},{'BOTSID':'SE','SE01':None})
                    segmentcount = nodest.getcount()
                    if int(SEcount) != segmentcount:
                        raise botslib.InMessageError(_(u'Count in SE-SE01 is $SEcount; should be equal to number of segments $segmentcount.'),SEcount=SEcount,segmentcount=segmentcount)
                    self.confirmationlist[-1]['STlist'].append({'STreference':STreference,'STqualifier':STqualifier}) #add info per message to functional group
        botsglobal.logmap.debug(u'Parsing X12 envelopes is OK')
    def handleconfirm(self,ta_fromfile,error):
        ''' end of edi file handling.
            eg writing of confirmations etc.
            send 997 messages
            parameter 'error' is not used
        '''
        #filter the confirmationlist
        tmpconfirmationlist = []
        for confirmation in self.confirmationlist:
            if confirmation['GSqualifier'] == 'FA': #do not generate 997 for 997
                continue
            tmpmessagelist = []
            for message in confirmation['STlist']:
                if botslib.checkconfirmrules('send-x12-997',idroute=self.ta_info['idroute'],idchannel=self.ta_info['fromchannel'],
                                                topartner=confirmation['sender'],frompartner=confirmation['receiver'],
                                                editype='x12',messagetype=message['STqualifier']):
                    tmpmessagelist.append(message)
            confirmation['STlist'] = tmpmessagelist
            if not tmpmessagelist: #if no messages/transactions in GS-GE
                continue
            tmpconfirmationlist.append(confirmation)
        self.confirmationlist = tmpconfirmationlist
        #generate one 997-message per functional group that is left after filtering
        for confirmation in self.confirmationlist:
            reference=str(botslib.unique('messagecounter'))
            ta_confirmation = ta_fromfile.copyta(status=TRANSLATED,reference=reference)
            filename = str(ta_confirmation.idta)
            out = outmessage.outmessage_init(editype='x12',messagetype='997004010',filename=filename) #make outmessage object
            out.ta_info['frompartner']=confirmation['receiver']
            out.ta_info['topartner']=confirmation['sender']
            out.put({'BOTSID':'ST','ST01':'997','ST02':reference})
            out.put({'BOTSID':'ST'},{'BOTSID':'AK1','AK101':confirmation['GSqualifier'],'AK102':confirmation['GSreference']})
            out.put({'BOTSID':'ST'},{'BOTSID':'AK9','AK901':'A','AK902':confirmation['GEcount'],'AK903':confirmation['GEcount'],'AK904':confirmation['GEcount']})
            for message in confirmation['STlist']:
                lou = out.putloop({'BOTSID':'ST'},{'BOTSID':'AK2'})
                lou.put({'BOTSID':'AK2','AK201':message['STqualifier'],'AK202':message['STreference']})
                lou.put({'BOTSID':'AK2'},{'BOTSID':'AK5','AK501':'A'})
            out.put({'BOTSID':'ST'},{'BOTSID':'SE','SE01':out.getcount()+1,'SE02':reference}) #last line (counts the segments produced in out-message)
            out.writeall() #write tomessage (result of translation)
            botsglobal.logger.debug(u'Send x12 confirmation (997) route "%s" fromchannel "%s" frompartner "%s" topartner "%s".',
                self.ta_info['idroute'],self.ta_info['fromchannel'],confirmation['receiver'],confirmation['sender'])
            self.confirminfo = dict(confirmtype='send-x12-997',confirmed=True,confirmasked = True,confirmidta=ta_confirmation.idta) #this info is used in transform.py to update the ta.....ugly...
            ta_confirmation.update(statust=OK,**out.ta_info) #update ta for confirmation
class tradacoms(var):
    ''' class for tradacoms inmessage objects.'''
    def checkenvelope(self):
        ''' check envelopes of a tradacoms file: STX-END message count and
            MHD-MTR segment counts.
        '''
        for nodeSTX in self.getloop({'BOTSID':'STX'}):
            botsglobal.logmap.debug(u'Start parsing tradacoms envelopes')
            ENDcount = nodeSTX.get({'BOTSID':'STX'},{'BOTSID':'END','NMST':None})
            messagecount = len(nodeSTX.children) - 1
            if int(ENDcount) != messagecount:
                raise botslib.InMessageError(_(u'Count in messages in END is $ENDcount; should be equal to number of messages $messagecount'),ENDcount=ENDcount,messagecount=messagecount)
            firstmessage = True
            for nodeMHD in nodeSTX.getloop({'BOTSID':'STX'},{'BOTSID':'MHD'}):
                if firstmessage: #the interchange node gets the messagetype of the first message
                    nodeSTX.queries = {'messagetype':nodeMHD.queries['messagetype']}
                    firstmessage = False
                MTRcount = nodeMHD.get({'BOTSID':'MHD'},{'BOTSID':'MTR','NOSG':None})
                segmentcount = nodeMHD.getcount()
                if int(MTRcount) != segmentcount:
                    raise botslib.InMessageError(_(u'Segmentcount in MTR is $MTRcount; should be equal to number of segments $segmentcount'),MTRcount=MTRcount,segmentcount=segmentcount)
            botsglobal.logmap.debug(u'Parsing tradacoms envelopes is OK')
class xml(var):
    ''' class for ediobjects in XML. Uses ElementTree'''
    def initfromfile(self):
        ''' read and parse the xml file; convert the etree to a bots-node-tree in self.root.
            messagetype 'mailbag' triggers a search for the real messagetype first.
        '''
        botsglobal.logger.debug(u'read edi file "%s".',self.ta_info['filename'])
        filename=botslib.abspathdata(self.ta_info['filename'])
        if self.ta_info['messagetype'] == 'mailbag':
            ''' the messagetype is not know.
                bots reads file usersys/grammars/xml/mailbag.py, and uses 'mailbagsearch' to determine the messagetype
                mailbagsearch is a list, containing python dicts. Dict consist of 'xpath', 'messagetype' and (optionally) 'content'.
                'xpath' is a xpath to use on xml-file (using elementtree xpath functionality)
                if found, and 'content' in the dict; if 'content' is equal to value found by xpath-search, then set messagetype.
                if found, and no 'content' in the dict; set messagetype.
            '''
            try:
                module,grammarname = botslib.botsimport('grammars','xml.mailbag')
                mailbagsearch = getattr(module, 'mailbagsearch')
            except AttributeError:
                botsglobal.logger.error(u'missing mailbagsearch in mailbag definitions for xml.')
                raise
            except ImportError:
                botsglobal.logger.error(u'missing mailbag definitions for xml, should be there.')
                raise
            parser = ET.XMLParser()
            #register user-defined character entities (if any) with the parser
            try:
                extra_character_entity = getattr(module, 'extra_character_entity')
                for key,value in extra_character_entity.items():
                    parser.entity[key] = value
            except AttributeError:
                pass #there is no extra_character_entity in the mailbag definitions, is OK.
            etree = ET.ElementTree() #ElementTree: lexes, parses, makes etree; etree is quite similar to bots-node trees but conversion is needed
            etreeroot = etree.parse(filename, parser)
            for item in mailbagsearch:
                if 'xpath' not in item or 'messagetype' not in item:
                    raise botslib.InMessageError(_(u'invalid search parameters in xml mailbag.'))
                #~ print 'search' ,item
                found = etree.find(item['xpath'])
                if found is not None:
                    #~ print '    found'
                    if 'content' in item and found.text != item['content']:
                        continue
                    self.ta_info['messagetype'] = item['messagetype']
                    #~ print '    found right messagedefinition'
                    #~ continue
                    break
            else:
                raise botslib.InMessageError(_(u'could not find right xml messagetype for mailbag.'))
            self.defmessage = grammar.grammarread(self.ta_info['editype'],self.ta_info['messagetype'])
            botslib.updateunlessset(self.ta_info,self.defmessage.syntax) #write values from grammar to self.ta_info - unless these values are already set eg by sniffing
        else:
            self.defmessage = grammar.grammarread(self.ta_info['editype'],self.ta_info['messagetype'])
            botslib.updateunlessset(self.ta_info,self.defmessage.syntax) #write values from grammar to self.ta_info - unless these values are already set eg by sniffing
            parser = ET.XMLParser()
            for key,value in self.ta_info['extra_character_entity'].items():
                parser.entity[key] = value
            etree = ET.ElementTree() #ElementTree: lexes, parses, makes etree; etree is quite similar to bots-node trees but conversion is needed
            etreeroot = etree.parse(filename, parser)
        self.stack = [] #keeps the current path of record-tags while recursing; used by isfield() to find the level in the grammar-structure
        self.root = self.etree2botstree(etreeroot) #convert etree to bots-nodes-tree
        self.normalisetree(self.root)
    def etree2botstree(self,xmlnode):
        ''' recursively convert an etree element (and its children) to a bots-node-tree.'''
        self.stack.append(xmlnode.tag)
        newnode = node.Node(record=self.etreenode2botstreenode(xmlnode))
        for xmlchildnode in xmlnode: #for every node in mpathtree
            if self.isfield(xmlchildnode): #if no child entities: treat as 'field': this misses xml where attributes are used as fields....testing for repeating is no good...
                if xmlchildnode.text and not xmlchildnode.text.isspace(): #skip empty xml entity
                    newnode.record[xmlchildnode.tag]=xmlchildnode.text #add as a field
                    hastxt = True
                else:
                    hastxt = False
                for key,value in xmlchildnode.items(): #convert attributes to fields.
                    if not hastxt:
                        newnode.record[xmlchildnode.tag]='' #add empty content
                        hastxt = True
                    newnode.record[xmlchildnode.tag + self.ta_info['attributemarker'] + key]=value #add as a field
            else: #xmlchildnode is a record
                newnode.append(self.etree2botstree(xmlchildnode)) #add as a node/record
        #~ if botsglobal.ini.getboolean('settings','readrecorddebug',False):
            #~ botsglobal.logger.debug('read record "%s":',newnode.record['BOTSID'])
            #~ for key,value in newnode.record.items():
                #~ botsglobal.logger.debug('    "%s" : "%s"',key,value)
        self.stack.pop()
        #~ print self.stack
        return newnode
    def etreenode2botstreenode(self,xmlnode):
        ''' build a dict from xml-node'''
        build = dict((xmlnode.tag + self.ta_info['attributemarker'] + key,value) for key,value in xmlnode.items()) #convert attributes to fields.
        build['BOTSID']=xmlnode.tag #'record' tag
        if xmlnode.text and not xmlnode.text.isspace():
            build['BOTSCONTENT']=xmlnode.text
        return build
    def isfield(self,xmlchildnode):
        ''' check if xmlchildnode is field (or record)'''
        #~ print 'examine record in stack',xmlchildnode.tag,self.stack
        str_recordlist = self.defmessage.structure
        for record in self.stack: #find right level in structure
            for str_record in str_recordlist:
                #~ print '    find right level comparing',record,str_record[0]
                if record == str_record[0]:
                    if 4 not in str_record: #structure record contains no level: must be an attribute
                        return True
                    str_recordlist = str_record[4]
                    break
            else:
                raise botslib.InMessageError(_(u'Unknown XML-tag in "$record".'),record=record)
        for str_record in str_recordlist: #see if xmlchildnode is in structure
            #~ print '    is xmlhildnode in this level comparing',xmlchildnode.tag,str_record[0]
            if xmlchildnode.tag == str_record[0]:
                #~ print 'found'
                return False
        #xml tag not found in structure: so must be field; validity is check later on with grammar
        if len(xmlchildnode)==0:
            return True
        return False
class xmlnocheck(xml):
    ''' class for ediobjects in XML without a grammar-structure check. Uses ElementTree'''
    def normalisetree(self,node):
        ''' no grammar structure is available, so normalisation is skipped.'''
        pass
    def isfield(self,xmlchildnode):
        ''' an element without child elements is treated as a field.'''
        return len(xmlchildnode) == 0
class json(var):
    ''' class for ediobjects in JSON.'''
    def initfromfile(self):
        ''' read the json file and convert it to a bots-node-tree in self.root.
            toplevel of the json content must be an object or a list.
        '''
        self.defmessage = grammar.grammarread(self.ta_info['editype'],self.ta_info['messagetype'])
        botslib.updateunlessset(self.ta_info,self.defmessage.syntax) #write values from grammar to self.ta_info - unless these values are already set eg by sniffing
        self._readcontent_edifile()
        jsonobject = simplejson.loads(self.rawinput)
        del self.rawinput #raw string no longer needed; free the memory
        if isinstance(jsonobject,list):
            self.root=node.Node() #initialise empty node.
            self.root.children = self.dojsonlist(jsonobject,self.getrootID()) #fill root with children
            for child in self.root.children:
                if not child.record: #sanity test: the children must have content
                    raise botslib.InMessageError(_(u'no usable content.'))
                self.normalisetree(child)
        elif isinstance(jsonobject,dict):
            if len(jsonobject)==1 and isinstance(jsonobject.values()[0],dict):
                # best structure: {rootid:{id2:<dict, list>}}
                self.root = self.dojsonobject(jsonobject.values()[0],jsonobject.keys()[0])
            elif len(jsonobject)==1 and isinstance(jsonobject.values()[0],list) :
                #root dict has no name; use value from grammar for rootID; {id2:<dict, list>}
                self.root=node.Node(record={'BOTSID': self.getrootID()}) #initialise empty node.
                self.root.children = self.dojsonlist(jsonobject.values()[0],jsonobject.keys()[0])
            else:
                #~ print self.getrootID()
                self.root = self.dojsonobject(jsonobject,self.getrootID())
            #~ print self.root
            if not self.root:
                raise botslib.InMessageError(_(u'no usable content.'))
            self.normalisetree(self.root)
        else:
            #root in JSON is neither dict or list.
            raise botslib.InMessageError(_(u'Content must be a "list" or "object".'))
    def getrootID(self):
        ''' BOTSID for the root: first record in the grammar-structure.'''
        return self.defmessage.structure[0][ID]
    def dojsonlist(self,jsonobject,name):
        ''' convert a json-list to a list of bots-nodes; each list item must be a json-object.'''
        lijst=[] #initialise empty list, used to append a listof (converted) json objects
        for i in jsonobject:
            if isinstance(i,dict): #check list item is dict/object
                newnode = self.dojsonobject(i,name)
                if newnode:
                    lijst.append(newnode)
            elif self.ta_info['checkunknownentities']:
                raise botslib.InMessageError(_(u'List content in must be a "object".'))
        return lijst
    def dojsonobject(self,jsonobject,name):
        ''' convert a json-object to one bots-node (or None if the object yields no content).'''
        thisnode=node.Node(record={}) #initialise empty node.
        for key,value in jsonobject.items():
            if value is None:
                continue
            elif isinstance(value,basestring): #json field; map to field in node.record
                thisnode.record[key]=value
            elif isinstance(value,dict):
                newnode = self.dojsonobject(value,key)
                if newnode:
                    thisnode.append(newnode)
            elif isinstance(value,list):
                thisnode.children.extend(self.dojsonlist(value,key))
            elif isinstance(value,(int,long,float)): #json field; map to field in node.record
                thisnode.record[key]=str(value)
            else:
                if self.ta_info['checkunknownentities']:
                    raise botslib.InMessageError(_(u'Key "$key" value "$value": is not string, list or dict.'),key=key,value=value)
                thisnode.record[key]=str(value)
        if not thisnode.record and not thisnode.children:
            return None #node is empty...
        thisnode.record['BOTSID']=name
        return thisnode
class jsonnocheck(json):
    ''' class for ediobjects in JSON without a grammar-structure check.'''
    def normalisetree(self,node):
        #no grammar structure available, so no normalisation is done
        pass
    def getrootID(self):
        return self.ta_info['defaultBOTSIDroot'] #as there is no structure in grammar, use value from syntax.
class database(jsonnocheck):
    ''' editype 'database': handled exactly like jsonnocheck (no overrides).'''
    pass
class db(Inmessage):
    ''' the database-object is unpickled, and passed to the mapping script.
    '''
    def initfromfile(self):
        ''' read the edi file and unpickle it into self.root.
            NOTE(review): pickle.load executes arbitrary code when the file is
            untrusted — verify these files always originate from bots itself.
        '''
        botsglobal.logger.debug(u'read edi file "%s".',self.ta_info['filename'])
        f = botslib.opendata(filename=self.ta_info['filename'],mode='rb')
        try:
            self.root = pickle.load(f)
        finally:
            f.close() #close the file even if unpickling raises
    def nextmessage(self):
        ''' there is only one "message": this inmessage-object itself.'''
        yield self
class raw(Inmessage):
    ''' the file object is just read and passed to the mapping script.
    '''
    def initfromfile(self):
        ''' read the whole edi file (as bytes) into self.root.'''
        botsglobal.logger.debug(u'read edi file "%s".',self.ta_info['filename'])
        f = botslib.opendata(filename=self.ta_info['filename'],mode='rb')
        try:
            self.root = f.read()
        finally:
            f.close() #close the file even if reading raises
    def nextmessage(self):
        ''' there is only one "message": this inmessage-object itself.'''
        yield self
| [
[
8,
0,
0.0009,
0.0009,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0017,
0.0009,
0,
0.66,
0.0323,
609,
0,
1,
0,
0,
609,
0,
0
],
[
1,
0,
0.0026,
0.0009,
0,
0.66... | [
"''' Reading/lexing/parsing/splitting an edifile.'''",
"import StringIO",
"import time",
"import sys",
"try:\n import cPickle as pickle\nexcept:\n import pickle",
" import cPickle as pickle",
" import pickle",
"try:\n import cElementTree as ET\nexcept ImportError:\n try:\n imp... |
import copy
from django.utils.translation import ugettext as _
import botslib
import botsglobal
from botsconfig import *
def grammarread(editype,grammarname):
    ''' dispatch function for class Grammar or subclass
        read whole grammar: syntax (with defaults), structure and recorddefs
    '''
    if editype not in globals():
        raise botslib.GrammarError(_(u'Read grammar for editype "$editype" messagetype "$messagetype", but editype is unknown.'), editype=editype, messagetype=grammarname)
    grammarclass = globals()[editype]
    grammarobject = grammarclass('grammars',editype,grammarname)
    grammarobject.initsyntax(includedefault=True)
    grammarobject.initrestofgrammar()
    return grammarobject
def syntaxread(soortpythonfile,editype,grammarname):
    ''' dispatch function for class Grammar or subclass
        read only the syntax part of a grammar (no defaults, no structure/recorddefs)
    '''
    if editype not in globals():
        raise botslib.GrammarError(_(u'Read grammar for type "$soort" editype "$editype" messagetype "$messagetype", but editype is unknown.'), soort=soortpythonfile,editype=editype, messagetype=grammarname)
    grammarclass = globals()[editype]
    grammarobject = grammarclass(soortpythonfile,editype,grammarname)
    grammarobject.initsyntax(includedefault=False)
    return grammarobject
class Grammar(object):
''' Class for translation grammar. The grammar is used in reading or writing an edi file.
Description of the grammar file: see user manual.
The grammar is read from the grammar file.
Grammar file has several grammar parts , eg 'structure'and 'recorddefs'.
every grammar part is in a module is either the grammar part itself or a import from another module.
every module is read once, (default python import-machinery).
The information in a grammar is checked and manipulated.
structure of self.grammar:
is a list of dict
attributes of dict: see header.py
- ID record id
- MIN min #occurences record or group
- MAX max #occurences record of group
- COUNT added after read
- MPATH mpath of record (only record-ids). added after read
- FIELDS tuple of the fields in record. Added ather read from separate record.py-file
- LEVEL child-records
structure of fields:
fields is tuple of (field or subfield)
field is tuple of (ID, MANDATORY, LENGTH, FORMAT)
subfield is tuple of (ID, MANDATORY, tuple of fields)
if a structure or recorddef has been read, Bots remembers this and skip most of the checks.
'''
_checkstructurerequired=True
def __init__(self,soortpythonfile,editype,grammarname):
self.module,self.grammarname = botslib.botsimport(soortpythonfile,editype + '.' + grammarname)
def initsyntax(self,includedefault):
''' Update default syntax from class with syntax read from grammar. '''
if includedefault:
self.syntax = copy.deepcopy(self.__class__.defaultsyntax) #copy syntax from class data
else:
self.syntax = {}
try:
syntaxfromgrammar = getattr(self.module, 'syntax')
except AttributeError:
pass #there is no syntax in the grammar, is OK.
else:
if not isinstance(syntaxfromgrammar,dict):
raise botslib.GrammarError(_(u'Grammar "$grammar": syntax is not a dict{}.'),grammar=self.grammarname)
self.syntax.update(syntaxfromgrammar)
def initrestofgrammar(self):
try:
self.nextmessage = getattr(self.module, 'nextmessage')
except AttributeError: #if grammarpart does not exist set to None; test required grammarpart elsewhere
self.nextmessage = None
try:
self.nextmessage2 = getattr(self.module, 'nextmessage2')
if self.nextmessage is None:
raise botslib.GrammarError(_(u'Grammar "$grammar": if nextmessage2: nextmessage has to be used.'),grammar=self.grammarname)
except AttributeError: #if grammarpart does not exist set to None; test required grammarpart elsewhere
self.nextmessage2 = None
try:
self.nextmessageblock = getattr(self.module, 'nextmessageblock')
if self.nextmessage:
raise botslib.GrammarError(_(u'Grammar "$grammar": nextmessageblock and nextmessage not both allowed.'),grammar=self.grammarname)
except AttributeError: #if grammarpart does not exist set to None; test required grammarpart elsewhere
self.nextmessageblock = None
if self._checkstructurerequired:
try:
self._dostructure()
except AttributeError: #if grammarpart does not exist set to None; test required grammarpart elsewhere
raise botslib.GrammarError(_(u'Grammar "$grammar": no structure, is required.'),grammar=self.grammarname)
except:
self.structurefromgrammar[0]['error'] = True #mark the structure as having errors
raise
try:
self._dorecorddefs()
except:
self.recorddefs['BOTS_1$@#%_error'] = True #mark structure has been read with errors
raise
else:
self.recorddefs['BOTS_1$@#%_error'] = False #mark structure has been read and checked
self.structure = copy.deepcopy(self.structurefromgrammar) #(deep)copy structure for use in translation (in translation values are changed, so use a copy)
self._checkbotscollision(self.structure)
self._linkrecorddefs2structure(self.structure)
    def _dorecorddefs(self):
        ''' 1. check the recorddefinitions for validity.
            2. adapt in field-records: normalise length lists, set bool ISFIELD, etc
            Raises botslib.GrammarError for every invalid definition.
            Note: checks are skipped when the sentinel key shows this recorddefs
            was already read and checked earlier in this run.
        '''
        try:
            self.recorddefs = getattr(self.module, 'recorddefs')
        except AttributeError:
            raise botslib.GrammarError(_(u'Grammar "$grammar": no recorddefs.'),grammar=self.grammarname)
        if not isinstance(self.recorddefs,dict):
            raise botslib.GrammarError(_(u'Grammar "$grammar": recorddefs is not a dict{}.'),grammar=self.grammarname)
        #check if grammar is read & checked earlier in this run. If so, we can skip all checks.
        if 'BOTS_1$@#%_error' in self.recorddefs:   #if checked before
            if self.recorddefs['BOTS_1$@#%_error']: #if grammar had errors
                raise botslib.GrammarError(_(u'Grammar "$grammar" has error that is already reported in this run.'),grammar=self.grammarname)
            return  #no error, skip checks
        for recordID ,fields in self.recorddefs.iteritems():
            if not isinstance(recordID,basestring):
                raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record": is not a string.'),grammar=self.grammarname,record=recordID)
            if not recordID:
                raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record": recordID with empty string.'),grammar=self.grammarname,record=recordID)
            if not isinstance(fields,list):
                raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record": no correct fields found.'),grammar=self.grammarname,record=recordID)
            #xml/json records may consist of only a BOTSID field; other editypes need at least one more field
            if isinstance(self,(xml,json)):
                if len (fields) < 1:
                    raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record": too few fields.'),grammar=self.grammarname,record=recordID)
            else:
                if len (fields) < 2:
                    raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record": too few fields.'),grammar=self.grammarname,record=recordID)
            hasBOTSID = False   #to check if BOTSID is present
            fieldnamelist = []  #to check for double fieldnames
            for field in fields:
                self._checkfield(field,recordID)
                if not field[ISFIELD]:  # if composite: check subfields, and uniqueness of their names
                    for sfield in field[SUBFIELDS]:
                        self._checkfield(sfield,recordID)
                        if sfield[ID] in fieldnamelist:
                            raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record": field "$field" appears twice. Field names should be unique within a record.'),grammar=self.grammarname,record=recordID,field=sfield[ID])
                        fieldnamelist.append(sfield[ID])
                else:
                    if field[ID] == 'BOTSID':
                        hasBOTSID = True
                    if field[ID] in fieldnamelist:
                        raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record": field "$field" appears twice. Field names should be unique within a record.'),grammar=self.grammarname,record=recordID,field=field[ID])
                    fieldnamelist.append(field[ID])
            if not hasBOTSID:   #there is no field 'BOTSID' in record
                raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record": no field BOTSID.'),grammar=self.grammarname,record=recordID)
        #cross-checks between syntax/nextmessageblock and number of records
        if self.syntax['noBOTSID'] and len(self.recorddefs) != 1:
            raise botslib.GrammarError(_(u'Grammar "$grammar": if syntax["noBOTSID"]: there can be only one record in recorddefs.'),grammar=self.grammarname)
        if self.nextmessageblock is not None and len(self.recorddefs) != 1:
            raise botslib.GrammarError(_(u'Grammar "$grammar": if nextmessageblock: there can be only one record in recorddefs.'),grammar=self.grammarname)
def _checkfield(self,field,recordID):
#'normalise' field: make list equal length
if len(field) == 3: # that is: composite
field +=[None,False,None,None,'A']
elif len(field) == 4: # that is: field (not a composite)
field +=[True,0,0,'A']
elif len(field) == 8: # this happens when there are errors in a table and table is read again
raise botslib.GrammarError(_(u'Grammar "$grammar": error in grammar; error is already reported in this run.'),grammar=self.grammarname)
else:
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": list has invalid number of arguments.') ,grammar=self.grammarname,record=recordID,field=field[ID])
if not isinstance(field[ID],basestring) or not field[ID]:
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": fieldID has to be a string.'),grammar=self.grammarname,record=recordID,field=field[ID])
if not isinstance(field[MANDATORY],basestring):
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": mandatory/conditional has to be a string.'),grammar=self.grammarname,record=recordID,field=field[ID])
if not field[MANDATORY] or field[MANDATORY] not in ['M','C']:
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": mandatory/conditional must be "M" or "C".'),grammar=self.grammarname,record=recordID,field=field[ID])
if field[ISFIELD]: # that is: field, and not a composite
#get MINLENGTH (from tuple or if fixed
if isinstance(field[LENGTH],tuple):
if not isinstance(field[LENGTH][0],(int,float)):
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": min length "$min" has to be a number.'),grammar=self.grammarname,record=recordID,field=field[ID],min=field[LENGTH])
if not isinstance(field[LENGTH][1],(int,float)):
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": max length "$max" has to be a number.'),grammar=self.grammarname,record=recordID,field=field[ID],max=field[LENGTH])
if field[LENGTH][0] > field[LENGTH][1]:
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": min length "$min" must be > max length "$max".'),grammar=self.grammarname,record=recordID,field=field[ID],min=field[LENGTH][0],max=field[LENGTH][1])
field[MINLENGTH]=field[LENGTH][0]
field[LENGTH]=field[LENGTH][1]
elif isinstance(field[LENGTH],(int,float)):
if isinstance(self,fixed):
field[MINLENGTH]=field[LENGTH]
else:
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": length "$len" has to be number or (min,max).'),grammar=self.grammarname,record=recordID,field=field[ID],len=field[LENGTH])
if field[LENGTH] < 1:
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": length "$len" has to be at least 1.'),grammar=self.grammarname,record=recordID,field=field[ID],len=field[LENGTH])
if field[MINLENGTH] < 0:
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": minlength "$len" has to be at least 0.'),grammar=self.grammarname,record=recordID,field=field[ID],len=field[LENGTH])
#format
if not isinstance(field[FORMAT],basestring):
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": format "$format" has to be a string.'),grammar=self.grammarname,record=recordID,field=field[ID],format=field[FORMAT])
self._manipulatefieldformat(field,recordID)
if field[BFORMAT] in ['N','I','R']:
if isinstance(field[LENGTH],float):
field[DECIMALS] = int( round((field[LENGTH]-int(field[LENGTH]))*10) ) #fill DECIMALS
field[LENGTH] = int( round(field[LENGTH]))
if field[DECIMALS] >= field[LENGTH]:
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": field length "$len" has to be greater that nr of decimals "$decimals".'),grammar=self.grammarname,record=recordID,field=field[ID],len=field[LENGTH],decimals=field[DECIMALS])
if isinstance(field[MINLENGTH],float):
field[MINLENGTH] = int( round(field[MINLENGTH]))
else: #if format 'R', A, D, T
if isinstance(field[LENGTH],float):
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": if format "$format", no length "$len".'),grammar=self.grammarname,record=recordID,field=field[ID],format=field[FORMAT],len=field[LENGTH])
if isinstance(field[MINLENGTH],float):
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": if format "$format", no minlength "$len".'),grammar=self.grammarname,record=recordID,field=field[ID],format=field[FORMAT],len=field[MINLENGTH])
else: #check composite
if not isinstance(field[SUBFIELDS],list):
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": is a composite field, has to have subfields.'),grammar=self.grammarname,record=recordID,field=field[ID])
if len(field[SUBFIELDS]) < 2:
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field" has < 2 sfields.'),grammar=self.grammarname,record=recordID,field=field[ID])
def _linkrecorddefs2structure(self,structure):
''' recursive
for each record in structure: add the pointer to the right recorddefinition.
'''
for i in structure:
try:
i[FIELDS] = self.recorddefs[i[ID]]
except KeyError:
raise botslib.GrammarError(_(u'Grammar "$grammar": in recorddef no record "$record".'),grammar=self.grammarname,record=i[ID])
if LEVEL in i:
self._linkrecorddefs2structure(i[LEVEL])
    def _dostructure(self):
        ''' 1. check the structure for validity.
            2. adapt in structure: Add keys: mpath, count
            3. remember that structure is checked and adapted (so when grammar is read again, no checking/adapt needed)
        '''
        self.structurefromgrammar = getattr(self.module, 'structure')   #may raise AttributeError; caller translates that to GrammarError
        if len(self.structurefromgrammar) != 1: #every structure has only 1 root!!
            raise botslib.GrammarError(_(u'Grammar "$grammar", in structure: only one root record allowed.'),grammar=self.grammarname)
        #check if structure is read & checked earlier in this run. If so, we can skip all checks.
        if 'error' in self.structurefromgrammar[0]:
            pass    # grammar has been read before, but there are errors. Do nothing here, same errors will be raised again.
        elif MPATH in self.structurefromgrammar[0]:
            return  # grammar has been read before, with no errors. Do no checks.
        self._checkstructure(self.structurefromgrammar,[])
        if self.syntax['checkcollision']:   #collision checks only for editypes where records can be ambiguous (e.g. edifact/x12)
            self._checkbackcollision(self.structurefromgrammar)
            self._checknestedcollision(self.structurefromgrammar)
    def _checkstructure(self,structure,mpath):
        ''' Recursive
            1. Check structure: each entry must be a dict with a non-empty string ID and whole-number MIN <= MAX.
            2. Add keys: mpath (list of record-IDs from root to this record), count (init 0)
        '''
        if not isinstance(structure,list):
            raise botslib.GrammarError(_(u'Grammar "$grammar", in structure, at "$mpath": not a list.'),grammar=self.grammarname,mpath=mpath)
        for i in structure:
            if not isinstance(i,dict):
                raise botslib.GrammarError(_(u'Grammar "$grammar", in structure, at "$mpath": record should be a dict: "$record".'),grammar=self.grammarname,mpath=mpath,record=i)
            if ID not in i:
                raise botslib.GrammarError(_(u'Grammar "$grammar", in structure, at "$mpath": record without ID: "$record".'),grammar=self.grammarname,mpath=mpath,record=i)
            if not isinstance(i[ID],basestring):
                raise botslib.GrammarError(_(u'Grammar "$grammar", in structure, at "$mpath": recordID of record is not a string: "$record".'),grammar=self.grammarname,mpath=mpath,record=i)
            if not i[ID]:
                raise botslib.GrammarError(_(u'Grammar "$grammar", in structure, at "$mpath": recordID of record is empty: "$record".'),grammar=self.grammarname,mpath=mpath,record=i)
            if MIN not in i:
                raise botslib.GrammarError(_(u'Grammar "$grammar", in structure, at "$mpath": record without MIN: "$record".'),grammar=self.grammarname,mpath=mpath,record=i)
            if MAX not in i:
                raise botslib.GrammarError(_(u'Grammar "$grammar", in structure, at "$mpath": record without MAX: "$record".'),grammar=self.grammarname,mpath=mpath,record=i)
            if not isinstance(i[MIN],int):
                raise botslib.GrammarError(_(u'Grammar "$grammar", in structure, at "$mpath": record where MIN is not whole number: "$record".'),grammar=self.grammarname,mpath=mpath,record=i)
            if not isinstance(i[MAX],int):
                raise botslib.GrammarError(_(u'Grammar "$grammar", in structure, at "$mpath": record where MAX is not whole number: "$record".'),grammar=self.grammarname,mpath=mpath,record=i)
            if i[MIN] > i[MAX]:
                raise botslib.GrammarError(_(u'Grammar "$grammar", in structure, at "$mpath": record where MIN > MAX: "$record".'),grammar=self.grammarname,mpath=mpath,record=str(i)[:100])
            i[MPATH]=mpath+[[i[ID]]]    #mpath is built up while recursing down the tree
            i[COUNT]=0
            if LEVEL in i:
                self._checkstructure(i[LEVEL],i[MPATH])
    def _checkbackcollision(self,structure,collision=None):
        ''' Recursive.
            Check if grammar has a back-collision problem: a record-ID that can be
            mistaken for an earlier record on the same (or a parent) level.
            A message with collision problems is ambiguous when parsing.
            Returns (collision, headerissave): collision is the list of IDs that can
            still collide at the caller's level; headerissave indicates a mandatory
            record was seen (so preceding records can no longer collide).
        '''
        headerissave = False
        if not collision:
            collision=[]
        for i in structure:
            #~ print 'check back',i[MPATH], 'with',collision
            if i[ID] in collision:
                raise botslib.GrammarError(_(u'Grammar "$grammar", in structure: back-collision detected at record "$mpath".'),grammar=self.grammarname,mpath=i[MPATH])
            if i[MIN]:  #mandatory record: everything before it is unambiguous, restart collision list
                collision = []
                headerissave = True
            collision.append(i[ID])
            if LEVEL in i:
                returncollision,returnheaderissave = self._checkbackcollision(i[LEVEL],[i[ID]])
                collision += returncollision
                if returnheaderissave:  #if one of segment(groups) is required, there is always a segment after the header segment; so remove header from nowcollision:
                    collision.remove(i[ID])
        return collision,headerissave   #collision is used to update on higher level; cleared indicates the header segment can not collide anymore
def _checkbotscollision(self,structure):
''' Recursive.
Within one level: if twice the same tag: use BOTSIDnr.
'''
collision={}
for i in structure:
if i[ID] in collision:
#~ raise botslib.GrammarError(_(u'Grammar "$grammar", in structure: bots-collision detected at record "$mpath".'),grammar=self.grammarname,mpath=i[MPATH])
i[BOTSIDnr] = str(collision[i[ID]] + 1)
collision[i[ID]] = collision[i[ID]] + 1
else:
i[BOTSIDnr] = '1'
collision[i[ID]] = 1
if LEVEL in i:
self._checkbotscollision(i[LEVEL])
return
    def _checknestedcollision(self,structure,collision=None):
        ''' Recursive.
            Check if grammar has a nesting-collision problem: a record-ID deeper in
            the tree that can be mistaken for an (optional) record higher up.
            A message with collision problems is ambiguous when parsing.
            Iterates in reverse so records after a mandatory record are exempt.
            Returns True when the collision list for the caller is still non-empty.
        '''
        if not collision:
            levelcollision = []
        else:
            levelcollision = collision[:]   #copy: this level must not modify the caller's list
        for i in reversed(structure):
            checkthissegment = True
            if LEVEL in i:
                checkthissegment = self._checknestedcollision(i[LEVEL],levelcollision + [i[ID]])
            #~ print 'check nested',checkthissegment, i[MPATH], 'with',levelcollision
            if checkthissegment and i[ID] in levelcollision:
                raise botslib.GrammarError(_(u'Grammar "$grammar", in structure: nesting collision detected at record "$mpath".'),grammar=self.grammarname,mpath=i[MPATH])
            if i[MIN]:
                levelcollision = []     #mandatory record: records before it (reverse order) can not collide anymore, so clear the collision list
        return bool(levelcollision)
def display(self,structure,level=0):
''' Draw grammar, with indentation for levels.
For debugging.
'''
for i in structure:
print 'Record: ',i[MPATH],i
for field in i[FIELDS]:
print ' Field: ',field
if LEVEL in i:
self.display(i[LEVEL],level+1)
#bots interpretats the format from the grammer; left side are the allowed values; right side are the internal forams bots uses.
#the list directly below are the default values for the formats, subclasses can have their own list.
#this makes it possible to use x12-formats for x12, edifact-formats for edifact etc
formatconvert = {
'A':'A', #alfanumerical
'AN':'A', #alfanumerical
#~ 'AR':'A', #right aligned alfanumerical field, used in fixed records.
'D':'D', #date
'DT':'D', #date-time
'T':'T', #time
'TM':'T', #time
'N':'N', #numerical, fixed decimal. Fixed nr of decimals; if no decimal used: whole number, integer
#~ 'NL':'N', #numerical, fixed decimal. In fixed format: no preceding zeros, left aligned,
#~ 'NR':'N', #numerical, fixed decimal. In fixed format: preceding blancs, right aligned,
'R':'R', #numerical, any number of decimals; the decimal point is 'floating'
#~ 'RL':'R', #numerical, any number of decimals. fixed: no preceding zeros, left aligned
#~ 'RR':'R', #numerical, any number of decimals. fixed: preceding blancs, right aligned
'I':'I', #numercial, implicit decimal
}
def _manipulatefieldformat(self,field,recordID):
try:
field[BFORMAT] = self.formatconvert[field[FORMAT]]
except KeyError:
raise botslib.GrammarError(_(u'Grammar "$grammar", record "$record", field "$field": format "$format" has to be one of "$keys".'),grammar=self.grammarname,record=recordID,field=field[ID],format=field[FORMAT],keys=self.formatconvert.keys())
#grammar subclasses. contain the defaultsyntax
class test(Grammar):
    ''' Grammar subclass for testing; only a minimal default syntax. '''
    defaultsyntax = {
        'checkcollision':True,
        'noBOTSID':False,
        }
class csv(Grammar):
    ''' Grammar subclass for the csv editype; holds the csv default syntax. '''
    defaultsyntax = {
        'acceptspaceinnumfield':True,   #only really used in fixed formats
        'add_crlfafterrecord_sep':'',
        'allow_lastrecordnotclosedproperly':False,  #in csv sometimes the last record is no closed correctly. This is related to communciation over email. Beware: when using this, other checks will not be enforced!
        'charset':'utf-8',
        'checkcharsetin':'strict',      #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcharsetout':'strict',     #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcollision':True,
        'checkunknownentities': True,
        'contenttype':'text/csv',
        'decimaal':'.',
        'envelope':'',
        'escape':"",
        'field_sep':':',
        'forcequote': 1,    #(if quote_char is set) 0:no force: only quote if necessary:1:always force: 2:quote if alfanumeric
        'lengthnumericbare':False,
        'merge':True,
        'noBOTSID':False,
        'pass_all':True,
        'quote_char':"'",
        'record_sep':"\r\n",
        'record_tag_sep':"",    #Tradacoms/GTDI
        'reserve':'',
        'sfield_sep':'',
        'skip_char':'',
        'skip_firstline':False,
        'stripfield_sep':False, #safe choice, as csv is no real standard
        'triad':'',
        'wrap_length':0,    #for producing wrapped format, where a file consists of fixed length records ending with crr/lf. Often seen in mainframe, as400
        }
class fixed(Grammar):
    ''' Grammar subclass for fixed-length records; has its own formatconvert
        table with left/right aligned variants of the numeric formats.
    '''
    formatconvert = {
        'A':'A',    #alfanumerical
        'AN':'A',   #alfanumerical
        'AR':'A',   #right aligned alfanumerical field, used in fixed records.
        'D':'D',    #date
        'DT':'D',   #date-time
        'T':'T',    #time
        'TM':'T',   #time
        'N':'N',    #numerical, fixed decimal. Fixed nr of decimals; if no decimal used: whole number, integer
        'NL':'N',   #numerical, fixed decimal. In fixed format: no preceding zeros, left aligned,
        'NR':'N',   #numerical, fixed decimal. In fixed format: preceding blancs, right aligned,
        'R':'R',    #numerical, any number of decimals; the decimal point is 'floating'
        'RL':'R',   #numerical, any number of decimals. fixed: no preceding zeros, left aligned
        'RR':'R',   #numerical, any number of decimals. fixed: preceding blancs, right aligned
        'I':'I',    #numerical, implicit decimal
        }
    defaultsyntax = {
        'acceptspaceinnumfield':True,   #only really used in fixed formats
        'add_crlfafterrecord_sep':'',
        'charset':'us-ascii',
        'checkcharsetin':'strict',      #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcharsetout':'strict',     #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcollision':True,
        'checkfixedrecordtoolong':True,
        'checkfixedrecordtooshort':False,
        'checkunknownentities': True,
        'contenttype':'text/plain',
        'decimaal':'.',
        'endrecordID':3,
        'envelope':'',
        'escape':'',
        'field_sep':'',
        'forcequote':0, #csv only
        'lengthnumericbare':False,
        'merge':True,
        'noBOTSID':False,
        'pass_all':False,
        'quote_char':"",
        'record_sep':"\r\n",
        'record_tag_sep':"",    #Tradacoms/GTDI
        'reserve':'',
        'sfield_sep':'',
        'skip_char':'',
        'startrecordID':0,
        'stripfield_sep':False,
        'triad':'',
        'wrap_length':0,    #for producing wrapped format, where a file consists of fixed length records ending with crr/lf. Often seen in mainframe, as400
        }
class idoc(fixed):
    ''' Grammar subclass for SAP idoc (a fixed format with idoc-specific defaults
        like recordID position 0-10 and MANDT/DOCNUM envelope fields).
    '''
    defaultsyntax = {
        'acceptspaceinnumfield':True,   #only really used in fixed formats
        'add_crlfafterrecord_sep':'',
        'automaticcount':True,
        'charset':'us-ascii',
        'checkcharsetin':'strict',      #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcharsetout':'strict',     #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcollision':True,
        'checkfixedrecordtoolong':False,
        'checkfixedrecordtooshort':False,
        'checkunknownentities': True,
        'contenttype':'text/plain',
        'decimaal':'.',
        'endrecordID':10,
        'envelope':'',
        'escape':'',
        'field_sep':'',
        'forcequote':0, #csv only
        'lengthnumericbare':False,
        'merge':False,
        'noBOTSID':False,
        'pass_all':False,
        'quote_char':"",
        'record_sep':"\r\n",
        'record_tag_sep':"",    #Tradacoms/GTDI
        'reserve':'',
        'sfield_sep':'',
        'skip_char':'',
        'startrecordID':0,
        'stripfield_sep':False,
        'triad':'',
        'wrap_length':0,    #for producing wrapped format, where a file consists of fixed length records ending with crr/lf. Often seen in mainframe, as400
        'MANDT':'0',
        'DOCNUM':'0',
        }
class xml(Grammar):
    ''' Grammar subclass for the xml editype (with structure/recorddefs checks). '''
    defaultsyntax = {
        'add_crlfafterrecord_sep':'',
        'acceptspaceinnumfield':True,   #only really used in fixed formats
        'attributemarker':'__',
        'charset':'utf-8',
        'checkcharsetin':'strict',      #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcharsetout':'strict',     #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcollision':False,
        'checkunknownentities': True,   #??changed later
        'contenttype':'text/xml ',
        'decimaal':'.',
        'DOCTYPE':'',   #doctype declaration to use in xml header. DOCTYPE = 'mydoctype SYSTEM "mydoctype.dtd"' will lead to: <!DOCTYPE mydoctype SYSTEM "mydoctype.dtd">
        'envelope':'',
        'extra_character_entity':{},    #additional character entities to resolve when parsing XML; mostly html character entities. Not in python 2.4. Example: {'euro':u'','nbsp':unichr(160),'apos':u'\u0027'}
        'escape':'',
        'field_sep':'',
        'forcequote':0, #csv only
        'indented':False,   #False: xml output is one string (no cr/lf); True: xml output is indented/human readable
        'lengthnumericbare':False,
        'merge':False,
        'noBOTSID':False,
        'pass_all':False,
        'processing_instructions': None,    #to generate processing instruction in xml prolog. is a list, consisting of tuples, each tuple consists of type of instruction and text for instruction.
                                            #Example: processing_instructions': [('xml-stylesheet' ,'href="mystylesheet.xsl" type="text/xml"'),('type-of-ppi' ,'attr1="value1" attr2="value2"')]
                                            #leads to this output in xml-file: <?xml-stylesheet href="mystylesheet.xsl" type="text/xml"?><?type-of-ppi attr1="value1" attr2="value2"?>
        'quote_char':"",
        'record_sep':"",
        'record_tag_sep':"",    #Tradacoms/GTDI
        'reserve':'',
        'sfield_sep':'',
        'skip_char':'',
        'standalone':None,  #as used in xml prolog; values: 'yes' , 'no' or None (not used)
        'stripfield_sep':False,
        'triad':'',
        'version':'1.0',    #as used in xml prolog
        }
class xmlnocheck(xml):
    ''' Like xml, but no structure/recorddefs is required or checked. '''
    _checkstructurerequired=False
    defaultsyntax = {
        'add_crlfafterrecord_sep':'',
        'acceptspaceinnumfield':True,   #only really used in fixed formats
        'attributemarker':'__',
        'charset':'utf-8',
        'checkcharsetin':'strict',      #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcharsetout':'strict',     #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcollision':False,
        'checkunknownentities': False,
        'contenttype':'text/xml ',
        'decimaal':'.',
        'DOCTYPE':'',   #doctype declaration to use in xml header. DOCTYPE = 'mydoctype SYSTEM "mydoctype.dtd"' will lead to: <!DOCTYPE mydoctype SYSTEM "mydoctype.dtd">
        'envelope':'',
        'escape':'',
        'extra_character_entity':{},    #additional character entities to resolve when parsing XML; mostly html character entities. Not in python 2.4. Example: {'euro':u'','nbsp':unichr(160),'apos':u'\u0027'}
        'field_sep':'',
        'forcequote':0, #csv only
        'indented':False,   #False: xml output is one string (no cr/lf); True: xml output is indented/human readable
        'lengthnumericbare':False,
        'merge':False,
        'noBOTSID':False,
        'pass_all':False,
        'processing_instructions': None,    #to generate processing instruction in xml prolog. is a list, consisting of tuples, each tuple consists of type of instruction and text for instruction.
                                            #Example: processing_instructions': [('xml-stylesheet' ,'href="mystylesheet.xsl" type="text/xml"'),('type-of-ppi' ,'attr1="value1" attr2="value2"')]
                                            #leads to this output in xml-file: <?xml-stylesheet href="mystylesheet.xsl" type="text/xml"?><?type-of-ppi attr1="value1" attr2="value2"?>
        'quote_char':"",
        'record_sep':"",
        'record_tag_sep':"",    #Tradacoms/GTDI
        'reserve':'',
        'sfield_sep':'',
        'skip_char':'',
        'standalone':None,  #as used in xml prolog; values: 'yes' , 'no' or None (not used)
        'stripfield_sep':False,
        'triad':'',
        'version':'1.0',    #as used in xml prolog
        }
class template(Grammar):
    ''' Grammar subclass for template output (no structure check required). '''
    _checkstructurerequired=False
    defaultsyntax = { \
        'add_crlfafterrecord_sep':'',
        'charset':'utf-8',
        'checkcharsetin':'strict',      #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcharsetout':'strict',     #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcollision':False,
        'contenttype':'text/xml',
        'checkunknownentities': True,
        'decimaal':'.',
        'envelope':'template',
        'envelope-template':'',
        'escape':'',
        'field_sep':'',
        'forcequote':0, #csv only
        'lengthnumericbare':False,
        'merge':True,
        'noBOTSID':False,
        'output':'xhtml-strict',
        'quote_char':"",
        'pass_all':False,
        'record_sep':"",
        'record_tag_sep':"",    #Tradacoms/GTDI
        'reserve':'',
        'sfield_sep':'',
        'skip_char':'',
        'stripfield_sep':False,
        'triad':'',
        }
class templatehtml(Grammar):
    ''' Grammar subclass for html template output (no structure check required). '''
    _checkstructurerequired=False
    defaultsyntax = { \
        'add_crlfafterrecord_sep':'',
        'charset':'utf-8',
        'checkcharsetin':'strict',      #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcharsetout':'strict',     #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcollision':False,
        'contenttype':'text/xml',
        'checkunknownentities': True,
        'decimaal':'.',
        'envelope':'templatehtml',
        'envelope-template':'',
        'escape':'',
        'field_sep':'',
        'forcequote':0, #csv only
        'lengthnumericbare':False,
        'merge':True,
        'noBOTSID':False,
        'output':'xhtml-strict',
        'quote_char':"",
        'pass_all':False,
        'record_sep':"",
        'record_tag_sep':"",    #Tradacoms/GTDI
        'reserve':'',
        'sfield_sep':'',
        'skip_char':'',
        'stripfield_sep':False,
        'triad':'',
        }
class edifact(Grammar):
    ''' Grammar subclass for UN/EDIFACT; UNB.* keys are envelope defaults. '''
    defaultsyntax = {
        'add_crlfafterrecord_sep':'\r\n',
        'acceptspaceinnumfield':True,   #only really used in fixed formats
        'charset':'UNOA',
        'checkcharsetin':'strict',      #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcharsetout':'strict',     #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcollision':True,
        'checkunknownentities': True,
        'contenttype':'application/EDIFACT',
        'decimaal':'.',
        'envelope':'edifact',
        'escape':'?',
        'field_sep':'+',
        'forcequote':0, #csv only
        'forceUNA' : False,
        'lengthnumericbare':True,
        'merge':True,
        'noBOTSID':False,
        'pass_all':False,
        'quote_char':'',
        'record_sep':"'",
        'record_tag_sep':"",    #Tradacoms/GTDI
        'reserve':'*',
        'sfield_sep':':',
        'skip_char':'\r\n',
        'stripfield_sep':True,
        'triad':'',
        'version':'3',
        'wrap_length':0,    #for producing wrapped format, where a file consists of fixed length records ending with crr/lf. Often seen in mainframe, as400
        'UNB.S002.0007':'14',
        'UNB.S003.0007':'14',
        'UNB.0026':'',
        'UNB.0035':'0',
        }
    #edifact formats: 'N' maps to internal 'R' (floating decimal point)
    formatconvert = {
        'A':'A',
        'AN':'A',
        'N':'R',
        }
class x12(Grammar):
    ''' Grammar subclass for ANSI X12; ISA*/GS* keys are envelope defaults.
        X12 'Nx' formats are implicit-decimal: the digit after N is the
        number of decimals (handled in _manipulatefieldformat).
    '''
    defaultsyntax = {
        'add_crlfafterrecord_sep':'\r\n',
        'acceptspaceinnumfield':True,   #only really used in fixed formats
        'charset':'us-ascii',
        'checkcharsetin':'strict',      #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcharsetout':'strict',     #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcollision':True,
        'checkunknownentities': True,
        'contenttype':'application/X12',
        'decimaal':'.',
        'envelope':'x12',
        'escape':'',
        'field_sep':'*',
        'forcequote':0, #csv only
        'functionalgroup' : 'XX',
        'lengthnumericbare':True,
        'merge':True,
        'noBOTSID':False,
        'pass_all':False,
        'quote_char':'',
        'record_sep':"~",
        'record_tag_sep':"",    #Tradacoms/GTDI
        'replacechar':'',   #if separator found, replace by this character; if replacechar is an empty string: raise error
        'reserve':'^',
        'sfield_sep':'>',   #advised '\'?
        'skip_char':'\r\n',
        'stripfield_sep':True,
        'triad':'',
        'version':'00403',
        'wrap_length':0,    #for producing wrapped format, where a file consists of fixed length records ending with crr/lf. Often seen in mainframe, as400
        'ISA01':'00',
        'ISA02':'          ',
        'ISA03':'00',
        'ISA04':'          ',
        'ISA05':'01',
        'ISA07':'01',
        'ISA11':'U',    #since ISA version 00403 this is the reserve/repetition separator. Bots does not use this anymore for ISA version >00403
        'ISA14':'1',
        'ISA15':'P',
        'GS07':'X',
        }
    formatconvert = {
        'AN':'A',
        'DT':'D',
        'TM':'T',
        'N':'I',
        'N0':'I',
        'N1':'I',
        'N2':'I',
        'N3':'I',
        'N4':'I',
        'N5':'I',
        'N6':'I',
        'N7':'I',
        'N8':'I',
        'N9':'I',
        'R':'R',
        'B':'A',
        'ID':'A',
        }
    def _manipulatefieldformat(self,field,recordID):
        ''' As Grammar._manipulatefieldformat, plus: for implicit-decimal ('I')
            formats derive DECIMALS from the digit in the grammar format
            ('N2' -> 2 decimals; bare 'N' -> 0 decimals).
        '''
        super(x12,self)._manipulatefieldformat(field,recordID)
        if field[BFORMAT]=='I':
            if field[FORMAT][1:]:   #format like 'N2': digit after 'N' is nr of decimals
                field[DECIMALS] = int(field[FORMAT][1])
            else:
                field[DECIMALS] = 0
class json(Grammar):
    ''' Grammar subclass for the json editype.
        NOTE(review): the class name shadows the stdlib json module within this module.
    '''
    defaultsyntax = {
        'add_crlfafterrecord_sep':'',
        'acceptspaceinnumfield':True,   #only really used in fixed formats
        'charset':'utf-8',
        'checkcharsetin':'strict',      #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcharsetout':'strict',     #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcollision':False,
        'checkunknownentities': True,   #??changed later
        'contenttype':'text/xml ',
        'decimaal':'.',
        'defaultBOTSIDroot':'ROOT',
        'envelope':'',
        'escape':'',
        'field_sep':'',
        'forcequote':0, #csv only
        'indented':False,   #False: output is one string (no cr/lf); True: output is indented/human readable
        'lengthnumericbare':False,
        'merge':False,
        'noBOTSID':False,
        'pass_all':False,
        'quote_char':"",
        'record_sep':"",
        'record_tag_sep':"",    #Tradacoms/GTDI
        'reserve':'',
        'sfield_sep':'',
        'skip_char':'',
        'stripfield_sep':False,
        'triad':'',
        }
class jsonnocheck(json):
    ''' Like json, but no structure/recorddefs is required or checked. '''
    _checkstructurerequired=False
    defaultsyntax = {
        'add_crlfafterrecord_sep':'',
        'acceptspaceinnumfield':True,   #only really used in fixed formats
        'charset':'utf-8',
        'checkcharsetin':'strict',      #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcharsetout':'strict',     #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcollision':False,
        'checkunknownentities': False,
        'contenttype':'text/xml ',
        'decimaal':'.',
        'defaultBOTSIDroot':'ROOT',
        'envelope':'',
        'escape':'',
        'field_sep':'',
        'forcequote':0, #csv only
        'indented':False,   #False: output is one string (no cr/lf); True: output is indented/human readable
        'lengthnumericbare':False,
        'merge':False,
        'noBOTSID':False,
        'pass_all':False,
        'quote_char':"",
        'record_sep':"",
        'record_tag_sep':"",    #Tradacoms/GTDI
        'reserve':'',
        'sfield_sep':'',
        'skip_char':'',
        'stripfield_sep':False,
        'triad':'',
        }
class tradacoms(Grammar):
    ''' Grammar subclass for Tradacoms/GTDI; STX.* keys are envelope defaults. '''
    defaultsyntax = {
        'add_crlfafterrecord_sep':'\n',
        'acceptspaceinnumfield':True,   #only really used in fixed formats
        'charset':'us-ascii',
        'checkcharsetin':'strict',      #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcharsetout':'strict',     #strict, ignore or botsreplace (replace with char as set in bots.ini).
        'checkcollision':True,
        'checkunknownentities': True,
        'contenttype':'application/text',
        'decimaal':'.',
        'envelope':'tradacoms',
        'escape':'?',
        'field_sep':'+',
        'forcequote':0, #csv only
        'indented':False,   #False: output is one string (no cr/lf); True: output is indented/human readable
        'lengthnumericbare':True,
        'merge':False,
        'noBOTSID':False,
        'pass_all':False,
        'quote_char':'',
        'record_sep':"'",
        'record_tag_sep':"=",   #Tradacoms/GTDI
        'reserve':'',
        'sfield_sep':':',
        'skip_char':'\r\n',
        'stripfield_sep':True,
        'triad':'',
        'wrap_length':0,    #for producing wrapped format, where a file consists of fixed length records ending with crr/lf. Often seen in mainframe, as400
        'STX.STDS1':'ANA',
        'STX.STDS2':'1',
        'STX.FROM.02':'',
        'STX.UNTO.02':'',
        'STX.APRF':'',
        'STX.PRCD':'',
        }
    #tradacoms formats: '9' is floating decimal, '9V9' implicit decimal
    formatconvert = {
        'X':'A',
        '9':'R',
        '9V9':'I',
        }
class database(jsonnocheck):
    ''' Grammar subclass for the database editype; identical to jsonnocheck. '''
    pass
| [
[
1,
0,
0.0012,
0.0012,
0,
0.66,
0,
739,
0,
1,
0,
0,
739,
0,
0
],
[
1,
0,
0.0023,
0.0012,
0,
0.66,
0.0476,
389,
0,
1,
0,
0,
389,
0,
0
],
[
1,
0,
0.0035,
0.0012,
0,
... | [
"import copy",
"from django.utils.translation import ugettext as _",
"import botslib",
"import botsglobal",
"from botsconfig import *",
"def grammarread(editype,grammarname):\n ''' dispatch function for class Grammar or subclass\n read whole grammar\n '''\n try:\n classtocall = glob... |
import os
import glob
import time
import datetime
import stat
import shutil
from django.utils.translation import ugettext as _
#bots modules
import botslib
import botsglobal
from botsconfig import *
def cleanup():
    ''' public function, does all cleanup of the database and file system.
        Each step is best-effort: an error is logged, never propagated.
    '''
    try:
        _cleanupsession()
        _cleandatafile()
        _cleanarchive()
        _cleanpersist()
        _cleantransactions()
        _cleanprocessnothingreceived()
    except Exception:   #was a bare 'except:'; do not swallow SystemExit/KeyboardInterrupt
        botsglobal.logger.exception(u'Cleanup error.')
def _cleanupsession():
''' delete all expired sessions. Bots-engine starts up much more often than web-server.'''
vanaf = datetime.datetime.today()
botslib.change('''DELETE FROM django_session WHERE expire_date < %(vanaf)s''', {'vanaf':vanaf})
def _cleanarchive():
''' delete all archive directories older than maxdaysarchive days.'''
vanaf = (datetime.date.today()-datetime.timedelta(days=botsglobal.ini.getint('settings','maxdaysarchive',180))).strftime('%Y%m%d')
for row in botslib.query('''SELECT archivepath FROM channel '''):
if row['archivepath']:
vanafdir = botslib.join(row['archivepath'],vanaf)
for dir in glob.glob(botslib.join(row['archivepath'],'*')):
if dir < vanafdir:
shutil.rmtree(dir,ignore_errors=True)
def _cleandatafile():
''' delete all data files older than xx days.'''
vanaf = time.time() - (botsglobal.ini.getint('settings','maxdays',30) * 3600 * 24)
frompath = botslib.join(botsglobal.ini.get('directories','data','botssys/data'),'*')
for filename in glob.glob(frompath):
statinfo = os.stat(filename)
if not stat.S_ISDIR(statinfo.st_mode):
try:
os.remove(filename) #remove files - should be no files in root of data dir
except:
botsglobal.logger.exception(_(u'Cleanup could not remove file'))
elif statinfo.st_mtime > vanaf :
continue #directory is newer than maxdays, which is also true for the data files in it. Skip it.
else: #check files in dir and remove all older than maxdays
frompath2 = botslib.join(filename,'*')
emptydir=True #track check if directory is empty after loop (should directory itself be deleted/)
for filename2 in glob.glob(frompath2):
statinfo2 = os.stat(filename2)
if statinfo2.st_mtime > vanaf or stat.S_ISDIR(statinfo2.st_mode): #check files in dir and remove all older than maxdays
emptydir = False
else:
try:
os.remove(filename2)
except:
botsglobal.logger.exception(_(u'Cleanup could not remove file'))
if emptydir:
try:
os.rmdir(filename)
except:
botsglobal.logger.exception(_(u'Cleanup could not remove directory'))
def _cleanpersist():
'''delete all persist older than xx days.'''
vanaf = datetime.datetime.today() - datetime.timedelta(days=botsglobal.ini.getint('settings','maxdayspersist',30))
botslib.change('''DELETE FROM persist WHERE ts < %(vanaf)s''',{'vanaf':vanaf})
def _cleantransactions():
''' delete records from report, filereport and ta.
best indexes are on idta/reportidta; this should go fast.
'''
vanaf = datetime.datetime.today() - datetime.timedelta(days=botsglobal.ini.getint('settings','maxdays',30))
for row in botslib.query('''SELECT max(idta) as max FROM report WHERE ts < %(vanaf)s''',{'vanaf':vanaf}):
maxidta = row['max']
break
else: #if there is no maxidta to delete, do nothing
return
botslib.change('''DELETE FROM report WHERE idta < %(maxidta)s''',{'maxidta':maxidta})
botslib.change('''DELETE FROM filereport WHERE reportidta < %(maxidta)s''',{'maxidta':maxidta})
botslib.change('''DELETE FROM ta WHERE idta < %(maxidta)s''',{'maxidta':maxidta})
#the most recent run that is older than maxdays is kept (using < instead of <=).
#Reason: when deleting in ta this would leave the ta-records of the most recent run older than maxdays (except the first ta-record).
#this will not lead to problems.
def _cleanprocessnothingreceived():
''' delete all --new runs that received no files; including all process under the run
processes are organised as trees, so recursive.
'''
def core(idta):
#select db-ta's referring to this db-ta
for row in botslib.query('''SELECT idta
FROM ta
WHERE idta > %(idta)s
AND script=%(idta)s''',
{'idta':idta}):
core(row['idta'])
ta=botslib.OldTransaction(idta)
ta.delete()
return
#select root-processes older than hoursnotrefferedarekept
vanaf = datetime.datetime.today() - datetime.timedelta(hours=botsglobal.ini.getint('settings','hoursrunwithoutresultiskept',1))
for row in botslib.query('''SELECT idta
FROM report
WHERE type = 'new'
AND lastreceived=0
AND ts < %(vanaf)s''',
{'vanaf':vanaf}):
core(row['idta'])
#delete report
botslib.change('''DELETE FROM report WHERE idta=%(idta)s ''',{'idta':row['idta']})
| [
[
1,
0,
0.008,
0.008,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.016,
0.008,
0,
0.66,
0.0625,
958,
0,
1,
0,
0,
958,
0,
0
],
[
1,
0,
0.024,
0.008,
0,
0.66,
... | [
"import os",
"import glob",
"import time",
"import datetime",
"import stat",
"import shutil",
"from django.utils.translation import ugettext as _",
"import botslib",
"import botsglobal",
"from botsconfig import *",
"def cleanup():\n ''' public function, does all cleanup of the database and fi... |
#Globals used by Bots
incommunicate = False #used to set all incommunication off
db = None #db-object
ini = None #ini-file-object that is read (bots.ini)
routeid = '' #current route. This is used to set routeid for Processes.
preprocessnumber = 0 #different preprocessnumbers are needed for different preprocessing.
version = '2.1.0' #bots version
minta4query = 0 #used in retry; this determines which ta's are queried in a route
######################################
| [
[
14,
0,
0.2222,
0.1111,
0,
0.66,
0,
483,
1,
0,
0,
0,
0,
4,
0
],
[
14,
0,
0.3333,
0.1111,
0,
0.66,
0.1667,
761,
1,
0,
0,
0,
0,
9,
0
],
[
14,
0,
0.4444,
0.1111,
0,
0... | [
"incommunicate = False #used to set all incommunication off",
"db = None #db-object",
"ini = None #ini-file-object that is read (bots.ini)",
"routeid = '' #current route. This is used to set routeid for Processes.",
"preprocessnumber = 0 #different preprocessnumbe... |
#!/usr/bin/env python
from bots import xml2botsgrammar
if __name__=='__main__':
xml2botsgrammar.start()
| [
[
1,
0,
0.4,
0.2,
0,
0.66,
0,
261,
0,
1,
0,
0,
261,
0,
0
],
[
4,
0,
0.9,
0.4,
0,
0.66,
1,
0,
0,
0,
0,
0,
0,
0,
1
],
[
8,
1,
1,
0.2,
1,
0.53,
0,
511,
3,
... | [
"from bots import xml2botsgrammar",
"if __name__=='__main__':\n xml2botsgrammar.start()",
" xml2botsgrammar.start()"
] |
import os
import unittest
import shutil
import bots.inmessage as inmessage
import bots.outmessage as outmessage
import filecmp
try:
import json as simplejson
except ImportError:
import simplejson
import bots.botslib as botslib
import bots.botsinit as botsinit
import utilsunit
try:
import cElementTree as ET
except ImportError:
try:
import elementtree.ElementTree as ET
except ImportError:
try:
from xml.etree import cElementTree as ET
except ImportError:
from xml.etree import ElementTree as ET
'''
PLUGIN: unitinmessagejson.zip
'''
class InmessageJson(unittest.TestCase):
#~ #***********************************************************************
#~ #***********test json eg list of article (as eg used in database comm *******
#~ #***********************************************************************
def testjson01(self):
filein = 'botssys/infile/unitinmessagejson/org/01.jsn'
filecomp = 'botssys/infile/unitinmessagejson/comp/01.xml'
inn1 = inmessage.edifromfile(filename=filein,editype='json',messagetype='articles')
inn2 = inmessage.edifromfile(filename=filecomp,editype='xml',messagetype='articles')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
def testjson01nocheck(self):
filein = 'botssys/infile/unitinmessagejson/org/01.jsn'
filecomp = 'botssys/infile/unitinmessagejson/comp/01.xml'
inn1 = inmessage.edifromfile(filename=filein,editype='jsonnocheck',messagetype='articles')
inn2 = inmessage.edifromfile(filename=filecomp,editype='xml',messagetype='articles')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
def testjson11(self):
filein = 'botssys/infile/unitinmessagejson/org/11.jsn'
filecomp = 'botssys/infile/unitinmessagejson/comp/01.xml'
inn1 = inmessage.edifromfile(filename=filein,editype='json',messagetype='articles')
inn2 = inmessage.edifromfile(filename=filecomp,editype='xml',messagetype='articles')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
def testjson11nocheck(self):
filein = 'botssys/infile/unitinmessagejson/org/11.jsn'
filecomp = 'botssys/infile/unitinmessagejson/comp/01.xml'
inn1 = inmessage.edifromfile(filename=filein,editype='jsonnocheck',messagetype='articles')
inn2 = inmessage.edifromfile(filename=filecomp,editype='xml',messagetype='articles')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
#***********************************************************************
#*********json incoming tests complex structure*************************
#***********************************************************************
def testjsoninvoic01(self):
filein = 'botssys/infile/unitinmessagejson/org/invoic01.jsn'
filecomp = 'botssys/infile/unitinmessagejson/comp/invoic01.xml'
inn1 = inmessage.edifromfile(filename=filein,editype='json',messagetype='invoic')
inn2 = inmessage.edifromfile(filename=filecomp,editype='xml',messagetype='invoic')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
def testjsoninvoic01nocheck(self):
filein = 'botssys/infile/unitinmessagejson/org/invoic01.jsn'
filecomp = 'botssys/infile/unitinmessagejson/comp/invoic01.xml'
inn1 = inmessage.edifromfile(filename=filein,editype='jsonnocheck',messagetype='invoic')
inn2 = inmessage.edifromfile(filename=filecomp,editype='xml',messagetype='invoic')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
def testjsoninvoic02(self):
''' check 01.xml the same after rad&write/check '''
filein = 'botssys/infile/unitinmessagejson/org/invoic02.jsn'
filecomp = 'botssys/infile/unitinmessagejson/comp/invoic01.xml'
inn1 = inmessage.edifromfile(filename=filein,editype='json',messagetype='invoic')
inn2 = inmessage.edifromfile(filename=filecomp,editype='xml',messagetype='invoic')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
def testjsoninvoic02nocheck(self):
''' check 01.xml the same after rad&write/check '''
filein = 'botssys/infile/unitinmessagejson/org/invoic02.jsn'
filecomp = 'botssys/infile/unitinmessagejson/comp/invoic01.xml'
inn1 = inmessage.edifromfile(filename=filein,editype='jsonnocheck',messagetype='invoic')
inn2 = inmessage.edifromfile(filename=filecomp,editype='xml',messagetype='invoic')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
#***********************************************************************
#*********json incoming tests int,float*********************************
#***********************************************************************
def testjsoninvoic03(self):
''' test int, float in json '''
filein = 'botssys/infile/unitinmessagejson/org/invoic03.jsn'
filecomp = 'botssys/infile/unitinmessagejson/comp/invoic02.xml'
inn1 = inmessage.edifromfile(filename=filein,editype='json',messagetype='invoic')
inn2 = inmessage.edifromfile(filename=filecomp,editype='xml',messagetype='invoic')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
def testjsoninvoic03xmlnocheck(self):
''' test int, float in json '''
filein = 'botssys/infile/unitinmessagejson/org/invoic03.jsn'
filecomp = 'botssys/infile/unitinmessagejson/comp/invoic02.xml'
inn1 = inmessage.edifromfile(filename=filein,editype='json',messagetype='invoic')
inn2 = inmessage.edifromfile(filename=filecomp,editype='xmlnocheck',messagetype='invoic')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
def testjsoninvoic03nocheck(self):
''' test int, float in json '''
filein = 'botssys/infile/unitinmessagejson/org/invoic03.jsn'
filecomp = 'botssys/infile/unitinmessagejson/comp/invoic02.xml'
inn1 = inmessage.edifromfile(filename=filein,editype='jsonnocheck',messagetype='invoic')
inn2 = inmessage.edifromfile(filename=filecomp,editype='xml',messagetype='invoic')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
def testjsoninvoic03nocheckxmlnocheck(self):
''' test int, float in json '''
filein = 'botssys/infile/unitinmessagejson/org/invoic03.jsn'
filecomp = 'botssys/infile/unitinmessagejson/comp/invoic02.xml'
inn1 = inmessage.edifromfile(filename=filein,editype='jsonnocheck',messagetype='invoic')
inn2 = inmessage.edifromfile(filename=filecomp,editype='xmlnocheck',messagetype='invoic')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
def testjsondiv(self):
self.failUnless(inmessage.edifromfile(editype='json',messagetype='testjsonorder01',checkunknownentities=True,filename='botssys/infile/unitinmessagejson/org/130101.json'), 'standaard test')
self.failUnless(inmessage.edifromfile(editype='jsonnocheck',messagetype='jsonnocheck',filename='botssys/infile/unitinmessagejson/org/130101.json'), 'standaard test')
#empty object
self.assertRaises(botslib.InMessageError,inmessage.edifromfile, editype='json',messagetype='testjsonorder01',checkunknownentities=True,filename='botssys/infile/unitinmessagejson/org/130102.json')
#unknown field
self.failUnless(inmessage.edifromfile(editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130103.json'), 'unknown field')
self.failUnless(inmessage.edifromfile(editype='jsonnocheck',messagetype='jsonnocheck',filename='botssys/infile/unitinmessagejson/org/130103.json'), 'unknown field')
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='json',messagetype='testjsonorder01',checkunknownentities=True,filename='botssys/infile/unitinmessagejson/org/130103.json') #unknown field
#compare standard test with standard est with extra unknown fields and objects: must give same tree:
in1 = inmessage.edifromfile(editype='json',messagetype='testjsonorder01',checkunknownentities=True,filename='botssys/infile/unitinmessagejson/org/130101.json')
in2 = inmessage.edifromfile(editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130115.json')
self.failUnless(utilsunit.comparenode(in1.root,in2.root),'compare')
#numeriek field
self.failUnless(inmessage.edifromfile(editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130104.json'), 'numeriek field')
self.failUnless(inmessage.edifromfile(editype='jsonnocheck',messagetype='jsonnocheck',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130104.json'), 'numeriek field')
self.failUnless(inmessage.edifromfile(editype='json',messagetype='testjsonorder01',checkunknownentities=True,filename='botssys/infile/unitinmessagejson/org/130104.json'), 'numeriek field')
self.failUnless(inmessage.edifromfile(editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130105.json'), 'fucked up')
self.failUnless(inmessage.edifromfile(editype='jsonnocheck',messagetype='jsonnocheck',filename='botssys/infile/unitinmessagejson/org/130105.json'), 'fucked up')
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='json',messagetype='testjsonorder01',checkunknownentities=True,filename='botssys/infile/unitinmessagejson/org/130105.json') #fucked up
self.failUnless(inmessage.edifromfile(editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130106.json'), 'fucked up')
self.failUnless(inmessage.edifromfile(editype='jsonnocheck',messagetype='jsonnocheck',filename='botssys/infile/unitinmessagejson/org/130106.json'), 'fucked up')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile, editype='json',messagetype='testjsonorder01',checkunknownentities=True,filename='botssys/infile/unitinmessagejson/org/130106.json') #fucked up
self.failUnless(inmessage.edifromfile(editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130107.json'), 'fucked up')
self.failUnless(inmessage.edifromfile(editype='jsonnocheck',messagetype='jsonnocheck',filename='botssys/infile/unitinmessagejson/org/130107.json'), 'fucked up')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile, editype='json',messagetype='testjsonorder01',checkunknownentities=True,filename='botssys/infile/unitinmessagejson/org/130107.json') #fucked up
#root is list with 3 messagetrees
inn = inmessage.edifromfile(editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130108.json')
self.assertEqual(len(inn.root.children), 3,'should deliver 3 messagetrees')
inn = inmessage.edifromfile(editype='jsonnocheck',messagetype='jsonnocheck',filename='botssys/infile/unitinmessagejson/org/130108.json')
self.assertEqual(len(inn.root.children), 3,'should deliver 3 messagetrees')
#root is list, but list has a non-object member
self.failUnless(inmessage.edifromfile(editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130109.json'), 'root is list, but list has a non-object member')
self.failUnless(inmessage.edifromfile(editype='jsonnocheck',messagetype='jsonnocheck',filename='botssys/infile/unitinmessagejson/org/130109.json'), 'root is list, but list has a non-object member')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile, editype='json',messagetype='testjsonorder01',checkunknownentities=True,filename='botssys/infile/unitinmessagejson/org/130109.json') #root is list, but list has a non-object member
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130110.json') #too many occurences
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130111.json') #ent TEST1 should have a TEST2
self.failUnless(inmessage.edifromfile(editype='jsonnocheck',messagetype='jsonnocheck',filename='botssys/infile/unitinmessagejson/org/130111.json'), 'ent TEST1 should have a TEST2')
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='json',messagetype='testjsonorder01',checkunknownentities=True,filename='botssys/infile/unitinmessagejson/org/130111.json') #ent TEST1 should have a TEST2
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130112.json') #ent TEST1 has a TEST2
self.failUnless(inmessage.edifromfile(editype='jsonnocheck',messagetype='jsonnocheck',filename='botssys/infile/unitinmessagejson/org/130112.json'), 'ent TEST1 has a TEST2')
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='json',messagetype='testjsonorder01',checkunknownentities=True,filename='botssys/infile/unitinmessagejson/org/130112.json') #ent TEST1 has a TEST2
#unknown entries
inn = inmessage.edifromfile(editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130113.json')
#empty file
self.assertRaises(ValueError,inmessage.edifromfile, editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130114.json') #empty file
self.assertRaises(ValueError,inmessage.edifromfile, editype='jsonnocheck',messagetype='jsonnocheck',filename='botssys/infile/unitinmessagejson/org/130114.json') #empty file
#numeric key
self.assertRaises(ValueError,inmessage.edifromfile, editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130116.json')
#key is list
self.assertRaises(ValueError,inmessage.edifromfile, editype='json',messagetype='testjsonorder01',checkunknownentities=False,filename='botssys/infile/unitinmessagejson/org/130117.json')
def testinisoutjson01(self):
filein = 'botssys/infile/unitinmessagejson/org/inisout01.json'
fileout1 = 'botssys/infile/unitinmessagejson/output/inisout01.json'
fileout3 = 'botssys/infile/unitinmessagejson/output/inisout03.json'
utilsunit.readwrite(editype='json',messagetype='jsonorder',filenamein=filein,filenameout=fileout1)
utilsunit.readwrite(editype='jsonnocheck',messagetype='jsonnocheck',filenamein=filein,filenameout=fileout3)
inn1 = inmessage.edifromfile(filename=fileout1,editype='jsonnocheck',messagetype='jsonnocheck')
inn2 = inmessage.edifromfile(filename=fileout3,editype='jsonnocheck',messagetype='jsonnocheck')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
def testinisoutjson02(self):
#fails. this is because list of messages is read; and these are written in one time....nice for next release...
filein = 'botssys/infile/unitinmessagejson/org/inisout05.json'
fileout = 'botssys/infile/unitinmessagejson/output/inisout05.json'
inn = inmessage.edifromfile(editype='json',messagetype='jsoninvoic',filename=filein)
out = outmessage.outmessage_init(editype='json',messagetype='jsoninvoic',filename=fileout,divtext='',topartner='') #make outmessage object
#~ inn.root.display()
out.root = inn.root
out.writeall()
inn1 = inmessage.edifromfile(filename=filein,editype='jsonnocheck',messagetype='jsonnocheck',defaultBOTSIDroot='HEA')
inn2 = inmessage.edifromfile(filename=fileout,editype='jsonnocheck',messagetype='jsonnocheck')
#~ inn1.root.display()
#~ inn2.root.display()
#~ self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
#~ rawfile1 = utilsunit.readfile(filein)
#~ rawfile2 = utilsunit.readfile(fileout)
#~ jsonobject1 = simplejson.loads(rawfile1)
#~ jsonobject2 = simplejson.loads(rawfile2)
#~ self.assertEqual(jsonobject1,jsonobject2,'CmpJson')
def testinisoutjson03(self):
''' non-ascii-char'''
filein = 'botssys/infile/unitinmessagejson/org/inisout04.json'
fileout = 'botssys/infile/unitinmessagejson/output/inisout04.json'
utilsunit.readwrite(editype='json',messagetype='jsonorder',filenamein=filein,filenameout=fileout)
inn1 = inmessage.edifromfile(filename=filein,editype='jsonnocheck',messagetype='jsonnocheck')
inn2 = inmessage.edifromfile(filename=fileout,editype='jsonnocheck',messagetype='jsonnocheck')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
def testinisoutjson04(self):
filein = 'botssys/infile/unitinmessagejson/org/inisout05.json'
inn1 = inmessage.edifromfile(filename=filein,editype='jsonnocheck',messagetype='jsonnocheck',defaultBOTSIDroot='HEA')
inn2 = inmessage.edifromfile(filename=filein,editype='json',messagetype='jsoninvoic')
self.failUnless(utilsunit.comparenode(inn1.root,inn2.root))
if __name__ == '__main__':
botsinit.generalinit('config')
botsinit.initenginelogging()
shutil.rmtree('bots/botssys/infile/unitinmessagejson/output/',ignore_errors=True) #remove whole output directory
os.mkdir('bots/botssys/infile/unitinmessagejson/output')
unittest.main()
| [
[
1,
0,
0.004,
0.004,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.008,
0.004,
0,
0.66,
0.0769,
88,
0,
1,
0,
0,
88,
0,
0
],
[
1,
0,
0.012,
0.004,
0,
0.66,
... | [
"import os",
"import unittest",
"import shutil",
"import bots.inmessage as inmessage",
"import bots.outmessage as outmessage",
"import filecmp",
"try:\n import json as simplejson\nexcept ImportError:\n import simplejson",
" import json as simplejson",
" import simplejson",
"import bots... |
import os
import unittest
import shutil
import bots.inmessage as inmessage
import bots.outmessage as outmessage
import filecmp
import bots.botslib as botslib
import bots.botsinit as botsinit
import utilsunit
''' pluging unitinmessagexml.zip'''
class TestInmessage(unittest.TestCase):
''' Read messages; some should be OK (True), some shoudl give errors (False).
Tets per editype.
'''
def setUp(self):
pass
def testxml(self):
#~ #empty file
self.assertRaises(SyntaxError,inmessage.edifromfile, editype='xmlnocheck',messagetype='xmlnocheck',filename='botssys/infile/unitinmessagexml/xml/110401.xml')
self.assertRaises(SyntaxError,inmessage.edifromfile, editype='xml',messagetype='testxml', filename='botssys/infile/unitinmessagexml/xml/110401.xml')
self.assertRaises(SyntaxError,inmessage.edifromfile, editype='xml',messagetype='testxml',checkunknownentities=True, filename='botssys/infile/unitinmessagexml/xml/110401.xml')
self.assertRaises(SyntaxError,inmessage.edifromfile, editype='xml',messagetype='testxmlflatten', filename='botssys/infile/unitinmessagexml/xml/110401.xml')
#only root record in 110402.xml
self.failUnless(inmessage.edifromfile(editype='xmlnocheck',messagetype='xmlnocheck',filename='botssys/infile/unitinmessagexml/xml/110402.xml'), 'only a root tag; should be OK')
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='xml',messagetype='testxml',filename='botssys/infile/unitinmessagexml/xml/110402.xml')
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='xml',messagetype='testxml',checkunknownentities=True, filename='botssys/infile/unitinmessagexml/xml/110402.xml')
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='xml',messagetype='testxmlflatten',filename='botssys/infile/unitinmessagexml/xml/110402.xml')
#root tag different from grammar
self.assertRaises(botslib.InMessageError,inmessage.edifromfile, editype='xml',messagetype='testxml',filename='botssys/infile/unitinmessagexml/xml/110406.xml')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile, editype='xml',messagetype='testxml',checkunknownentities=True, filename='botssys/infile/unitinmessagexml/xml/110406.xml')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile, editype='xml',messagetype='testxmlflatten',filename='botssys/infile/unitinmessagexml/xml/110406.xml')
#root tag is double
self.assertRaises(SyntaxError,inmessage.edifromfile, editype='xmlnocheck',messagetype='xmlnocheck',filename='botssys/infile/unitinmessagexml/xml/110407.xml')
#invalid: no closing tag
self.assertRaises(SyntaxError,inmessage.edifromfile, editype='xmlnocheck',messagetype='xmlnocheck',filename='botssys/infile/unitinmessagexml/xml/110408.xml')
#invalid: extra closing tag
self.assertRaises(SyntaxError,inmessage.edifromfile, editype='xmlnocheck',messagetype='xmlnocheck',filename='botssys/infile/unitinmessagexml/xml/110409.xml')
#invalid: mandatory xml-element missing
self.failUnless(inmessage.edifromfile(editype='xmlnocheck',messagetype='xmlnocheck',filename='botssys/infile/unitinmessagexml/xml/110410.xml'), '')
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='xml',messagetype='testxml',filename='botssys/infile/unitinmessagexml/xml/110410.xml')
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='xml',messagetype='testxml',checkunknownentities=True, filename='botssys/infile/unitinmessagexml/xml/110410.xml')
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='xml',messagetype='testxmlflatten',filename='botssys/infile/unitinmessagexml/xml/110410.xml')
#invalid: to many occurences
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='xml',messagetype='testxml',filename='botssys/infile/unitinmessagexml/xml/110411.xml')
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='xml',messagetype='testxml',checkunknownentities=True, filename='botssys/infile/unitinmessagexml/xml/110411.xml')
#invalid: missing mandatory xml attribute
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='xml',messagetype='testxml',filename='botssys/infile/unitinmessagexml/xml/110412.xml')
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='xml',messagetype='testxml',checkunknownentities=True,filename='botssys/infile/unitinmessagexml/xml/110412.xml')
#unknown xml element
self.assertRaises(botslib.MessageError,inmessage.edifromfile, editype='xml',messagetype='testxml',checkunknownentities=True,filename='botssys/infile/unitinmessagexml/xml/110413.xml')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile, editype='xml',messagetype='testxml',checkunknownentities=True,filename='botssys/infile/unitinmessagexml/xml/110414.xml')
#2x the same xml attribute
self.assertRaises(SyntaxError,inmessage.edifromfile, editype='xml',messagetype='testxml',filename='botssys/infile/unitinmessagexml/xml/110415.xml')
self.assertRaises(SyntaxError,inmessage.edifromfile, editype='xml',messagetype='testxml',checkunknownentities=True,filename='botssys/infile/unitinmessagexml/xml/110415.xml')
#messages with all max occurences, use attributes, etc
in1 = inmessage.edifromfile(editype='xml',messagetype='testxml',filename='botssys/infile/unitinmessagexml/xml/110416.xml') #all elements, attributes
#other order of xml elements; should esult in the same node tree
in1 = inmessage.edifromfile(editype='xml',messagetype='testxml',filename='botssys/infile/unitinmessagexml/xml/110417.xml') #as 18., other order of elements
in2 = inmessage.edifromfile(editype='xml',messagetype='testxml',filename='botssys/infile/unitinmessagexml/xml/110418.xml')
self.failUnless(utilsunit.comparenode(in2.root,in1.root),'compare')
#??what is tested here??
inn7= inmessage.edifromfile(editype='xml',messagetype='testxml',checkunknownentities=True,filename='botssys/infile/unitinmessagexml/xml/110405.xml') #with <?xml version="1.0" encoding="utf-8"?>
inn8= inmessage.edifromfile(editype='xml',messagetype='testxmlflatten',checkunknownentities=True,filename='botssys/infile/unitinmessagexml/xml/110405.xml') #with <?xml version="1.0" encoding="utf-8"?>
self.failUnless(utilsunit.comparenode(inn7.root,inn8.root),'compare')
#~ #test different file which should give equal results
in1= inmessage.edifromfile(editype='xmlnocheck',messagetype='xmlnocheck',filename='botssys/infile/unitinmessagexml/xml/110403.xml') #no grammar used
in5= inmessage.edifromfile(editype='xmlnocheck',messagetype='xmlnocheck',filename='botssys/infile/unitinmessagexml/xml/110404.xml') #no grammar used
in6= inmessage.edifromfile(editype='xmlnocheck',messagetype='xmlnocheck',filename='botssys/infile/unitinmessagexml/xml/110405.xml') #no grammar used
in2= inmessage.edifromfile(editype='xml',messagetype='testxml',filename='botssys/infile/unitinmessagexml/xml/110403.xml') #with <?xml version="1.0" encoding="utf-8"?>
in3= inmessage.edifromfile(editype='xml',messagetype='testxml',filename='botssys/infile/unitinmessagexml/xml/110404.xml') #without <?xml version="1.0" encoding="utf-8"?>
in4= inmessage.edifromfile(editype='xml',messagetype='testxml',filename='botssys/infile/unitinmessagexml/xml/110405.xml') #use cr/lf and whitespace for 'nice' xml
self.failUnless(utilsunit.comparenode(in2.root,in1.root),'compare')
self.failUnless(utilsunit.comparenode(in2.root,in3.root),'compare')
self.failUnless(utilsunit.comparenode(in2.root,in4.root),'compare')
self.failUnless(utilsunit.comparenode(in2.root,in5.root),'compare')
self.failUnless(utilsunit.comparenode(in2.root,in6.root),'compare')
#~ #test different file which should give equal results; flattenxml=True,
in1= inmessage.edifromfile(editype='xmlnocheck',messagetype='xmlnocheck',filename='botssys/infile/unitinmessagexml/xml/110403.xml') #no grammar used
in5= inmessage.edifromfile(editype='xmlnocheck',messagetype='xmlnocheck',filename='botssys/infile/unitinmessagexml/xml/110404.xml') #no grammar used
in6= inmessage.edifromfile(editype='xmlnocheck',messagetype='xmlnocheck',filename='botssys/infile/unitinmessagexml/xml/110405.xml') #no grammar used
in4= inmessage.edifromfile(editype='xml',messagetype='testxmlflatten',filename='botssys/infile/unitinmessagexml/xml/110405.xml') #use cr/lf and whitespace for 'nice' xml
in2= inmessage.edifromfile(editype='xml',messagetype='testxmlflatten',filename='botssys/infile/unitinmessagexml/xml/110403.xml') #with <?xml version="1.0" encoding="utf-8"?>
in3= inmessage.edifromfile(editype='xml',messagetype='testxmlflatten',filename='botssys/infile/unitinmessagexml/xml/110404.xml') #without <?xml version="1.0" encoding="utf-8"?>
self.failUnless(utilsunit.comparenode(in2.root,in1.root),'compare')
self.failUnless(utilsunit.comparenode(in2.root,in3.root),'compare')
self.failUnless(utilsunit.comparenode(in2.root,in4.root),'compare')
self.failUnless(utilsunit.comparenode(in2.root,in5.root),'compare')
self.failUnless(utilsunit.comparenode(in2.root,in6.root),'compare')
class Testinisoutxml(unittest.TestCase):
def testxml01a(self):
''' check xml; new behaviour'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout02.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout01a.xml'
utilsunit.readwrite(editype='xml',messagetype='xmlorder',filenamein=filenamein,filenameout=filenameout)
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamein))
def testxml02a(self):
''' check xmlnoccheck; new behaviour'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout02.xml'
filenametmp='botssys/infile/unitinmessagexml/output/inisout02tmpa.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout02a.xml'
utilsunit.readwrite(editype='xmlnocheck',messagetype='xmlnocheck',filenamein=filenamein,filenameout=filenametmp)
utilsunit.readwrite(editype='xml',messagetype='xmlorder',filenamein=filenametmp,filenameout=filenameout)
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamein))
def testxml03(self):
''' check xml (different grammar)'''
filenamein='botssys/infile/unitinmessagexml/xml/110419.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout03.xml'
utilsunit.readwrite(editype='xml',messagetype='testxmlflatten',charset='utf-8',filenamein=filenamein,filenameout=filenameout)
self.failUnless(filecmp.cmp('bots/' + filenamein,'bots/' + filenameout))
def testxml04(self):
''' check xmlnoccheck'''
filenamein='botssys/infile/unitinmessagexml/xml/110419.xml'
filenametmp='botssys/infile/unitinmessagexml/output/inisout04tmp.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout04.xml'
utilsunit.readwrite(editype='xmlnocheck',messagetype='xmlnocheck',charset='utf-8',filenamein=filenamein,filenameout=filenametmp)
utilsunit.readwrite(editype='xml',messagetype='testxmlflatten',charset='utf-8',filenamein=filenametmp,filenameout=filenameout)
self.failUnless(filecmp.cmp('bots/' + filenamein,'bots/' + filenameout))
def testxml05(self):
''' test xml; iso-8859-1'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout03.xml'
filenamecmp='botssys/infile/unitinmessagexml/xml/inisoutcompare05.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout05.xml'
utilsunit.readwrite(editype='xml',messagetype='testxml',filenamein=filenamein,filenameout=filenameout,charset='ISO-8859-1')
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamecmp))
def testxml06(self):
''' test xmlnocheck; iso-8859-1'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout03.xml'
filenametmp='botssys/infile/unitinmessagexml/output/inisout05tmp.xml'
filenamecmp='botssys/infile/unitinmessagexml/xml/inisoutcompare05.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout05a.xml'
utilsunit.readwrite(editype='xmlnocheck',messagetype='xmlnocheck',filenamein=filenamein,filenameout=filenametmp,charset='ISO-8859-1')
utilsunit.readwrite(editype='xml',messagetype='testxml',filenamein=filenametmp,filenameout=filenameout,charset='ISO-8859-1')
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamecmp))
def testxml09(self):
''' BOM;; BOM is not written....'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout05.xml'
filenamecmp='botssys/infile/unitinmessagexml/xml/inisout04.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout09.xml'
utilsunit.readwrite(editype='xml',messagetype='testxml',filenamein=filenamein,filenameout=filenameout,charset='utf-8')
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamecmp))
def testxml10(self):
''' BOM;; BOM is not written....'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout05.xml'
filenametmp='botssys/infile/unitinmessagexml/output/inisout10tmp.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout10.xml'
filenamecmp='botssys/infile/unitinmessagexml/xml/inisout04.xml'
utilsunit.readwrite(editype='xmlnocheck',messagetype='xmlnocheck',filenamein=filenamein,filenameout=filenametmp)
utilsunit.readwrite(editype='xml',messagetype='testxml',filenamein=filenametmp,filenameout=filenameout,charset='utf-8')
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamecmp))
def testxml11(self):
''' check xml; new behaviour; use standalone parameter'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout06.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout11.xml'
filenamecmp='botssys/infile/unitinmessagexml/xml/inisout02.xml'
utilsunit.readwrite(editype='xml',messagetype='xmlorder',filenamein=filenamein,filenameout=filenameout,standalone=None)
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamecmp))
def testxml11a(self):
''' check xml; new behaviour; use standalone parameter'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout06.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout11a.xml'
utilsunit.readwrite(editype='xml',messagetype='xmlorder',filenamein=filenamein,filenameout=filenameout,standalone='no')
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamein))
def testxml12(self):
''' check xmlnoccheck; new behaviour use standalone parameter'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout06.xml'
filenametmp='botssys/infile/unitinmessagexml/output/inisout12tmp.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout12.xml'
utilsunit.readwrite(editype='xmlnocheck',messagetype='xmlnocheck',filenamein=filenamein,filenameout=filenametmp,standalone='no')
utilsunit.readwrite(editype='xml',messagetype='xmlorder',filenamein=filenametmp,filenameout=filenameout,standalone='no')
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamein))
def testxml13(self):
''' check xml; read doctype&write doctype'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout13.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout13.xml'
utilsunit.readwrite(editype='xml',messagetype='xmlorder',filenamein=filenamein,filenameout=filenameout,DOCTYPE = 'mydoctype SYSTEM "mydoctype.dtd"')
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamein))
def testxml14(self):
''' check xmlnoccheck; read doctype&write doctype'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout13.xml'
filenametmp='botssys/infile/unitinmessagexml/output/inisout14tmp.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout14.xml'
utilsunit.readwrite(editype='xmlnocheck',messagetype='xmlnocheck',filenamein=filenamein,filenameout=filenametmp,DOCTYPE = 'mydoctype SYSTEM "mydoctype.dtd"')
utilsunit.readwrite(editype='xml',messagetype='xmlorder',filenamein=filenametmp,filenameout=filenameout,DOCTYPE = 'mydoctype SYSTEM "mydoctype.dtd"')
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamein))
def testxml15(self):
''' check xml; read processing_instructions&write processing_instructions'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout15.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout15.xml'
utilsunit.readwrite(editype='xml',messagetype='xmlorder',filenamein=filenamein,filenameout=filenameout,processing_instructions=[('xml-stylesheet' ,'href="mystylesheet.xsl" type="text/xml"'),('type-of-ppi' ,'attr1="value1" attr2="value2"')])
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamein))
def testxml16(self):
''' check xmlnoccheck; read processing_instructions&write processing_instructions'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout15.xml'
filenametmp='botssys/infile/unitinmessagexml/output/inisout16tmp.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout16.xml'
utilsunit.readwrite(editype='xmlnocheck',messagetype='xmlnocheck',filenamein=filenamein,filenameout=filenametmp,processing_instructions=[('xml-stylesheet' ,'href="mystylesheet.xsl" type="text/xml"'),('type-of-ppi' ,'attr1="value1" attr2="value2"')])
utilsunit.readwrite(editype='xml',messagetype='xmlorder',filenamein=filenametmp,filenameout=filenameout,processing_instructions=[('xml-stylesheet' ,'href="mystylesheet.xsl" type="text/xml"'),('type-of-ppi' ,'attr1="value1" attr2="value2"')])
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamein))
def testxml17(self):
''' check xml; read processing_instructions&doctype&comments. Do not write these.'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout17.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout17.xml'
filenamecmp='botssys/infile/unitinmessagexml/xml/inisout02.xml'
utilsunit.readwrite(editype='xml',messagetype='xmlorder',filenamein=filenamein,filenameout=filenameout)
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamecmp))
def testxml18(self):
''' check xml; read processing_instructions&doctype&comments. Do not write these.'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout17.xml'
filenametmp='botssys/infile/unitinmessagexml/output/inisout18tmp.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout18.xml'
filenamecmp='botssys/infile/unitinmessagexml/xml/inisout02.xml'
utilsunit.readwrite(editype='xmlnocheck',messagetype='xmlnocheck',filenamein=filenamein,filenameout=filenametmp)
utilsunit.readwrite(editype='xml',messagetype='xmlorder',filenamein=filenametmp,filenameout=filenameout)
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamecmp))
def testxml19(self):
''' check xml; indented; use lot of options.'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout02.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout19.xml'
filenamecmp='botssys/infile/unitinmessagexml/xml/inisout19.xml'
utilsunit.readwrite(editype='xml',messagetype='xmlorder',filenamein=filenamein,filenameout=filenameout,indented=True,standalone='yes',DOCTYPE = 'mydoctype SYSTEM "mydoctype.dtd"',processing_instructions=[('xml-stylesheet' ,'href="mystylesheet.xsl" type="text/xml"'),('type-of-ppi' ,'attr1="value1" attr2="value2"')])
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamecmp))
def testxml20(self):
''' check xml; indented; use lot of options.'''
filenamein='botssys/infile/unitinmessagexml/xml/inisout02.xml'
filenametmp='botssys/infile/unitinmessagexml/output/inisout20tmp.xml'
filenameout='botssys/infile/unitinmessagexml/output/inisout20.xml'
filenamecmp='botssys/infile/unitinmessagexml/xml/inisout19.xml'
utilsunit.readwrite(editype='xmlnocheck',messagetype='xmlnocheck',filenamein=filenamein,filenameout=filenametmp,indented=True,standalone='yes',DOCTYPE = 'mydoctype SYSTEM "mydoctype.dtd"',processing_instructions=[('xml-stylesheet' ,'href="mystylesheet.xsl" type="text/xml"'),('type-of-ppi' ,'attr1="value1" attr2="value2"')])
utilsunit.readwrite(editype='xml',messagetype='xmlorder',filenamein=filenametmp,filenameout=filenameout,indented=True,standalone='yes',DOCTYPE = 'mydoctype SYSTEM "mydoctype.dtd"',processing_instructions=[('xml-stylesheet' ,'href="mystylesheet.xsl" type="text/xml"'),('type-of-ppi' ,'attr1="value1" attr2="value2"')])
self.failUnless(filecmp.cmp('bots/' + filenameout,'bots/' + filenamecmp))
if __name__ == '__main__':
botsinit.generalinit('config')
#~ botslib.initbotscharsets()
botsinit.initenginelogging()
shutil.rmtree('bots/botssys/infile/unitinmessagexml/output',ignore_errors=True) #remove whole output directory
os.mkdir('bots/botssys/infile/unitinmessagexml/output')
unittest.main()
| [
[
1,
0,
0.0036,
0.0036,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0073,
0.0036,
0,
0.66,
0.0833,
88,
0,
1,
0,
0,
88,
0,
0
],
[
1,
0,
0.0109,
0.0036,
0,
0.... | [
"import os",
"import unittest",
"import shutil",
"import bots.inmessage as inmessage",
"import bots.outmessage as outmessage",
"import filecmp",
"import bots.botslib as botslib",
"import bots.botsinit as botsinit",
"import utilsunit",
"''' pluging unitinmessagexml.zip'''",
"class TestInmessage(u... |
import os
import unittest
import shutil
import filecmp
import bots.inmessage as inmessage
import bots.outmessage as outmessage
import bots.botslib as botslib
import bots.botsinit as botsinit
'''plugin unitnode.zip'''
class Testnode(unittest.TestCase):
''' test node.py and message.py.
'''
def testedifact01(self):
inn = inmessage.edifromfile(editype='edifact',messagetype='invoicwithenvelope',filename='botssys/infile/unitnode/nodetest01.edi')
out = outmessage.outmessage_init(editype='edifact',messagetype='invoicwithenvelope',filename='botssys/infile/unitnode/output/inisout03.edi',divtext='',topartner='') #make outmessage object
out.root = inn.root
#* getloop **************************************
count = 0
for t in inn.getloop({'BOTSID':'XXX'}):
count += 1
self.assertEqual(count,0,'Cmplines')
count = 0
for t in inn.getloop({'BOTSID':'UNB'}):
count += 1
self.assertEqual(count,2,'Cmplines')
count = 0
for t in out.getloop({'BOTSID':'UNB'},{'BOTSID':'XXX'}):
count += 1
self.assertEqual(count,0,'Cmplines')
count = 0
for t in out.getloop({'BOTSID':'UNB'},{'BOTSID':'UNH'}):
count += 1
self.assertEqual(count,3,'Cmplines')
count = 0
for t in inn.getloop({'BOTSID':'UNB'},{'BOTSID':'UNH'},{'BOTSID':'XXX'}):
count += 1
self.assertEqual(count,0,'Cmplines')
count = 0
for t in inn.getloop({'BOTSID':'UNB'},{'BOTSID':'UNH'},{'BOTSID':'LIN'}):
count += 1
self.assertEqual(count,6,'Cmplines')
count = 0
for t in inn.getloop({'BOTSID':'UNB'},{'BOTSID':'XXX'},{'BOTSID':'LIN'},{'BOTSID':'QTY'}):
count += 1
self.assertEqual(count,0,'Cmplines')
count = 0
for t in inn.getloop({'BOTSID':'UNB'},{'BOTSID':'UNH'},{'BOTSID':'LIN'},{'BOTSID':'XXX'}):
count += 1
self.assertEqual(count,0,'Cmplines')
count = 0
for t in inn.getloop({'BOTSID':'UNB'},{'BOTSID':'UNH'},{'BOTSID':'LIN'},{'BOTSID':'QTY'}):
count += 1
self.assertEqual(count,6,'Cmplines')
#* getcount, getcountmpath **************************************
count = 0
countlist=[5,0,1]
nrsegmentslist=[132,10,12]
for t in out.getloop({'BOTSID':'UNB'},{'BOTSID':'UNH'}):
count2 = 0
for u in t.getloop({'BOTSID':'UNH'},{'BOTSID':'LIN'}):
count2 += 1
count3 = t.getcountoccurrences({'BOTSID':'UNH'},{'BOTSID':'LIN'})
self.assertEqual(t.getcount(),nrsegmentslist[count],'Cmplines')
self.assertEqual(count2,countlist[count],'Cmplines')
self.assertEqual(count3,countlist[count],'Cmplines')
count += 1
self.assertEqual(out.getcountoccurrences({'BOTSID':'UNB'},{'BOTSID':'UNH'}),count,'Cmplines')
self.assertEqual(out.getcount(),sum(nrsegmentslist,4),'Cmplines')
#* get, getnozero, countmpath, sort**************************************
for t in out.getloop({'BOTSID':'UNB'},{'BOTSID':'UNH'}):
self.assertRaises(botslib.MappingRootError,out.get,())
self.assertRaises(botslib.MappingRootError,out.getnozero,())
self.assertRaises(botslib.MappingRootError,out.get,0)
self.assertRaises(botslib.MappingRootError,out.getnozero,0)
t.sort({'BOTSID':'UNH'},{'BOTSID':'LIN','C212.7140':None})
start='0'
for u in t.getloop({'BOTSID':'UNH'},{'BOTSID':'LIN'}):
nextstart = u.get({'BOTSID':'LIN','C212.7140':None})
self.failUnless(start<nextstart)
start = nextstart
t.sort({'BOTSID':'UNH'},{'BOTSID':'LIN','1082':None})
start='0'
for u in t.getloop({'BOTSID':'UNH'},{'BOTSID':'LIN'}):
nextstart = u.get({'BOTSID':'LIN','1082':None})
self.failUnless(start<nextstart)
start = nextstart
self.assertRaises(botslib.MappingRootError,out.get,())
self.assertRaises(botslib.MappingRootError,out.getnozero,())
#~ # self.assertRaises(botslib.MpathError,out.get,())
first=True
for t in out.getloop({'BOTSID':'UNB'}):
if first:
self.assertEqual('15',t.getcountsum({'BOTSID':'UNB'},{'BOTSID':'UNH'},{'BOTSID':'LIN','1082':None}),'Cmplines')
self.assertEqual('8',t.getcountsum({'BOTSID':'UNB'},{'BOTSID':'UNH'},{'BOTSID':'LIN'},{'BOTSID':'QTY','C186.6063':'47','C186.6060':None}),'Cmplines')
self.assertEqual('0',t.getcountsum({'BOTSID':'UNB'},{'BOTSID':'UNH'},{'BOTSID':'LIN'},{'BOTSID':'QTY','C186.6063':'12','C186.6060':None}),'Cmplines')
self.assertEqual('54.4',t.getcountsum({'BOTSID':'UNB'},{'BOTSID':'UNH'},{'BOTSID':'LIN'},{'BOTSID':'MOA','C516.5025':'203','C516.5004':None}),'Cmplines')
first = False
else:
self.assertEqual('1',t.getcountsum({'BOTSID':'UNB'},{'BOTSID':'UNH'},{'BOTSID':'LIN'},{'BOTSID':'QTY','C186.6063':'47','C186.6060':None}),'Cmplines')
self.assertEqual('0',t.getcountsum({'BOTSID':'UNB'},{'BOTSID':'UNH'},{'BOTSID':'LIN'},{'BOTSID':'QTY','C186.6063':'12','C186.6060':None}),'Cmplines')
self.assertEqual('0',t.getcountsum({'BOTSID':'UNB'},{'BOTSID':'UNH'},{'BOTSID':'LIN'},{'BOTSID':'MOA','C516.5025':'203','C516.5004':None}),'Cmplines')
def testedifact02(self):
#~ #display query correct? incluuding propagating 'down the tree'?
inn = inmessage.edifromfile(editype='edifact',messagetype='invoicwithenvelopetestquery',filename='botssys/infile/unitnode/nodetest01.edi')
inn.root.processqueries({},2)
inn.root.displayqueries()
if __name__ == '__main__':
import datetime
botsinit.generalinit('config')
botsinit.initenginelogging()
shutil.rmtree('bots/botssys/infile/unitnode/output',ignore_errors=True) #remove whole output directory
os.mkdir('bots/botssys/infile/unitnode/output')
unittest.main()
unittest.main()
unittest.main()
| [
[
1,
0,
0.0075,
0.0075,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.015,
0.0075,
0,
0.66,
0.1,
88,
0,
1,
0,
0,
88,
0,
0
],
[
1,
0,
0.0226,
0.0075,
0,
0.66,
... | [
"import os",
"import unittest",
"import shutil",
"import filecmp",
"import bots.inmessage as inmessage",
"import bots.outmessage as outmessage",
"import bots.botslib as botslib",
"import bots.botsinit as botsinit",
"'''plugin unitnode.zip'''",
"class Testnode(unittest.TestCase):\n ''' test node... |
#!/usr/bin/env python
if __name__ == '__main__':
import cProfile
cProfile.run('from bots import engine; engine.start()','profile.tmp')
import pstats
p = pstats.Stats('profile.tmp')
#~ p.sort_stats('cumulative').print_stats(25)
p.sort_stats('time').print_stats(25)
#~ p.print_callees('deepcopy').print_stats(1)
p.print_callees('mydeepcopy')
#~ p.sort_stats('time').print_stats('grammar.py',50)
| [
[
4,
0,
0.5,
0.6429,
0,
0.66,
0,
0,
0,
0,
0,
0,
0,
0,
5
],
[
1,
1,
0.2857,
0.0714,
1,
0.01,
0,
686,
0,
1,
0,
0,
686,
0,
0
],
[
8,
1,
0.3571,
0.0714,
1,
0.01,
0.... | [
"if __name__ == '__main__':\n import cProfile\n cProfile.run('from bots import engine; engine.start()','profile.tmp')\n import pstats\n p = pstats.Stats('profile.tmp')\n #~ p.sort_stats('cumulative').print_stats(25)\n p.sort_stats('time').print_stats(25)\n #~ p.print_callees('deepcopy').print_s... |
import unittest
import bots.inmessage as inmessage
import bots.botslib as botslib
import bots.botsinit as botsinit
import utilsunit
'''plugin unitinmessageedifact.zip'''
class TestInmessage(unittest.TestCase):
def testEdifact0401(self):
''' 0401 Errors in records'''
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0401/040101.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0401/040102.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0401/040103.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0401/040104.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0401/040105.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0401/040106.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0401/040107.edi')
def testedifact0403(self):
#~ #test charsets
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0403/040301.edi') #UNOA-regular OK for UNOA as UNOC
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0403/040302F-generated.edi') #UNOA-regular OK for UNOA as UNOC
in0= inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0403/040303.edi') #UNOA-regular also UNOA-strict
in1= inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0403/040306.edi') #UNOA regular
in2= inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0403/T0000000005.edi') #UNOA regular
in3= inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0403/T0000000006.edi') #UNOA regular
for in1node,in2node,in3node in zip(in1.nextmessage(),in2.nextmessage(),in3.nextmessage()):
self.failUnless(utilsunit.comparenode(in1node.root,in2node.root),'compare')
self.failUnless(utilsunit.comparenode(in1node.root,in3node.root),'compare')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0403/040305.edi') #needs UNOA regular
#~ in1= inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0403/040305.edi') #needs UNOA extended
in7= inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0403/040304.edi') #UNOB-regular
in5= inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0403/T0000000008.edi') #UNOB regular
in4= inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0403/T0000000007-generated.edi') #UNOB regular
in6= inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0403/T0000000009.edi') #UNOC
def testedifact0404(self):
#envelope tests
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040401.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040402.edi')
self.failUnless(inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040403.edi'), 'standaard test')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040404.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040405.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040406.edi')
#self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040407.edi') #syntax version '0'; is not checked anymore
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040408.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040409.edi')
self.failUnless(inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040410.edi'), 'standaard test')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040411.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040412.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040413.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040414.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040415.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040416.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040417.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0404/040418.edi')
def testedifact0407(self):
#lex test with characters in strange places
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0407/040701.edi')
self.failUnless(inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0407/040702.edi'), 'standaard test')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0407/040703.edi')
self.assertRaises(botslib.InMessageError,inmessage.edifromfile,editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0407/040704.edi')
self.failUnless(inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0407/040705.edi'), 'standaard test')
self.failUnless(inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0407/040706.edi'), 'UNOA Crtl-Z at end')
self.failUnless(inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0407/040707.edi'), 'UNOB Crtl-Z at end')
self.failUnless(inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0407/040708.edi'), 'UNOC Crtl-Z at end')
def testedifact0408(self):
#differentenvelopingsamecontent: 1rst UNH per UNB, 2nd has 2 UNB for all UNH's, 3rd has UNG-UNE
in1= inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0408/040801.edi')
in2= inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0408/040802.edi')
in3= inmessage.edifromfile(editype='edifact',messagetype='edifact',filename='botssys/infile/unitinmessageedifact/0408/040803.edi')
for in1node,in2node,in3node in zip(in1.nextmessage(),in2.nextmessage(),in3.nextmessage()):
self.failUnless(utilsunit.comparenode(in1node.root,in2node.root),'compare')
self.failUnless(utilsunit.comparenode(in1node.root,in3node.root),'compare')
if __name__ == '__main__':
botsinit.generalinit('config')
botsinit.initenginelogging()
unittest.main()
| [
[
1,
0,
0.0115,
0.0115,
0,
0.66,
0,
88,
0,
1,
0,
0,
88,
0,
0
],
[
1,
0,
0.023,
0.0115,
0,
0.66,
0.1429,
855,
0,
1,
0,
0,
855,
0,
0
],
[
1,
0,
0.0345,
0.0115,
0,
0.6... | [
"import unittest",
"import bots.inmessage as inmessage",
"import bots.botslib as botslib",
"import bots.botsinit as botsinit",
"import utilsunit",
"'''plugin unitinmessageedifact.zip'''",
"class TestInmessage(unittest.TestCase):\n def testEdifact0401(self):\n ''' 0401\tErrors in records'''\n ... |
import filecmp
import glob
import shutil
import os
import sys
import subprocess
import logging
import logging
import bots.botslib as botslib
import bots.botsinit as botsinit
import bots.botsglobal as botsglobal
from bots.botsconfig import *
'''plugin unitretry.zip'''
#!!!activate rotues
''' input: mime (complex structure); 2 different edi attachments, and ' tekst' attachemnt
some user scripts are written in this unit test; so one runs errors will occur; write user script which prevents error in next run
before running: delete all transactions.
runs OK if no errors in unit tests
'''
def dummylogger():
botsglobal.logger = logging.getLogger('dummy')
botsglobal.logger.setLevel(logging.ERROR)
botsglobal.logger.addHandler(logging.StreamHandler(sys.stdout))
def getlastreport():
for row in botslib.query(u'''SELECT *
FROM report
ORDER BY idta DESC
'''):
#~ print row
return row
def mycompare(dict1,dict2):
for key,value in dict1.items():
if value != dict2[key]:
raise Exception('error comparing "%s": should be %s but is %s (in db),'%(key,value,dict2[key]))
def scriptwrite(path,content):
f = open(path,'w')
f.write(content)
f.close()
if __name__ == '__main__':
pythoninterpreter = 'python2.7'
newcommand = [pythoninterpreter,'bots-engine.py',]
retrycommand = [pythoninterpreter,'bots-engine.py','--retry']
botsinit.generalinit('config')
botssys = botsglobal.ini.get('directories','botssys')
usersys = botsglobal.ini.get('directories','usersysabs')
dummylogger()
botsinit.connect()
try:
os.remove(os.path.join(usersys,'mappings','edifact','unitretry_2.py'))
os.remove(os.path.join(usersys,'mappings','edifact','unitretry_2.pyc'))
except:
pass
scriptwrite(os.path.join(usersys,'mappings','edifact','unitretry_2.py'),
'''
import bots.transform as transform
def main(inn,out):
raise Exception('test mapping')
transform.inn2out(inn,out)''')
#~ os.remove(os.path.join(usersys,'communicationscripts','unitretry_mime1_in.py'))
#~ os.remove(os.path.join(usersys,'communicationscripts','unitretry_mime1_in.pyc'))
scriptwrite(os.path.join(usersys,'communicationscripts','unitretry_mime1_in.py'),
'''
def accept_incoming_attachment(channeldict,ta,charset,content,contenttype):
if not content.startswith('UNB'):
raise Exception('test')
return True
''')
#
#new; error in mime-handling
subprocess.call(newcommand)
mycompare({'status':1,'lastreceived':1,'lasterror':1,'lastdone':0,'send':0},getlastreport())
#retry: again same error
subprocess.call(retrycommand)
mycompare({'status':1,'lastreceived':1,'lasterror':1,'lastdone':0,'send':0},getlastreport())
#
os.remove(os.path.join(usersys,'communicationscripts','unitretry_mime1_in.py'))
os.remove(os.path.join(usersys,'communicationscripts','unitretry_mime1_in.pyc'))
scriptwrite(os.path.join(usersys,'communicationscripts','unitretry_mime1_in.py'),
'''
def accept_incoming_attachment(channeldict,ta,charset,content,contenttype):
if not content.startswith('UNB'):
return False
return True
''')
#retry: mime is OK< but mapping error will occur
subprocess.call(retrycommand)
mycompare({'status':1,'lastreceived':1,'lasterror':1,'lastdone':0,'send':1},getlastreport())
#retry: mime is OK, same mapping error
subprocess.call(retrycommand)
mycompare({'status':1,'lastreceived':1,'lasterror':1,'lastdone':0,'send':0},getlastreport())
os.remove(os.path.join(usersys,'mappings','edifact','unitretry_2.py'))
os.remove(os.path.join(usersys,'mappings','edifact','unitretry_2.pyc'))
scriptwrite(os.path.join(usersys,'mappings','edifact','unitretry_2.py'),
'''
import bots.transform as transform
def main(inn,out):
transform.inn2out(inn,out)''')
#retry: whole translation is OK
subprocess.call(retrycommand)
mycompare({'status':0,'lastreceived':1,'lasterror':0,'lastdone':1,'send':2},getlastreport())
#new; whole transaltion is OK
subprocess.call(newcommand)
mycompare({'status':0,'lastreceived':1,'lasterror':0,'lastdone':1,'send':2},getlastreport())
logging.shutdown()
botsglobal.db.close | [
[
1,
0,
0.0081,
0.0081,
0,
0.66,
0,
891,
0,
1,
0,
0,
891,
0,
0
],
[
1,
0,
0.0161,
0.0081,
0,
0.66,
0.0556,
958,
0,
1,
0,
0,
958,
0,
0
],
[
1,
0,
0.0242,
0.0081,
0,
... | [
"import filecmp",
"import glob",
"import shutil",
"import os",
"import sys",
"import subprocess",
"import logging",
"import logging",
"import bots.botslib as botslib",
"import bots.botsinit as botsinit",
"import bots.botsglobal as botsglobal",
"from bots.botsconfig import *",
"'''plugin unit... |
import unittest
import bots.botslib as botslib
import bots.botsinit as botsinit
import bots.grammar as grammar
import bots.inmessage as inmessage
import bots.outmessage as outmessage
import utilsunit
''' plugin unitgrammar.zip '''
class TestGrammar(unittest.TestCase):
def testgeneralgrammarerrors(self):
self.assertRaises(botslib.GrammarError,grammar.grammarread,'flup','edifact') #not eexisting editype
self.assertRaises(botslib.GrammarError,grammar.syntaxread,'grammars','flup','edifact') #not eexisting editype
self.assertRaises(ImportError,grammar.grammarread,'edifact','flup') #not existing messagetype
self.assertRaises(ImportError,grammar.syntaxread,'grammars','edifact','flup') #not existing messagetype
self.assertRaises(botslib.GrammarError,grammar.grammarread,'test','test3') #no structure
self.assertRaises(ImportError,grammar.grammarread,'test','test4') #No tabel - Reference to not-existing tabel
self.assertRaises(botslib.ScriptImportError,grammar.grammarread,'test','test5') #Error in tabel: structure is not valid python list (syntax-error)
self.assertRaises(botslib.GrammarError,grammar.grammarread,'test','test6') #Error in tabel: record in structure not in recorddefs
self.assertRaises(ImportError,grammar.grammarread,'edifact','test7') #error in syntax
self.assertRaises(ImportError,grammar.syntaxread,'grammars','edifact','test7') #error in syntax
def testgramfieldedifact_and_general(self):
tabel = grammar.grammarread('edifact','edifact')
gramfield = tabel._checkfield
#edifact formats to bots formats
field = ['S001.0001','M', 1,'A']
fieldresult = ['S001.0001','M', 1,'A',True,0,0,'A']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4,'N']
fieldresult = ['S001.0001','M', 4,'N',True,0,0,'R']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4,'AN']
fieldresult = ['S001.0001','M', 4,'AN',True,0,0,'A']
gramfield(field,'')
self.assertEqual(field,fieldresult)
#min&max length
field = ['S001.0001','M', (2,4),'AN']
fieldresult = ['S001.0001','M', 4,'AN',True,0,2,'A']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', (0,4),'AN']
fieldresult = ['S001.0001','M', 4,'AN',True,0,0,'A']
gramfield(field,'')
self.assertEqual(field,fieldresult)
#decimals
field = ['S001.0001','M', 3.2,'N']
fieldresult = ['S001.0001','M', 3,'N',True,2,0,'R']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', (4,4.3),'N']
fieldresult = ['S001.0001','M', 4,'N',True,3,4,'R']
gramfield(field,'')
self.assertEqual(field,fieldresult),
field = ['S001.0001','M', (3.2,4.2),'N']
fieldresult = ['S001.0001','M', 4,'N',True,2,3,'R']
gramfield(field,'')
self.assertEqual(field,fieldresult),
#++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
#test all types of fields (I,R,N,A,D,T); tests not needed repeat for other editypes
#check field itself
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',4],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',4,'','M', 4,'','M'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',4,'','M', 4,''],'')
#check ID
self.assertRaises(botslib.GrammarError,gramfield,['','M', 4,'A'],'')
self.assertRaises(botslib.GrammarError,gramfield,[None,'M', 4,'A'],'')
#check M/C
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','A',4,'I'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','',4,'I'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001',[],4,'I'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','MC',4,'I'],'')
#check format
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',4,'I'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',4,'N7'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',4,''],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',4,5],'')
#check length
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M','N','N'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',0,'N'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',-2,'N'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',-3.2,'N'],'')
#length for formats without float
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',2.1,'A'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',(2.1,3),'A'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',(2,3.2),'A'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',(3,2),'A'],'')
#length for formats with float
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',1.1,'N'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',('A',5),'N'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',(-1,1),'N'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',(5,None),'N'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',(0,1.1),'N'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',(0,0),'N'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',(2,1),'N'],'')
def testgramfieldx12(self):
tabel = grammar.grammarread('x12','x12')
gramfield = tabel._checkfield
#x12 formats to bots formats
field = ['S001.0001','M', 1,'AN']
fieldresult = ['S001.0001','M', 1,'AN',True,0,0,'A']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4,'DT']
fieldresult = ['S001.0001','M', 4,'DT',True,0,0,'D']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4,'TM']
fieldresult = ['S001.0001','M', 4,'TM',True,0,0,'T']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4,'B']
fieldresult = ['S001.0001','M', 4,'B',True,0,0,'A']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4,'ID']
fieldresult = ['S001.0001','M', 4,'ID',True,0,0,'A']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4,'R']
fieldresult = ['S001.0001','M', 4,'R',True,0,0,'R']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4,'N']
fieldresult = ['S001.0001','M', 4,'N',True,0,0,'I']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4,'N0']
fieldresult = ['S001.0001','M', 4,'N0',True,0,0,'I']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4,'N3']
fieldresult = ['S001.0001','M', 4,'N3',True,3,0,'I']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4,'N9']
fieldresult = ['S001.0001','M', 4,'N9',True,9,0,'I']
gramfield(field,'')
self.assertEqual(field,fieldresult)
#decimals
field = ['S001.0001','M', 3,'R']
fieldresult = ['S001.0001','M', 3,'R',True,0,0,'R']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M',4.3,'R']
fieldresult = ['S001.0001','M', 4,'R',True,3,0,'R']
gramfield(field,'')
self.assertEqual(field,fieldresult)
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',4,'D'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',4.3,'I'],'')
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',4.3,'NO'],'')
def testgramfieldfixed(self):
tabel = grammar.grammarread('fixed','invoicfixed')
gramfield = tabel._checkfield
#fixed formats to bots formats
field = ['S001.0001','M', 1,'A']
fieldresult = ['S001.0001','M', 1,'A',True,0,1,'A']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4,'D']
fieldresult = ['S001.0001','M', 4,'D',True,0,4,'D']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4,'T']
fieldresult = ['S001.0001','M', 4,'T',True,0,4,'T']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4,'R']
fieldresult = ['S001.0001','M', 4,'R',True,0,4,'R']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4.3,'N']
fieldresult = ['S001.0001','M', 4,'N',True,3,4,'N']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M', 4.3,'I']
fieldresult = ['S001.0001','M', 4,'I',True,3,4,'I']
gramfield(field,'')
self.assertEqual(field,fieldresult)
field = ['S001.0001','M',4.3,'R']
fieldresult = ['S001.0001','M', 4,'R',True,3,4,'R']
gramfield(field,'')
self.assertEqual(field,fieldresult)
self.assertRaises(botslib.GrammarError,gramfield,['S001.0001','M',4,'B'],'')
if __name__ == '__main__':
botsinit.generalinit('config')
#~ botslib.initbotscharsets()
botsinit.initenginelogging()
unittest.main()
| [
[
1,
0,
0.0044,
0.0044,
0,
0.66,
0,
88,
0,
1,
0,
0,
88,
0,
0
],
[
1,
0,
0.0088,
0.0044,
0,
0.66,
0.1111,
816,
0,
1,
0,
0,
816,
0,
0
],
[
1,
0,
0.0132,
0.0044,
0,
0.... | [
"import unittest",
"import bots.botslib as botslib",
"import bots.botsinit as botsinit",
"import bots.grammar as grammar",
"import bots.inmessage as inmessage",
"import bots.outmessage as outmessage",
"import utilsunit",
"''' plugin unitgrammar.zip '''",
"class TestGrammar(unittest.TestCase):\n\n ... |
#!/usr/bin/env python
from bots import engine
if __name__ == '__main__':
engine.start()
| [
[
1,
0,
0.4,
0.2,
0,
0.66,
0,
261,
0,
1,
0,
0,
261,
0,
0
],
[
4,
0,
0.9,
0.4,
0,
0.66,
1,
0,
0,
0,
0,
0,
0,
0,
1
],
[
8,
1,
1,
0.2,
1,
0.21,
0,
511,
3,
... | [
"from bots import engine",
"if __name__ == '__main__':\n engine.start()",
" engine.start()"
] |
import sys
import os
import tarfile
import glob
import shutil
import subprocess
import traceback
import bots.botsglobal as botsglobal
def join(path,*paths):
return os.path.normpath(os.path.join(path,*paths))
#******************************************************************************
#*** start *********************************************
#******************************************************************************
def start():
print 'Installation of bots open source edi translator.'
#python version dependencies
version = str(sys.version_info[0]) + str(sys.version_info[1])
if version == '25':
pass
elif version == '26':
pass
elif version == '27':
pass
else:
raise Exception('Wrong python version, use python 2.5.*, 2.6.* or 2.7.*')
botsdir = os.path.dirname(botsglobal.__file__)
print ' Installed bots in "%s".'%(botsdir)
#******************************************************************************
#*** shortcuts *******************************************************
#******************************************************************************
scriptpath = join(sys.prefix,'Scripts')
shortcutdir = join(get_special_folder_path('CSIDL_COMMON_PROGRAMS'),'Bots2.1')
try:
os.mkdir(shortcutdir)
except:
pass
else:
directory_created(shortcutdir)
try:
#~ create_shortcut(join(scriptpath,'botswebserver'),'Bots open source EDI translator',join(shortcutdir,'Bots-webserver.lnk'))
create_shortcut(join(sys.prefix,'python'),'bots open source edi translator',join(shortcutdir,'bots-webserver.lnk'),join(scriptpath,'bots-webserver.py'))
file_created(join(shortcutdir,'bots-webserver.lnk'))
except:
print ' Failed to install shortcut/link for bots in your menu.'
else:
print ' Installed shortcut in "Program Files".'
#******************************************************************************
#*** install libraries, dependencies ***************************************
#******************************************************************************
for library in glob.glob(join(botsdir,'installwin','*.gz')):
tar = tarfile.open(library)
tar.extractall(path=os.path.dirname(library))
tar.close()
untar_dir = library[:-len('.tar.gz')]
subprocess.call([join(sys.prefix,'pythonw'), 'setup.py','install'],cwd=untar_dir,stdin=open(os.devnull,'r'),stdout=open(os.devnull,'w'),stderr=open(os.devnull,'w'))
shutil.rmtree(untar_dir, ignore_errors=True)
print ' Installed needed libraries.'
#******************************************************************************
#*** install configuration files **************************************
#******************************************************************************
if os.path.exists(join(botsdir,'config','settings.py')): #use this to see if this is an existing installation
print ' Found existing configuration files'
print ' Configuration files bots.ini and settings.py not overwritten.'
print ' Manual action is needed.'
print ' See bots web site-documentation-migrate for more info.'
else:
shutil.copy(join(botsdir,'install','bots.ini'),join(botsdir,'config','bots.ini'))
shutil.copy(join(botsdir,'install','settings.py'),join(botsdir,'config','settings.py'))
print ' Installed configuration files'
#******************************************************************************
#*** install database; upgrade existing db *********************************
#******************************************************************************
sqlitedir = join(botsdir,'botssys','sqlitedb')
if os.path.exists(join(sqlitedir,'botsdb')): #use this to see if this is an existing installation
print ' Found existing database file botssys/sqlitedb/botsdb'
print ' Manual action is needed - there is a tool/program to update the database.'
print ' See bots web site-documentation-migrate for more info.'
else:
if not os.path.exists(sqlitedir): #use this to see if this is an existing installation
os.makedirs(sqlitedir)
shutil.copy(join(botsdir,'install','botsdb'),join(sqlitedir,'botsdb'))
print ' Installed SQLite database'
#******************************************************************************
#******************************************************************************
if __name__=='__main__':
if len(sys.argv)>1 and sys.argv[1]=='-install':
try:
start()
except:
print traceback.format_exc(0)
print
print 'Bots installation failed.'
else:
print
print 'Bots installation succeeded.'
| [
[
1,
0,
0.0323,
0.0323,
0,
0.66,
0,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0645,
0.0323,
0,
0.66,
0.1111,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0968,
0.0323,
0,
... | [
"import sys",
"import os",
"import tarfile",
"import glob",
"import shutil",
"import subprocess",
"import traceback",
"import bots.botsglobal as botsglobal",
"def join(path,*paths):\n return os.path.normpath(os.path.join(path,*paths))",
" return os.path.normpath(os.path.join(path,*paths))",
... |
import os
import sys
from distutils.core import setup
def fullsplit(path, result=None):
"""
Split a pathname into components (the opposite of os.path.join) in a
platform-neutral way.
"""
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return fullsplit(head, [tail] + result)
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir != '':
os.chdir(root_dir)
for dirpath, dirnames, filenames in os.walk('bots'):
# Ignore dirnames that start with '.'
#~ for i, dirname in enumerate(dirnames):
#~ if dirname.startswith('.'): del dirnames[i]
if '__init__.py' in filenames:
packages.append('.'.join(fullsplit(dirpath)))
data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames if not f.endswith('.pyc')]])
elif filenames:
data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames if not f.endswith('.pyc')]])
#~ # Small hack for working with bdist_wininst.
#~ # See http://mail.python.org/pipermail/distutils-sig/2004-August/004134.html
#~ if len(sys.argv) > 1 and 'bdist_wininst' in sys.argv[1:]:
if len(sys.argv) > 1 and 'bdist_wininst' in sys.argv[1:]:
for file_info in data_files:
file_info[0] = '\\PURELIB\\%s' % file_info[0]
setup(
name="bots",
version="2.1.0",
author = "hjebbers",
author_email = "hjebbers@gmail.com",
url = "http://bots.sourceforge.net/",
description="Bots open source edi translator",
long_description = "Bots is complete software for edi (Electronic Data Interchange): translate and communicate. All major edi data formats are supported: edifact, x12, tradacoms, xml",
platforms="OS Independent (Written in an interpreted language)",
license="GNU General Public License (GPL)",
classifiers = [
'Development Status :: 5 - Production/Stable',
'Operating System :: OS Independent',
'Programming Language :: Python',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Topic :: Office/Business',
'Topic :: Office/Business :: Financial',
'Topic :: Other/Nonlisted Topic',
'Topic :: Communications',
'Environment :: Console',
'Environment :: Web Environment',
],
scripts = [ 'bots-webserver.py',
'bots-engine.py',
'postinstallation.py',
'bots-grammarcheck.py',
'bots-xml2botsgrammar.py',
#~ 'bots/bots-updatedb.py',
],
packages = packages,
data_files = data_files,
)
| [
[
1,
0,
0.0122,
0.0122,
0,
0.66,
0,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0244,
0.0122,
0,
0.66,
0.1111,
509,
0,
1,
0,
0,
509,
0,
0
],
[
1,
0,
0.0366,
0.0122,
0,
... | [
"import os",
"import sys",
"from distutils.core import setup",
"def fullsplit(path, result=None):\n \"\"\"\n Split a pathname into components (the opposite of os.path.join) in a\n platform-neutral way.\n \"\"\"\n if result is None:\n result = []\n head, tail = os.path.split(path)",
... |
'''
Module which prompts the user for translations and saves them.
TODO: implement
@author: Rodrigo Damazio
'''
class Translator(object):
'''
classdocs
'''
def __init__(self, language):
'''
Constructor
'''
self._language = language
def Translate(self, string_names):
print string_names | [
[
8,
0,
0.1905,
0.3333,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
3,
0,
0.7143,
0.619,
0,
0.66,
1,
229,
0,
2,
0,
0,
186,
0,
1
],
[
8,
1,
0.5238,
0.1429,
1,
0.2,
0... | [
"'''\nModule which prompts the user for translations and saves them.\n\nTODO: implement\n\n@author: Rodrigo Damazio\n'''",
"class Translator(object):\n '''\n classdocs\n '''\n\n def __init__(self, language):\n '''\n Constructor",
" '''\n classdocs\n '''",
" def __init__(self, language):\n '''... |
'''
Module which brings history information about files from Mercurial.
@author: Rodrigo Damazio
'''
import re
import subprocess
REVISION_REGEX = re.compile(r'(?P<hash>[0-9a-f]{12}):.*')
def _GetOutputLines(args):
'''
Runs an external process and returns its output as a list of lines.
@param args: the arguments to run
'''
process = subprocess.Popen(args,
stdout=subprocess.PIPE,
universal_newlines = True,
shell = False)
output = process.communicate()[0]
return output.splitlines()
def FillMercurialRevisions(filename, parsed_file):
'''
Fills the revs attribute of all strings in the given parsed file with
a list of revisions that touched the lines corresponding to that string.
@param filename: the name of the file to get history for
@param parsed_file: the parsed file to modify
'''
# Take output of hg annotate to get revision of each line
output_lines = _GetOutputLines(['hg', 'annotate', '-c', filename])
# Create a map of line -> revision (key is list index, line 0 doesn't exist)
line_revs = ['dummy']
for line in output_lines:
rev_match = REVISION_REGEX.match(line)
if not rev_match:
raise 'Unexpected line of output from hg: %s' % line
rev_hash = rev_match.group('hash')
line_revs.append(rev_hash)
for str in parsed_file.itervalues():
# Get the lines that correspond to each string
start_line = str['startLine']
end_line = str['endLine']
# Get the revisions that touched those lines
revs = []
for line_number in range(start_line, end_line + 1):
revs.append(line_revs[line_number])
# Merge with any revisions that were already there
# (for explict revision specification)
if 'revs' in str:
revs += str['revs']
# Assign the revisions to the string
str['revs'] = frozenset(revs)
def DoesRevisionSuperceed(filename, rev1, rev2):
'''
Tells whether a revision superceeds another.
This essentially means that the older revision is an ancestor of the newer
one.
This also returns True if the two revisions are the same.
@param rev1: the revision that may be superceeding the other
@param rev2: the revision that may be superceeded
@return: True if rev1 superceeds rev2 or they're the same
'''
if rev1 == rev2:
return True
# TODO: Add filename
args = ['hg', 'log', '-r', 'ancestors(%s)' % rev1, '--template', '{node|short}\n', filename]
output_lines = _GetOutputLines(args)
return rev2 in output_lines
def NewestRevision(filename, rev1, rev2):
'''
Returns which of two revisions is closest to the head of the repository.
If none of them is the ancestor of the other, then we return either one.
@param rev1: the first revision
@param rev2: the second revision
'''
if DoesRevisionSuperceed(filename, rev1, rev2):
return rev1
return rev2 | [
[
8,
0,
0.0319,
0.0532,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0745,
0.0106,
0,
0.66,
0.1429,
540,
0,
1,
0,
0,
540,
0,
0
],
[
1,
0,
0.0851,
0.0106,
0,
0.66... | [
"'''\nModule which brings history information about files from Mercurial.\n\n@author: Rodrigo Damazio\n'''",
"import re",
"import subprocess",
"REVISION_REGEX = re.compile(r'(?P<hash>[0-9a-f]{12}):.*')",
"def _GetOutputLines(args):\n '''\n Runs an external process and returns its output as a list of lines... |
'''
Module which parses a string XML file.
@author: Rodrigo Damazio
'''
from xml.parsers.expat import ParserCreate
import re
#import xml.etree.ElementTree as ET
class StringsParser(object):
'''
Parser for string XML files.
This object is not thread-safe and should be used for parsing a single file at
a time, only.
'''
def Parse(self, file):
'''
Parses the given file and returns a dictionary mapping keys to an object
with attributes for that key, such as the value, start/end line and explicit
revisions.
In addition to the standard XML format of the strings file, this parser
supports an annotation inside comments, in one of these formats:
<!-- KEEP_PARENT name="bla" -->
<!-- KEEP_PARENT name="bla" rev="123456789012" -->
Such an annotation indicates that we're explicitly inheriting form the
master file (and the optional revision says that this decision is compatible
with the master file up to that revision).
@param file: the name of the file to parse
'''
self._Reset()
# Unfortunately expat is the only parser that will give us line numbers
self._xml_parser = ParserCreate()
self._xml_parser.StartElementHandler = self._StartElementHandler
self._xml_parser.EndElementHandler = self._EndElementHandler
self._xml_parser.CharacterDataHandler = self._CharacterDataHandler
self._xml_parser.CommentHandler = self._CommentHandler
file_obj = open(file)
self._xml_parser.ParseFile(file_obj)
file_obj.close()
return self._all_strings
def _Reset(self):
self._currentString = None
self._currentStringName = None
self._currentStringValue = None
self._all_strings = {}
def _StartElementHandler(self, name, attrs):
if name != 'string':
return
if 'name' not in attrs:
return
assert not self._currentString
assert not self._currentStringName
self._currentString = {
'startLine' : self._xml_parser.CurrentLineNumber,
}
if 'rev' in attrs:
self._currentString['revs'] = [attrs['rev']]
self._currentStringName = attrs['name']
self._currentStringValue = ''
def _EndElementHandler(self, name):
if name != 'string':
return
assert self._currentString
assert self._currentStringName
self._currentString['value'] = self._currentStringValue
self._currentString['endLine'] = self._xml_parser.CurrentLineNumber
self._all_strings[self._currentStringName] = self._currentString
self._currentString = None
self._currentStringName = None
self._currentStringValue = None
def _CharacterDataHandler(self, data):
if not self._currentString:
return
self._currentStringValue += data
_KEEP_PARENT_REGEX = re.compile(r'\s*KEEP_PARENT\s+'
r'name\s*=\s*[\'"]?(?P<name>[a-z0-9_]+)[\'"]?'
r'(?:\s+rev=[\'"]?(?P<rev>[0-9a-f]{12})[\'"]?)?\s*',
re.MULTILINE | re.DOTALL)
def _CommentHandler(self, data):
keep_parent_match = self._KEEP_PARENT_REGEX.match(data)
if not keep_parent_match:
return
name = keep_parent_match.group('name')
self._all_strings[name] = {
'keepParent' : True,
'startLine' : self._xml_parser.CurrentLineNumber,
'endLine' : self._xml_parser.CurrentLineNumber
}
rev = keep_parent_match.group('rev')
if rev:
self._all_strings[name]['revs'] = [rev] | [
[
8,
0,
0.0261,
0.0435,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0609,
0.0087,
0,
0.66,
0.3333,
573,
0,
1,
0,
0,
573,
0,
0
],
[
1,
0,
0.0696,
0.0087,
0,
0.66... | [
"'''\nModule which parses a string XML file.\n\n@author: Rodrigo Damazio\n'''",
"from xml.parsers.expat import ParserCreate",
"import re",
"class StringsParser(object):\n '''\n Parser for string XML files.\n\n This object is not thread-safe and should be used for parsing a single file at\n a time, only.\n... |
#!/usr/bin/python
'''
Entry point for My Tracks i18n tool.
@author: Rodrigo Damazio
'''
import mytracks.files
import mytracks.translate
import mytracks.validate
import sys
def Usage():
print 'Usage: %s <command> [<language> ...]\n' % sys.argv[0]
print 'Commands are:'
print ' cleanup'
print ' translate'
print ' validate'
sys.exit(1)
def Translate(languages):
'''
Asks the user to interactively translate any missing or oudated strings from
the files for the given languages.
@param languages: the languages to translate
'''
validator = mytracks.validate.Validator(languages)
validator.Validate()
missing = validator.missing_in_lang()
outdated = validator.outdated_in_lang()
for lang in languages:
untranslated = missing[lang] + outdated[lang]
if len(untranslated) == 0:
continue
translator = mytracks.translate.Translator(lang)
translator.Translate(untranslated)
def Validate(languages):
'''
Computes and displays errors in the string files for the given languages.
@param languages: the languages to compute for
'''
validator = mytracks.validate.Validator(languages)
validator.Validate()
error_count = 0
if (validator.valid()):
print 'All files OK'
else:
for lang, missing in validator.missing_in_master().iteritems():
print 'Missing in master, present in %s: %s:' % (lang, str(missing))
error_count = error_count + len(missing)
for lang, missing in validator.missing_in_lang().iteritems():
print 'Missing in %s, present in master: %s:' % (lang, str(missing))
error_count = error_count + len(missing)
for lang, outdated in validator.outdated_in_lang().iteritems():
print 'Outdated in %s: %s:' % (lang, str(outdated))
error_count = error_count + len(outdated)
return error_count
if __name__ == '__main__':
argv = sys.argv
argc = len(argv)
if argc < 2:
Usage()
languages = mytracks.files.GetAllLanguageFiles()
if argc == 3:
langs = set(argv[2:])
if not langs.issubset(languages):
raise 'Language(s) not found'
# Filter just to the languages specified
languages = dict((lang, lang_file)
for lang, lang_file in languages.iteritems()
if lang in langs or lang == 'en' )
cmd = argv[1]
if cmd == 'translate':
Translate(languages)
elif cmd == 'validate':
error_count = Validate(languages)
else:
Usage()
error_count = 0
print '%d errors found.' % error_count
| [
[
8,
0,
0.0417,
0.0521,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0833,
0.0104,
0,
0.66,
0.125,
640,
0,
1,
0,
0,
640,
0,
0
],
[
1,
0,
0.0938,
0.0104,
0,
0.66,... | [
"'''\nEntry point for My Tracks i18n tool.\n\n@author: Rodrigo Damazio\n'''",
"import mytracks.files",
"import mytracks.translate",
"import mytracks.validate",
"import sys",
"def Usage():\n print('Usage: %s <command> [<language> ...]\\n' % sys.argv[0])\n print('Commands are:')\n print(' cleanup')\n p... |
'''
Module which compares languague files to the master file and detects
issues.
@author: Rodrigo Damazio
'''
import os
from mytracks.parser import StringsParser
import mytracks.history
class Validator(object):
def __init__(self, languages):
'''
Builds a strings file validator.
Params:
@param languages: a dictionary mapping each language to its corresponding directory
'''
self._langs = {}
self._master = None
self._language_paths = languages
parser = StringsParser()
for lang, lang_dir in languages.iteritems():
filename = os.path.join(lang_dir, 'strings.xml')
parsed_file = parser.Parse(filename)
mytracks.history.FillMercurialRevisions(filename, parsed_file)
if lang == 'en':
self._master = parsed_file
else:
self._langs[lang] = parsed_file
self._Reset()
def Validate(self):
'''
Computes whether all the data in the files for the given languages is valid.
'''
self._Reset()
self._ValidateMissingKeys()
self._ValidateOutdatedKeys()
def valid(self):
return (len(self._missing_in_master) == 0 and
len(self._missing_in_lang) == 0 and
len(self._outdated_in_lang) == 0)
def missing_in_master(self):
return self._missing_in_master
def missing_in_lang(self):
return self._missing_in_lang
def outdated_in_lang(self):
return self._outdated_in_lang
def _Reset(self):
# These are maps from language to string name list
self._missing_in_master = {}
self._missing_in_lang = {}
self._outdated_in_lang = {}
def _ValidateMissingKeys(self):
'''
Computes whether there are missing keys on either side.
'''
master_keys = frozenset(self._master.iterkeys())
for lang, file in self._langs.iteritems():
keys = frozenset(file.iterkeys())
missing_in_master = keys - master_keys
missing_in_lang = master_keys - keys
if len(missing_in_master) > 0:
self._missing_in_master[lang] = missing_in_master
if len(missing_in_lang) > 0:
self._missing_in_lang[lang] = missing_in_lang
def _ValidateOutdatedKeys(self):
'''
Computers whether any of the language keys are outdated with relation to the
master keys.
'''
for lang, file in self._langs.iteritems():
outdated = []
for key, str in file.iteritems():
# Get all revisions that touched master and language files for this
# string.
master_str = self._master[key]
master_revs = master_str['revs']
lang_revs = str['revs']
if not master_revs or not lang_revs:
print 'WARNING: No revision for %s in %s' % (key, lang)
continue
master_file = os.path.join(self._language_paths['en'], 'strings.xml')
lang_file = os.path.join(self._language_paths[lang], 'strings.xml')
# Assume that the repository has a single head (TODO: check that),
# and as such there is always one revision which superceeds all others.
master_rev = reduce(
lambda r1, r2: mytracks.history.NewestRevision(master_file, r1, r2),
master_revs)
lang_rev = reduce(
lambda r1, r2: mytracks.history.NewestRevision(lang_file, r1, r2),
lang_revs)
# If the master version is newer than the lang version
if mytracks.history.DoesRevisionSuperceed(lang_file, master_rev, lang_rev):
outdated.append(key)
if len(outdated) > 0:
self._outdated_in_lang[lang] = outdated
| [
[
8,
0,
0.0304,
0.0522,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.0696,
0.0087,
0,
0.66,
0.25,
688,
0,
1,
0,
0,
688,
0,
0
],
[
1,
0,
0.0783,
0.0087,
0,
0.66,
... | [
"'''\nModule which compares languague files to the master file and detects\nissues.\n\n@author: Rodrigo Damazio\n'''",
"import os",
"from mytracks.parser import StringsParser",
"import mytracks.history",
"class Validator(object):\n\n def __init__(self, languages):\n '''\n Builds a strings file valida... |
'''
Module for dealing with resource files (but not their contents).
@author: Rodrigo Damazio
'''
import os.path
from glob import glob
import re
MYTRACKS_RES_DIR = 'MyTracks/res'
ANDROID_MASTER_VALUES = 'values'
ANDROID_VALUES_MASK = 'values-*'
def GetMyTracksDir():
'''
Returns the directory in which the MyTracks directory is located.
'''
path = os.getcwd()
while not os.path.isdir(os.path.join(path, MYTRACKS_RES_DIR)):
if path == '/':
raise 'Not in My Tracks project'
# Go up one level
path = os.path.split(path)[0]
return path
def GetAllLanguageFiles():
'''
Returns a mapping from all found languages to their respective directories.
'''
mytracks_path = GetMyTracksDir()
res_dir = os.path.join(mytracks_path, MYTRACKS_RES_DIR, ANDROID_VALUES_MASK)
language_dirs = glob(res_dir)
master_dir = os.path.join(mytracks_path, MYTRACKS_RES_DIR, ANDROID_MASTER_VALUES)
if len(language_dirs) == 0:
raise 'No languages found!'
if not os.path.isdir(master_dir):
raise 'Couldn\'t find master file'
language_tuples = [(re.findall(r'.*values-([A-Za-z-]+)', dir)[0],dir) for dir in language_dirs]
language_tuples.append(('en', master_dir))
return dict(language_tuples)
| [
[
8,
0,
0.0667,
0.1111,
0,
0.66,
0,
0,
1,
0,
0,
0,
0,
0,
0
],
[
1,
0,
0.1333,
0.0222,
0,
0.66,
0.125,
79,
0,
1,
0,
0,
79,
0,
0
],
[
1,
0,
0.1556,
0.0222,
0,
0.66,
... | [
"'''\nModule for dealing with resource files (but not their contents).\n\n@author: Rodrigo Damazio\n'''",
"import os.path",
"from glob import glob",
"import re",
"MYTRACKS_RES_DIR = 'MyTracks/res'",
"ANDROID_MASTER_VALUES = 'values'",
"ANDROID_VALUES_MASK = 'values-*'",
"def GetMyTracksDir():\n '''\n... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.