repo_name
stringlengths
6
100
path
stringlengths
4
294
copies
stringlengths
1
5
size
stringlengths
4
6
content
stringlengths
606
896k
license
stringclasses
15 values
huguesv/PTVS
Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/win32com/client/build.py
7
23485
"""Contains knowledge to build a COM object definition. This module is used by both the @dynamic@ and @makepy@ modules to build all knowledge of a COM object. This module contains classes which contain the actual knowledge of the object. This include parameter and return type information, the COM dispid and CLSID, etc. Other modules may use this information to generate .py files, use the information dynamically, or possibly even generate .html documentation for objects. """ # # NOTES: DispatchItem and MapEntry used by dynamic.py. # the rest is used by makepy.py # # OleItem, DispatchItem, MapEntry, BuildCallList() is used by makepy import sys import string from keyword import iskeyword import pythoncom from pywintypes import TimeType import winerror import datetime # It isn't really clear what the quoting rules are in a C/IDL string and # literals like a quote char and backslashes makes life a little painful to # always render the string perfectly - so just punt and fall-back to a repr() def _makeDocString(s): if sys.version_info < (3,): s = s.encode("mbcs") return repr(s) error = "PythonCOM.Client.Build error" class NotSupportedException(Exception): pass # Raised when we cant support a param type. DropIndirection="DropIndirection" NoTranslateTypes = [ pythoncom.VT_BOOL, pythoncom.VT_CLSID, pythoncom.VT_CY, pythoncom.VT_DATE, pythoncom.VT_DECIMAL, pythoncom.VT_EMPTY, pythoncom.VT_ERROR, pythoncom.VT_FILETIME, pythoncom.VT_HRESULT, pythoncom.VT_I1, pythoncom.VT_I2, pythoncom.VT_I4, pythoncom.VT_I8, pythoncom.VT_INT, pythoncom.VT_NULL, pythoncom.VT_R4, pythoncom.VT_R8, pythoncom.VT_NULL, pythoncom.VT_STREAM, pythoncom.VT_UI1, pythoncom.VT_UI2, pythoncom.VT_UI4, pythoncom.VT_UI8, pythoncom.VT_UINT, pythoncom.VT_VOID, ] NoTranslateMap = {} for v in NoTranslateTypes: NoTranslateMap[v] = None class MapEntry: "Simple holder for named attibutes - items in a map." 
def __init__(self, desc_or_id, names=None, doc=None, resultCLSID=pythoncom.IID_NULL, resultDoc = None, hidden=0): if type(desc_or_id)==type(0): self.dispid = desc_or_id self.desc = None else: self.dispid = desc_or_id[0] self.desc = desc_or_id self.names = names self.doc = doc self.resultCLSID = resultCLSID self.resultDocumentation = resultDoc self.wasProperty = 0 # Have I been transformed into a function so I can pass args? self.hidden = hidden def GetResultCLSID(self): rc = self.resultCLSID if rc == pythoncom.IID_NULL: return None return rc # Return a string, suitable for output - either "'{...}'" or "None" def GetResultCLSIDStr(self): rc = self.GetResultCLSID() if rc is None: return "None" return repr(str(rc)) # Convert the IID object to a string, then to a string in a string. def GetResultName(self): if self.resultDocumentation is None: return None return self.resultDocumentation[0] class OleItem: typename = "OleItem" def __init__(self, doc=None): self.doc = doc if self.doc: self.python_name = MakePublicAttributeName(self.doc[0]) else: self.python_name = None self.bWritten = 0 self.bIsDispatch = 0 self.bIsSink = 0 self.clsid = None self.co_class = None class DispatchItem(OleItem): typename = "DispatchItem" def __init__(self, typeinfo=None, attr=None, doc=None, bForUser=1): OleItem.__init__(self,doc) self.propMap = {} self.propMapGet = {} self.propMapPut = {} self.mapFuncs = {} self.defaultDispatchName = None self.hidden = 0 if typeinfo: self.Build(typeinfo, attr, bForUser) def _propMapPutCheck_(self,key,item): ins, outs, opts = self.CountInOutOptArgs(item.desc[2]) if ins>1: # if a Put property takes more than 1 arg: if opts+1==ins or ins==item.desc[6]+1: newKey = "Set" + key deleteExisting = 0 # This one is still OK else: deleteExisting = 1 # No good to us if key in self.mapFuncs or key in self.propMapGet: newKey = "Set" + key else: newKey = key item.wasProperty = 1 self.mapFuncs[newKey] = item if deleteExisting: del self.propMapPut[key] def 
_propMapGetCheck_(self,key,item): ins, outs, opts = self.CountInOutOptArgs(item.desc[2]) if ins > 0: # if a Get property takes _any_ in args: if item.desc[6]==ins or ins==opts: newKey = "Get" + key deleteExisting = 0 # This one is still OK else: deleteExisting = 1 # No good to us if key in self.mapFuncs: newKey = "Get" + key else: newKey = key item.wasProperty = 1 self.mapFuncs[newKey] = item if deleteExisting: del self.propMapGet[key] def _AddFunc_(self,typeinfo,fdesc,bForUser): id = fdesc.memid funcflags = fdesc.wFuncFlags try: names = typeinfo.GetNames(id) name=names[0] except pythoncom.ole_error: name = "" names = None doc = None try: if bForUser: doc = typeinfo.GetDocumentation(id) except pythoncom.ole_error: pass if id==0 and name: self.defaultDispatchName = name invkind = fdesc.invkind # We need to translate any Alias', Enums, structs etc in result and args typerepr, flag, defval = fdesc.rettype # sys.stderr.write("%s result - %s -> " % (name, typerepr)) typerepr, resultCLSID, resultDoc = _ResolveType(typerepr, typeinfo) # sys.stderr.write("%s\n" % (typerepr,)) fdesc.rettype = typerepr, flag, defval, resultCLSID # Translate any Alias or Enums in argument list. argList = [] for argDesc in fdesc.args: typerepr, flag, defval = argDesc # sys.stderr.write("%s arg - %s -> " % (name, typerepr)) arg_type, arg_clsid, arg_doc = _ResolveType(typerepr, typeinfo) argDesc = arg_type, flag, defval, arg_clsid # sys.stderr.write("%s\n" % (argDesc[0],)) argList.append(argDesc) fdesc.args = tuple(argList) hidden = (funcflags & pythoncom.FUNCFLAG_FHIDDEN) != 0 if invkind == pythoncom.INVOKE_PROPERTYGET: map = self.propMapGet # This is not the best solution, but I dont think there is # one without specific "set" syntax. # If there is a single PUT or PUTREF, it will function as a property. # If there are both, then the PUT remains a property, and the PUTREF # gets transformed into a function. 
# (in vb, PUT=="obj=other_obj", PUTREF="set obj=other_obj elif invkind in (pythoncom.INVOKE_PROPERTYPUT, pythoncom.INVOKE_PROPERTYPUTREF): # Special case existing = self.propMapPut.get(name, None) if existing is not None: if existing.desc[4]==pythoncom.INVOKE_PROPERTYPUT: # Keep this one map = self.mapFuncs name = "Set"+name else: # Existing becomes a func. existing.wasProperty = 1 self.mapFuncs["Set"+name]=existing map = self.propMapPut # existing gets overwritten below. else: map = self.propMapPut # first time weve seen it. elif invkind == pythoncom.INVOKE_FUNC: map = self.mapFuncs else: map = None if not map is None: # if map.has_key(name): # sys.stderr.write("Warning - overwriting existing method/attribute %s\n" % name) map[name] = MapEntry(tuple(fdesc), names, doc, resultCLSID, resultDoc, hidden) # any methods that can't be reached via DISPATCH we return None # for, so dynamic dispatch doesnt see it. if fdesc.funckind != pythoncom.FUNC_DISPATCH: return None return (name,map) return None def _AddVar_(self,typeinfo,fdesc,bForUser): ### need pythoncom.VARFLAG_FRESTRICTED ... ### then check it if fdesc.varkind == pythoncom.VAR_DISPATCH: id = fdesc.memid names = typeinfo.GetNames(id) # Translate any Alias or Enums in result. typerepr, flags, defval = fdesc.elemdescVar typerepr, resultCLSID, resultDoc = _ResolveType(typerepr, typeinfo) fdesc.elemdescVar = typerepr, flags, defval doc = None try: if bForUser: doc = typeinfo.GetDocumentation(id) except pythoncom.ole_error: pass # handle the enumerator specially map = self.propMap # Check if the element is hidden. 
hidden = 0 if hasattr(fdesc,"wVarFlags"): hidden = (fdesc.wVarFlags & 0x40) != 0 # VARFLAG_FHIDDEN map[names[0]] = MapEntry(tuple(fdesc), names, doc, resultCLSID, resultDoc, hidden) return (names[0],map) else: return None def Build(self, typeinfo, attr, bForUser = 1): self.clsid = attr[0] self.bIsDispatch = (attr.wTypeFlags & pythoncom.TYPEFLAG_FDISPATCHABLE) != 0 if typeinfo is None: return # Loop over all methods for j in range(attr[6]): fdesc = typeinfo.GetFuncDesc(j) self._AddFunc_(typeinfo,fdesc,bForUser) # Loop over all variables (ie, properties) for j in range(attr[7]): fdesc = typeinfo.GetVarDesc(j) self._AddVar_(typeinfo,fdesc,bForUser) # Now post-process the maps. For any "Get" or "Set" properties # that have arguments, we must turn them into methods. If a method # of the same name already exists, change the name. for key, item in list(self.propMapGet.items()): self._propMapGetCheck_(key,item) for key, item in list(self.propMapPut.items()): self._propMapPutCheck_(key,item) def CountInOutOptArgs(self, argTuple): "Return tuple counting in/outs/OPTS. Sum of result may not be len(argTuple), as some args may be in/out." ins = out = opts = 0 for argCheck in argTuple: inOut = argCheck[1] if inOut==0: ins = ins + 1 out = out + 1 else: if inOut & pythoncom.PARAMFLAG_FIN: ins = ins + 1 if inOut & pythoncom.PARAMFLAG_FOPT: opts = opts + 1 if inOut & pythoncom.PARAMFLAG_FOUT: out = out + 1 return ins, out, opts def MakeFuncMethod(self, entry, name, bMakeClass = 1): # If we have a type description, and not varargs... 
if entry.desc is not None and (len(entry.desc) < 6 or entry.desc[6]!=-1): return self.MakeDispatchFuncMethod(entry, name, bMakeClass) else: return self.MakeVarArgsFuncMethod(entry, name, bMakeClass) def MakeDispatchFuncMethod(self, entry, name, bMakeClass = 1): fdesc = entry.desc doc = entry.doc names = entry.names ret = [] if bMakeClass: linePrefix = "\t" defNamedOptArg = "defaultNamedOptArg" defNamedNotOptArg = "defaultNamedNotOptArg" defUnnamedArg = "defaultUnnamedArg" else: linePrefix = "" defNamedOptArg = "pythoncom.Missing" defNamedNotOptArg = "pythoncom.Missing" defUnnamedArg = "pythoncom.Missing" defOutArg = "pythoncom.Missing" id = fdesc[0] s = linePrefix + 'def ' + name + '(self' + BuildCallList(fdesc, names, defNamedOptArg, defNamedNotOptArg, defUnnamedArg, defOutArg) + '):' ret.append(s) if doc and doc[1]: ret.append(linePrefix + '\t' + _makeDocString(doc[1])) # print "fdesc is ", fdesc resclsid = entry.GetResultCLSID() if resclsid: resclsid = "'%s'" % resclsid else: resclsid = 'None' # Strip the default values from the arg desc retDesc = fdesc[8][:2] argsDesc = tuple([what[:2] for what in fdesc[2]]) # The runtime translation of the return types is expensive, so when we know the # return type of the function, there is no need to check the type at runtime. # To qualify, this function must return a "simple" type, and have no byref args. # Check if we have byrefs or anything in the args which mean we still need a translate. 
param_flags = [what[1] for what in fdesc[2]] bad_params = [flag for flag in param_flags if flag & (pythoncom.PARAMFLAG_FOUT | pythoncom.PARAMFLAG_FRETVAL)!=0] s = None if len(bad_params)==0 and len(retDesc)==2 and retDesc[1]==0: rd = retDesc[0] if rd in NoTranslateMap: s = '%s\treturn self._oleobj_.InvokeTypes(%d, LCID, %s, %s, %s%s)' % (linePrefix, id, fdesc[4], retDesc, argsDesc, _BuildArgList(fdesc, names)) elif rd in [pythoncom.VT_DISPATCH, pythoncom.VT_UNKNOWN]: s = '%s\tret = self._oleobj_.InvokeTypes(%d, LCID, %s, %s, %s%s)\n' % (linePrefix, id, fdesc[4], retDesc, repr(argsDesc), _BuildArgList(fdesc, names)) s = s + '%s\tif ret is not None:\n' % (linePrefix,) if rd == pythoncom.VT_UNKNOWN: s = s + "%s\t\t# See if this IUnknown is really an IDispatch\n" % (linePrefix,) s = s + "%s\t\ttry:\n" % (linePrefix,) s = s + "%s\t\t\tret = ret.QueryInterface(pythoncom.IID_IDispatch)\n" % (linePrefix,) s = s + "%s\t\texcept pythoncom.error:\n" % (linePrefix,) s = s + "%s\t\t\treturn ret\n" % (linePrefix,) s = s + '%s\t\tret = Dispatch(ret, %s, %s)\n' % (linePrefix,repr(name), resclsid) s = s + '%s\treturn ret' % (linePrefix) elif rd == pythoncom.VT_BSTR: s = "%s\t# Result is a Unicode object\n" % (linePrefix,) s = s + '%s\treturn self._oleobj_.InvokeTypes(%d, LCID, %s, %s, %s%s)' % (linePrefix, id, fdesc[4], retDesc, repr(argsDesc), _BuildArgList(fdesc, names)) # else s remains None if s is None: s = '%s\treturn self._ApplyTypes_(%d, %s, %s, %s, %s, %s%s)' % (linePrefix, id, fdesc[4], retDesc, argsDesc, repr(name), resclsid, _BuildArgList(fdesc, names)) ret.append(s) ret.append("") return ret def MakeVarArgsFuncMethod(self, entry, name, bMakeClass = 1): fdesc = entry.desc names = entry.names doc = entry.doc ret = [] argPrefix = "self" if bMakeClass: linePrefix = "\t" else: linePrefix = "" ret.append(linePrefix + 'def ' + name + '(' + argPrefix + ', *args):') if doc and doc[1]: ret.append(linePrefix + '\t' + _makeDocString(doc[1])) if fdesc: invoketype = fdesc[4] else: 
invoketype = pythoncom.DISPATCH_METHOD s = linePrefix + '\treturn self._get_good_object_(self._oleobj_.Invoke(*((' ret.append(s + str(entry.dispid) + ",0,%d,1)+args)),'%s')" % (invoketype, names[0])) ret.append("") return ret # Note - "DispatchItem" poorly named - need a new intermediate class. class VTableItem(DispatchItem): def Build(self, typeinfo, attr, bForUser = 1): DispatchItem.Build(self, typeinfo, attr, bForUser) assert typeinfo is not None, "Cant build vtables without type info!" meth_list = list(self.mapFuncs.values()) + list(self.propMapGet.values()) + list(self.propMapPut.values()) meth_list.sort(key=lambda m: m.desc[7]) # Now turn this list into the run-time representation # (ready for immediate use or writing to gencache) self.vtableFuncs = [] for entry in meth_list: self.vtableFuncs.append( (entry.names, entry.dispid, entry.desc) ) # A Lazy dispatch item - builds an item on request using info from # an ITypeComp. The dynamic module makes the called to build each item, # and also holds the references to the typeinfo and typecomp. class LazyDispatchItem(DispatchItem): typename = "LazyDispatchItem" def __init__(self, attr, doc): self.clsid = attr[0] DispatchItem.__init__(self, None, attr, doc, 0) typeSubstMap = { pythoncom.VT_INT: pythoncom.VT_I4, pythoncom.VT_UINT: pythoncom.VT_UI4, pythoncom.VT_HRESULT: pythoncom.VT_I4, } def _ResolveType(typerepr, itypeinfo): # Resolve VT_USERDEFINED (often aliases or typed IDispatches) if type(typerepr)==tuple: indir_vt, subrepr = typerepr if indir_vt == pythoncom.VT_PTR: # If it is a VT_PTR to a VT_USERDEFINED that is an IDispatch/IUnknown, # then it resolves to simply the object. # Otherwise, it becomes a ByRef of the resolved type # We need to drop an indirection level on pointer to user defined interfaces. # eg, (VT_PTR, (VT_USERDEFINED, somehandle)) needs to become VT_DISPATCH # only when "somehandle" is an object. # but (VT_PTR, (VT_USERDEFINED, otherhandle)) doesnt get the indirection dropped. 
was_user = type(subrepr)==tuple and subrepr[0]==pythoncom.VT_USERDEFINED subrepr, sub_clsid, sub_doc = _ResolveType(subrepr, itypeinfo) if was_user and subrepr in [pythoncom.VT_DISPATCH, pythoncom.VT_UNKNOWN, pythoncom.VT_RECORD]: # Drop the VT_PTR indirection return subrepr, sub_clsid, sub_doc # Change PTR indirection to byref return subrepr | pythoncom.VT_BYREF, sub_clsid, sub_doc if indir_vt == pythoncom.VT_SAFEARRAY: # resolve the array element, and convert to VT_ARRAY subrepr, sub_clsid, sub_doc = _ResolveType(subrepr, itypeinfo) return pythoncom.VT_ARRAY | subrepr, sub_clsid, sub_doc if indir_vt == pythoncom.VT_CARRAY: # runtime has no support for this yet. # resolve the array element, and convert to VT_CARRAY # sheesh - return _something_ return pythoncom.VT_CARRAY, None, None if indir_vt == pythoncom.VT_USERDEFINED: try: resultTypeInfo = itypeinfo.GetRefTypeInfo(subrepr) except pythoncom.com_error as details: if details.hresult in [winerror.TYPE_E_CANTLOADLIBRARY, winerror.TYPE_E_LIBNOTREGISTERED]: # an unregistered interface return pythoncom.VT_UNKNOWN, None, None raise resultAttr = resultTypeInfo.GetTypeAttr() typeKind = resultAttr.typekind if typeKind == pythoncom.TKIND_ALIAS: tdesc = resultAttr.tdescAlias return _ResolveType(tdesc, resultTypeInfo) elif typeKind in [pythoncom.TKIND_ENUM, pythoncom.TKIND_MODULE]: # For now, assume Long return pythoncom.VT_I4, None, None elif typeKind == pythoncom.TKIND_DISPATCH: clsid = resultTypeInfo.GetTypeAttr()[0] retdoc = resultTypeInfo.GetDocumentation(-1) return pythoncom.VT_DISPATCH, clsid, retdoc elif typeKind in [pythoncom.TKIND_INTERFACE, pythoncom.TKIND_COCLASS]: # XXX - should probably get default interface for CO_CLASS??? 
clsid = resultTypeInfo.GetTypeAttr()[0] retdoc = resultTypeInfo.GetDocumentation(-1) return pythoncom.VT_UNKNOWN, clsid, retdoc elif typeKind == pythoncom.TKIND_RECORD: return pythoncom.VT_RECORD, None, None raise NotSupportedException("Can not resolve alias or user-defined type") return typeSubstMap.get(typerepr,typerepr), None, None def _BuildArgList(fdesc, names): "Builds list of args to the underlying Invoke method." # Word has TypeInfo for Insert() method, but says "no args" numArgs = max(fdesc[6], len(fdesc[2])) names = list(names) while None in names: i = names.index(None) names[i] = "arg%d" % (i,) # We've seen 'source safe' libraries offer the name of 'ret' params in # 'names' - although we can't reproduce this, it would be insane to offer # more args than we have arg infos for - hence the upper limit on names... names = list(map(MakePublicAttributeName, names[1:(numArgs + 1)])) name_num = 0 while len(names) < numArgs: names.append("arg%d" % (len(names),)) # As per BuildCallList(), avoid huge lines. # Hack a "\n" at the end of every 5th name - "strides" would be handy # here but don't exist in 2.2 for i in range(0, len(names), 5): names[i] = names[i] + "\n\t\t\t" return "," + ", ".join(names) valid_identifier_chars = string.ascii_letters + string.digits + "_" def demunge_leading_underscores(className): i = 0 while className[i] == "_": i += 1 assert i >= 2, "Should only be here with names starting with '__'" return className[i-1:] + className[:i-1] # Given a "public name" (eg, the name of a class, function, etc) # make sure it is a legal (and reasonable!) Python name. def MakePublicAttributeName(className, is_global = False): # Given a class attribute that needs to be public, convert it to a # reasonable name. # Also need to be careful that the munging doesnt # create duplicates - eg, just removing a leading "_" is likely to cause # a clash. 
# if is_global is True, then the name is a global variable that may # overwrite a builtin - eg, "None" if className[:2]=='__': return demunge_leading_underscores(className) elif className == 'None': # assign to None is evil (and SyntaxError in 2.4, even though # iskeyword says False there) - note that if it was a global # it would get picked up below className = 'NONE' elif iskeyword(className): # most keywords are lower case (except True, False etc in py3k) ret = className.capitalize() # but those which aren't get forced upper. if ret == className: ret = ret.upper() return ret elif is_global and hasattr(__builtins__, className): # builtins may be mixed case. If capitalizing it doesn't change it, # force to all uppercase (eg, "None", "True" become "NONE", "TRUE" ret = className.capitalize() if ret==className: # didn't change - force all uppercase. ret = ret.upper() return ret # Strip non printable chars return ''.join([char for char in className if char in valid_identifier_chars]) # Given a default value passed by a type library, return a string with # an appropriate repr() for the type. # Takes a raw ELEMDESC and returns a repr string, or None # (NOTE: The string itself may be '"None"', which is valid, and different to None. # XXX - To do: Dates are probably screwed, but can they come in? def MakeDefaultArgRepr(defArgVal): try: inOut = defArgVal[1] except IndexError: # something strange - assume is in param. inOut = pythoncom.PARAMFLAG_FIN if inOut & pythoncom.PARAMFLAG_FHASDEFAULT: # times need special handling... val = defArgVal[2] if isinstance(val, datetime.datetime): # VARIANT <-> SYSTEMTIME conversions always lose any sub-second # resolution, so just use a 'timetuple' here. return repr(tuple(val.utctimetuple())) if type(val) is TimeType: # must be the 'old' pywintypes time object... 
year=val.year; month=val.month; day=val.day; hour=val.hour; minute=val.minute; second=val.second; msec=val.msec return "pywintypes.Time((%(year)d, %(month)d, %(day)d, %(hour)d, %(minute)d, %(second)d,0,0,0,%(msec)d))" % locals() return repr(val) return None def BuildCallList(fdesc, names, defNamedOptArg, defNamedNotOptArg, defUnnamedArg, defOutArg, is_comment = False): "Builds a Python declaration for a method." # Names[0] is the func name - param names are from 1. numArgs = len(fdesc[2]) numOptArgs = fdesc[6] strval = '' if numOptArgs==-1: # Special value that says "var args after here" firstOptArg = numArgs numArgs = numArgs - 1 else: firstOptArg = numArgs - numOptArgs for arg in range(numArgs): try: argName = names[arg+1] namedArg = argName is not None except IndexError: namedArg = 0 if not namedArg: argName = "arg%d" % (arg) thisdesc = fdesc[2][arg] # See if the IDL specified a default value defArgVal = MakeDefaultArgRepr(thisdesc) if defArgVal is None: # Out params always get their special default if thisdesc[1] & (pythoncom.PARAMFLAG_FOUT | pythoncom.PARAMFLAG_FIN) == pythoncom.PARAMFLAG_FOUT: defArgVal = defOutArg else: # Unnamed arg - always allow default values. if namedArg: # Is a named argument if arg >= firstOptArg: defArgVal = defNamedOptArg else: defArgVal = defNamedNotOptArg else: defArgVal = defUnnamedArg argName = MakePublicAttributeName(argName) # insanely long lines with an 'encoding' flag crashes python 2.4.0 # keep 5 args per line # This may still fail if the arg names are insane, but that seems # unlikely. See also _BuildArgList() if (arg+1) % 5 == 0: strval = strval + "\n" if is_comment: strval = strval + "#" strval = strval + "\t\t\t" strval = strval + ", " + argName if defArgVal: strval = strval + "=" + defArgVal if numOptArgs==-1: strval = strval + ", *" + names[-1] return strval if __name__=='__main__': print("Use 'makepy.py' to generate Python code - this module is just a helper")
apache-2.0
johngian/remo
vendor-local/lib/python/tablib/packages/openpyxl/shared/date_time.py
61
5956
# file openpyxl/shared/date_time.py # Copyright (c) 2010 openpyxl # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # # @license: http://www.opensource.org/licenses/mit-license.php # @author: Eric Gazoni """Manage Excel date weirdness.""" # Python stdlib imports from __future__ import division from math import floor import calendar import datetime import time import re # constants W3CDTF_FORMAT = '%Y-%m-%dT%H:%M:%SZ' RE_W3CDTF = '(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})(.(\d{2}))?Z' EPOCH = datetime.datetime.utcfromtimestamp(0) def datetime_to_W3CDTF(dt): """Convert from a datetime to a timestamp string.""" return datetime.datetime.strftime(dt, W3CDTF_FORMAT) def W3CDTF_to_datetime(formatted_string): """Convert from a timestamp string to a datetime object.""" match = re.match(RE_W3CDTF,formatted_string) digits = map(int, match.groups()[:6]) return datetime.datetime(*digits) class SharedDate(object): """Date formatting utilities for Excel with shared state. 
Excel has a two primary date tracking schemes: Windows - Day 1 == 1900-01-01 Mac - Day 1 == 1904-01-01 SharedDate stores which system we are using and converts dates between Python and Excel accordingly. """ CALENDAR_WINDOWS_1900 = 1900 CALENDAR_MAC_1904 = 1904 datetime_object_type = 'DateTime' def __init__(self): self.excel_base_date = self.CALENDAR_WINDOWS_1900 def datetime_to_julian(self, date): """Convert from python datetime to excel julian date representation.""" if isinstance(date, datetime.datetime): return self.to_julian(date.year, date.month, date.day, \ hours=date.hour, minutes=date.minute, seconds=date.second) elif isinstance(date, datetime.date): return self.to_julian(date.year, date.month, date.day) def to_julian(self, year, month, day, hours=0, minutes=0, seconds=0): """Convert from Python date to Excel JD.""" # explicitly disallow bad years # Excel 2000 treats JD=0 as 1/0/1900 (buggy, disallow) # Excel 2000 treats JD=2958466 as a bad date (Y10K bug!) if year < 1900 or year > 10000: msg = 'Year not supported by Excel: %s' % year raise ValueError(msg) if self.excel_base_date == self.CALENDAR_WINDOWS_1900: # Fudge factor for the erroneous fact that the year 1900 is # treated as a Leap Year in MS Excel. 
This affects every date # following 28th February 1900 if year == 1900 and month <= 2: excel_1900_leap_year = False else: excel_1900_leap_year = True excel_base_date = 2415020 else: raise NotImplementedError('Mac dates are not yet supported.') #excel_base_date = 2416481 #excel_1900_leap_year = False # Julian base date adjustment if month > 2: month = month - 3 else: month = month + 9 year -= 1 # Calculate the Julian Date, then subtract the Excel base date # JD 2415020 = 31 - Dec - 1899 -> Excel Date of 0 century, decade = int(str(year)[:2]), int(str(year)[2:]) excel_date = floor(146097 * century / 4) + \ floor((1461 * decade) / 4) + floor((153 * month + 2) / 5) + \ day + 1721119 - excel_base_date if excel_1900_leap_year: excel_date += 1 # check to ensure that we exclude 2/29/1900 as a possible value if self.excel_base_date == self.CALENDAR_WINDOWS_1900 \ and excel_date == 60: msg = 'Error: Excel believes 1900 was a leap year' raise ValueError(msg) excel_time = ((hours * 3600) + (minutes * 60) + seconds) / 86400 return excel_date + excel_time def from_julian(self, value=0): """Convert from the Excel JD back to a date""" if self.excel_base_date == self.CALENDAR_WINDOWS_1900: excel_base_date = 25569 if value < 60: excel_base_date -= 1 elif value == 60: msg = 'Error: Excel believes 1900 was a leap year' raise ValueError(msg) else: raise NotImplementedError('Mac dates are not yet supported.') #excel_base_date = 24107 if value >= 1: utc_days = value - excel_base_date return EPOCH + datetime.timedelta(days=utc_days) elif value >= 0: hours = floor(value * 24) mins = floor(value * 24 * 60) - floor(hours * 60) secs = floor(value * 24 * 60 * 60) - floor(hours * 60 * 60) - \ floor(mins * 60) return datetime.time(int(hours), int(mins), int(secs)) else: msg = 'Negative dates (%s) are not supported' % value raise ValueError(msg)
bsd-3-clause
ofanoyi/scrapy
scrapy/contrib/spiders/feed.py
18
5599
""" This module implements the XMLFeedSpider which is the recommended spider to use for scraping from an XML feed. See documentation in docs/topics/spiders.rst """ from scrapy.spider import Spider from scrapy.item import BaseItem from scrapy.http import Request from scrapy.utils.iterators import xmliter, csviter from scrapy.utils.spider import iterate_spider_output from scrapy.selector import Selector from scrapy.exceptions import NotConfigured, NotSupported class XMLFeedSpider(Spider): """ This class intends to be the base class for spiders that scrape from XML feeds. You can choose whether to parse the file using the 'iternodes' iterator, an 'xml' selector, or an 'html' selector. In most cases, it's convenient to use iternodes, since it's a faster and cleaner. """ iterator = 'iternodes' itertag = 'item' namespaces = () def process_results(self, response, results): """This overridable method is called for each result (item or request) returned by the spider, and it's intended to perform any last time processing required before returning the results to the framework core, for example setting the item GUIDs. It receives a list of results and the response which originated that results. It must return a list of results (Items or Requests). """ return results def adapt_response(self, response): """You can override this function in order to make any changes you want to into the feed before parsing it. This function must return a response. """ return response def parse_node(self, response, selector): """This method must be overriden with your custom spider functionality""" if hasattr(self, 'parse_item'): # backward compatibility return self.parse_item(response, selector) raise NotImplementedError def parse_nodes(self, response, nodes): """This method is called for the nodes matching the provided tag name (itertag). Receives the response and an Selector for each node. Overriding this method is mandatory. Otherwise, you spider won't work. 
This method must return either a BaseItem, a Request, or a list containing any of them. """ for selector in nodes: ret = iterate_spider_output(self.parse_node(response, selector)) for result_item in self.process_results(response, ret): yield result_item def parse(self, response): if not hasattr(self, 'parse_node'): raise NotConfigured('You must define parse_node method in order to scrape this XML feed') response = self.adapt_response(response) if self.iterator == 'iternodes': nodes = self._iternodes(response) elif self.iterator == 'xml': selector = Selector(response, type='xml') self._register_namespaces(selector) nodes = selector.xpath('//%s' % self.itertag) elif self.iterator == 'html': selector = Selector(response, type='html') self._register_namespaces(selector) nodes = selector.xpath('//%s' % self.itertag) else: raise NotSupported('Unsupported node iterator') return self.parse_nodes(response, nodes) def _iternodes(self, response): for node in xmliter(response, self.itertag): self._register_namespaces(node) yield node def _register_namespaces(self, selector): for (prefix, uri) in self.namespaces: selector.register_namespace(prefix, uri) class CSVFeedSpider(Spider): """Spider for parsing CSV feeds. It receives a CSV file in a response; iterates through each of its rows, and calls parse_row with a dict containing each field's data. You can set some options regarding the CSV file, such as the delimiter and the file's headers. 
""" delimiter = None # When this is None, python's csv module's default delimiter is used headers = None def process_results(self, response, results): """This method has the same purpose as the one in XMLFeedSpider""" return results def adapt_response(self, response): """This method has the same purpose as the one in XMLFeedSpider""" return response def parse_row(self, response, row): """This method must be overriden with your custom spider functionality""" raise NotImplementedError def parse_rows(self, response): """Receives a response and a dict (representing each row) with a key for each provided (or detected) header of the CSV file. This spider also gives the opportunity to override adapt_response and process_results methods for pre and post-processing purposes. """ for row in csviter(response, self.delimiter, self.headers): ret = self.parse_row(response, row) if isinstance(ret, (BaseItem, Request)): ret = [ret] if not isinstance(ret, (list, tuple)): raise TypeError('You cannot return an "%s" object from a spider' % type(ret).__name__) for result_item in self.process_results(response, ret): yield result_item def parse(self, response): if not hasattr(self, 'parse_row'): raise NotConfigured('You must define parse_row method in order to scrape this CSV feed') response = self.adapt_response(response) return self.parse_rows(response)
bsd-3-clause
pamu/FooService
FooService1/project/target/node-modules/webjars/npm/node_modules/node-gyp/gyp/pylib/gyp/input_test.py
604
3207
#!/usr/bin/env python # Copyright 2013 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Unit tests for the input.py file.""" import gyp.input import unittest import sys class TestFindCycles(unittest.TestCase): def setUp(self): self.nodes = {} for x in ('a', 'b', 'c', 'd', 'e'): self.nodes[x] = gyp.input.DependencyGraphNode(x) def _create_dependency(self, dependent, dependency): dependent.dependencies.append(dependency) dependency.dependents.append(dependent) def test_no_cycle_empty_graph(self): for label, node in self.nodes.iteritems(): self.assertEquals([], node.FindCycles()) def test_no_cycle_line(self): self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['b'], self.nodes['c']) self._create_dependency(self.nodes['c'], self.nodes['d']) for label, node in self.nodes.iteritems(): self.assertEquals([], node.FindCycles()) def test_no_cycle_dag(self): self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['a'], self.nodes['c']) self._create_dependency(self.nodes['b'], self.nodes['c']) for label, node in self.nodes.iteritems(): self.assertEquals([], node.FindCycles()) def test_cycle_self_reference(self): self._create_dependency(self.nodes['a'], self.nodes['a']) self.assertEquals([(self.nodes['a'], self.nodes['a'])], self.nodes['a'].FindCycles()) def test_cycle_two_nodes(self): self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['b'], self.nodes['a']) self.assertEquals([(self.nodes['a'], self.nodes['b'], self.nodes['a'])], self.nodes['a'].FindCycles()) self.assertEquals([(self.nodes['b'], self.nodes['a'], self.nodes['b'])], self.nodes['b'].FindCycles()) def test_two_cycles(self): self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['b'], self.nodes['a']) self._create_dependency(self.nodes['b'], self.nodes['c']) 
self._create_dependency(self.nodes['c'], self.nodes['b']) cycles = self.nodes['a'].FindCycles() self.assertTrue( (self.nodes['a'], self.nodes['b'], self.nodes['a']) in cycles) self.assertTrue( (self.nodes['b'], self.nodes['c'], self.nodes['b']) in cycles) self.assertEquals(2, len(cycles)) def test_big_cycle(self): self._create_dependency(self.nodes['a'], self.nodes['b']) self._create_dependency(self.nodes['b'], self.nodes['c']) self._create_dependency(self.nodes['c'], self.nodes['d']) self._create_dependency(self.nodes['d'], self.nodes['e']) self._create_dependency(self.nodes['e'], self.nodes['a']) self.assertEquals([(self.nodes['a'], self.nodes['b'], self.nodes['c'], self.nodes['d'], self.nodes['e'], self.nodes['a'])], self.nodes['a'].FindCycles()) if __name__ == '__main__': unittest.main()
apache-2.0
korbenzhang/vim-ycm-win
third_party/requests/requests/packages/urllib3/response.py
316
10537
# urllib3/response.py # Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) # # This module is part of urllib3 and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php import logging import zlib import io from .exceptions import DecodeError from .packages.six import string_types as basestring, binary_type from .util import is_fp_closed log = logging.getLogger(__name__) class DeflateDecoder(object): def __init__(self): self._first_try = True self._data = binary_type() self._obj = zlib.decompressobj() def __getattr__(self, name): return getattr(self._obj, name) def decompress(self, data): if not self._first_try: return self._obj.decompress(data) self._data += data try: return self._obj.decompress(data) except zlib.error: self._first_try = False self._obj = zlib.decompressobj(-zlib.MAX_WBITS) try: return self.decompress(self._data) finally: self._data = None def _get_decoder(mode): if mode == 'gzip': return zlib.decompressobj(16 + zlib.MAX_WBITS) return DeflateDecoder() class HTTPResponse(io.IOBase): """ HTTP Response container. Backwards-compatible to httplib's HTTPResponse but the response ``body`` is loaded and decoded on-demand when the ``data`` property is accessed. Extra parameters for behaviour not present in httplib.HTTPResponse: :param preload_content: If True, the response's body will be preloaded during construction. :param decode_content: If True, attempts to decode specific content-encoding's based on headers (like 'gzip' and 'deflate') will be skipped and raw data will be used instead. :param original_response: When this HTTPResponse wrapper is generated from an httplib.HTTPResponse object, it's convenient to include the original for debug purposes. It's otherwise unused. 
""" CONTENT_DECODERS = ['gzip', 'deflate'] REDIRECT_STATUSES = [301, 302, 303, 307, 308] def __init__(self, body='', headers=None, status=0, version=0, reason=None, strict=0, preload_content=True, decode_content=True, original_response=None, pool=None, connection=None): self.headers = headers or {} self.status = status self.version = version self.reason = reason self.strict = strict self.decode_content = decode_content self._decoder = None self._body = body if body and isinstance(body, basestring) else None self._fp = None self._original_response = original_response self._fp_bytes_read = 0 self._pool = pool self._connection = connection if hasattr(body, 'read'): self._fp = body if preload_content and not self._body: self._body = self.read(decode_content=decode_content) def get_redirect_location(self): """ Should we redirect and where to? :returns: Truthy redirect location string if we got a redirect status code and valid location. ``None`` if redirect status and no location. ``False`` if not a redirect status code. """ if self.status in self.REDIRECT_STATUSES: return self.headers.get('location') return False def release_conn(self): if not self._pool or not self._connection: return self._pool._put_conn(self._connection) self._connection = None @property def data(self): # For backwords-compat with earlier urllib3 0.4 and earlier. if self._body: return self._body if self._fp: return self.read(cache_content=True) def tell(self): """ Obtain the number of bytes pulled over the wire so far. May differ from the amount of content returned by :meth:``HTTPResponse.read`` if bytes are encoded on the wire (e.g, compressed). """ return self._fp_bytes_read def read(self, amt=None, decode_content=None, cache_content=False): """ Similar to :meth:`httplib.HTTPResponse.read`, but with two additional parameters: ``decode_content`` and ``cache_content``. :param amt: How much of the content to read. 
If specified, caching is skipped because it doesn't make sense to cache partial content as the full response. :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. :param cache_content: If True, will save the returned data such that the same result is returned despite of the state of the underlying file object. This is useful if you want the ``.data`` property to continue working after having ``.read()`` the file object. (Overridden if ``amt`` is set.) """ # Note: content-encoding value should be case-insensitive, per RFC 2616 # Section 3.5 content_encoding = self.headers.get('content-encoding', '').lower() if self._decoder is None: if content_encoding in self.CONTENT_DECODERS: self._decoder = _get_decoder(content_encoding) if decode_content is None: decode_content = self.decode_content if self._fp is None: return flush_decoder = False try: if amt is None: # cStringIO doesn't like amt=None data = self._fp.read() flush_decoder = True else: cache_content = False data = self._fp.read(amt) if amt != 0 and not data: # Platform-specific: Buggy versions of Python. # Close the connection when no data is returned # # This is redundant to what httplib/http.client _should_ # already do. However, versions of python released before # December 15, 2012 (http://bugs.python.org/issue16298) do not # properly close the connection in all cases. There is no harm # in redundantly calling close. self._fp.close() flush_decoder = True self._fp_bytes_read += len(data) try: if decode_content and self._decoder: data = self._decoder.decompress(data) except (IOError, zlib.error) as e: raise DecodeError( "Received response with content-encoding: %s, but " "failed to decode it." 
% content_encoding, e) if flush_decoder and decode_content and self._decoder: buf = self._decoder.decompress(binary_type()) data += buf + self._decoder.flush() if cache_content: self._body = data return data finally: if self._original_response and self._original_response.isclosed(): self.release_conn() def stream(self, amt=2**16, decode_content=None): """ A generator wrapper for the read() method. A call will block until ``amt`` bytes have been read from the connection or until the connection is closed. :param amt: How much of the content to read. The generator will return up to much data per iteration, but may return less. This is particularly likely when using compressed data. However, the empty string will never be returned. :param decode_content: If True, will attempt to decode the body based on the 'content-encoding' header. """ while not is_fp_closed(self._fp): data = self.read(amt=amt, decode_content=decode_content) if data: yield data @classmethod def from_httplib(ResponseCls, r, **response_kw): """ Given an :class:`httplib.HTTPResponse` instance ``r``, return a corresponding :class:`urllib3.response.HTTPResponse` object. Remaining parameters are passed to the HTTPResponse constructor, along with ``original_response=r``. """ # Normalize headers between different versions of Python headers = {} for k, v in r.getheaders(): # Python 3: Header keys are returned capitalised k = k.lower() has_value = headers.get(k) if has_value: # Python 3: Repeating header keys are unmerged. 
v = ', '.join([has_value, v]) headers[k] = v # HTTPResponse objects in Python 3 don't have a .strict attribute strict = getattr(r, 'strict', 0) return ResponseCls(body=r, headers=headers, status=r.status, version=r.version, reason=r.reason, strict=strict, original_response=r, **response_kw) # Backwards-compatibility methods for httplib.HTTPResponse def getheaders(self): return self.headers def getheader(self, name, default=None): return self.headers.get(name, default) # Overrides from io.IOBase def close(self): if not self.closed: self._fp.close() @property def closed(self): if self._fp is None: return True elif hasattr(self._fp, 'closed'): return self._fp.closed elif hasattr(self._fp, 'isclosed'): # Python 2 return self._fp.isclosed() else: return True def fileno(self): if self._fp is None: raise IOError("HTTPResponse has no file to get a fileno from") elif hasattr(self._fp, "fileno"): return self._fp.fileno() else: raise IOError("The file-like object this HTTPResponse is wrapped " "around has no file descriptor") def flush(self): if self._fp is not None and hasattr(self._fp, 'flush'): return self._fp.flush() def readable(self): return True
apache-2.0
benslavin/heroku-buildpack-python
vendor/virtualenv-1.7.2/virtualenv_embedded/distribute_setup.py
20
16313
#!python """Bootstrap distribute installation If you want to use setuptools in your package's setup.py, just include this file in the same directory with it, and add this to the top of your setup.py:: from distribute_setup import use_setuptools use_setuptools() If you want to require a specific version of setuptools, set a download mirror, or use an alternate download directory, you can do so by supplying the appropriate options to ``use_setuptools()``. This file can also be run as a script to install or upgrade setuptools. """ import os import shutil import sys import time import fnmatch import tempfile import tarfile from distutils import log try: from site import USER_SITE except ImportError: USER_SITE = None try: import subprocess def _python_cmd(*args): args = (sys.executable,) + args return subprocess.call(args) == 0 except ImportError: # will be used for python 2.3 def _python_cmd(*args): args = (sys.executable,) + args # quoting arguments if windows if sys.platform == 'win32': def quote(arg): if ' ' in arg: return '"%s"' % arg return arg args = [quote(arg) for arg in args] return os.spawnl(os.P_WAIT, sys.executable, *args) == 0 DEFAULT_VERSION = "0.6.27" DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/" SETUPTOOLS_FAKED_VERSION = "0.6c11" SETUPTOOLS_PKG_INFO = """\ Metadata-Version: 1.0 Name: setuptools Version: %s Summary: xxxx Home-page: xxx Author: xxx Author-email: xxx License: xxx Description: xxx """ % SETUPTOOLS_FAKED_VERSION def _install(tarball, install_args=()): # extracting the tarball tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) tar = tarfile.open(tarball) _extractall(tar) tar.close() # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) # installing log.warn('Installing Distribute') if not _python_cmd('setup.py', 'install', *install_args): log.warn('Something went wrong during the 
installation.') log.warn('See the error message above.') finally: os.chdir(old_wd) shutil.rmtree(tmpdir) def _build_egg(egg, tarball, to_dir): # extracting the tarball tmpdir = tempfile.mkdtemp() log.warn('Extracting in %s', tmpdir) old_wd = os.getcwd() try: os.chdir(tmpdir) tar = tarfile.open(tarball) _extractall(tar) tar.close() # going in the directory subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) os.chdir(subdir) log.warn('Now working in %s', subdir) # building an egg log.warn('Building a Distribute egg in %s', to_dir) _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) finally: os.chdir(old_wd) shutil.rmtree(tmpdir) # returning the result log.warn(egg) if not os.path.exists(egg): raise IOError('Could not build the egg.') def _do_download(version, download_base, to_dir, download_delay): egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg' % (version, sys.version_info[0], sys.version_info[1])) if not os.path.exists(egg): tarball = download_setuptools(version, download_base, to_dir, download_delay) _build_egg(egg, tarball, to_dir) sys.path.insert(0, egg) import setuptools setuptools.bootstrap_install_from = egg def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, download_delay=15, no_fake=True): # making sure we use the absolute path to_dir = os.path.abspath(to_dir) was_imported = 'pkg_resources' in sys.modules or \ 'setuptools' in sys.modules try: try: import pkg_resources if not hasattr(pkg_resources, '_distribute'): if not no_fake: _fake_setuptools() raise ImportError except ImportError: return _do_download(version, download_base, to_dir, download_delay) try: pkg_resources.require("distribute>="+version) return except pkg_resources.VersionConflict: e = sys.exc_info()[1] if was_imported: sys.stderr.write( "The required version of distribute (>=%s) is not available,\n" "and can't be installed while this script is running. 
Please\n" "install a more recent version first, using\n" "'easy_install -U distribute'." "\n\n(Currently using %r)\n" % (version, e.args[0])) sys.exit(2) else: del pkg_resources, sys.modules['pkg_resources'] # reload ok return _do_download(version, download_base, to_dir, download_delay) except pkg_resources.DistributionNotFound: return _do_download(version, download_base, to_dir, download_delay) finally: if not no_fake: _create_fake_setuptools_pkg_info(to_dir) def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir, delay=15): """Download distribute from a specified location and return its filename `version` should be a valid distribute version number that is available as an egg for download under the `download_base` URL (which should end with a '/'). `to_dir` is the directory where the egg will be downloaded. `delay` is the number of seconds to pause before an actual download attempt. """ # making sure we use the absolute path to_dir = os.path.abspath(to_dir) try: from urllib.request import urlopen except ImportError: from urllib2 import urlopen tgz_name = "distribute-%s.tar.gz" % version url = download_base + tgz_name saveto = os.path.join(to_dir, tgz_name) src = dst = None if not os.path.exists(saveto): # Avoid repeated downloads try: log.warn("Downloading %s", url) src = urlopen(url) # Read/write all in one block, so we don't create a corrupt file # if the download is interrupted. 
data = src.read() dst = open(saveto, "wb") dst.write(data) finally: if src: src.close() if dst: dst.close() return os.path.realpath(saveto) def _no_sandbox(function): def __no_sandbox(*args, **kw): try: from setuptools.sandbox import DirectorySandbox if not hasattr(DirectorySandbox, '_old'): def violation(*args): pass DirectorySandbox._old = DirectorySandbox._violation DirectorySandbox._violation = violation patched = True else: patched = False except ImportError: patched = False try: return function(*args, **kw) finally: if patched: DirectorySandbox._violation = DirectorySandbox._old del DirectorySandbox._old return __no_sandbox def _patch_file(path, content): """Will backup the file then patch it""" existing_content = open(path).read() if existing_content == content: # already patched log.warn('Already patched.') return False log.warn('Patching...') _rename_path(path) f = open(path, 'w') try: f.write(content) finally: f.close() return True _patch_file = _no_sandbox(_patch_file) def _same_content(path, content): return open(path).read() == content def _rename_path(path): new_name = path + '.OLD.%s' % time.time() log.warn('Renaming %s into %s', path, new_name) os.rename(path, new_name) return new_name def _remove_flat_installation(placeholder): if not os.path.isdir(placeholder): log.warn('Unkown installation at %s', placeholder) return False found = False for file in os.listdir(placeholder): if fnmatch.fnmatch(file, 'setuptools*.egg-info'): found = True break if not found: log.warn('Could not locate setuptools*.egg-info') return log.warn('Removing elements out of the way...') pkg_info = os.path.join(placeholder, file) if os.path.isdir(pkg_info): patched = _patch_egg_dir(pkg_info) else: patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO) if not patched: log.warn('%s already patched.', pkg_info) return False # now let's move the files out of the way for element in ('setuptools', 'pkg_resources.py', 'site.py'): element = os.path.join(placeholder, element) if 
os.path.exists(element): _rename_path(element) else: log.warn('Could not find the %s element of the ' 'Setuptools distribution', element) return True _remove_flat_installation = _no_sandbox(_remove_flat_installation) def _after_install(dist): log.warn('After install bootstrap.') placeholder = dist.get_command_obj('install').install_purelib _create_fake_setuptools_pkg_info(placeholder) def _create_fake_setuptools_pkg_info(placeholder): if not placeholder or not os.path.exists(placeholder): log.warn('Could not find the install location') return pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1]) setuptools_file = 'setuptools-%s-py%s.egg-info' % \ (SETUPTOOLS_FAKED_VERSION, pyver) pkg_info = os.path.join(placeholder, setuptools_file) if os.path.exists(pkg_info): log.warn('%s already exists', pkg_info) return if not os.access(pkg_info, os.W_OK): log.warn("Don't have permissions to write %s, skipping", pkg_info) log.warn('Creating %s', pkg_info) f = open(pkg_info, 'w') try: f.write(SETUPTOOLS_PKG_INFO) finally: f.close() pth_file = os.path.join(placeholder, 'setuptools.pth') log.warn('Creating %s', pth_file) f = open(pth_file, 'w') try: f.write(os.path.join(os.curdir, setuptools_file)) finally: f.close() _create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info) def _patch_egg_dir(path): # let's check if it's already patched pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO') if os.path.exists(pkg_info): if _same_content(pkg_info, SETUPTOOLS_PKG_INFO): log.warn('%s already patched.', pkg_info) return False _rename_path(path) os.mkdir(path) os.mkdir(os.path.join(path, 'EGG-INFO')) pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO') f = open(pkg_info, 'w') try: f.write(SETUPTOOLS_PKG_INFO) finally: f.close() return True _patch_egg_dir = _no_sandbox(_patch_egg_dir) def _before_install(): log.warn('Before install bootstrap.') _fake_setuptools() def _under_prefix(location): if 'install' not in sys.argv: return True args = 
sys.argv[sys.argv.index('install')+1:] for index, arg in enumerate(args): for option in ('--root', '--prefix'): if arg.startswith('%s=' % option): top_dir = arg.split('root=')[-1] return location.startswith(top_dir) elif arg == option: if len(args) > index: top_dir = args[index+1] return location.startswith(top_dir) if arg == '--user' and USER_SITE is not None: return location.startswith(USER_SITE) return True def _fake_setuptools(): log.warn('Scanning installed packages') try: import pkg_resources except ImportError: # we're cool log.warn('Setuptools or Distribute does not seem to be installed.') return ws = pkg_resources.working_set try: setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools', replacement=False)) except TypeError: # old distribute API setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools')) if setuptools_dist is None: log.warn('No setuptools distribution found') return # detecting if it was already faked setuptools_location = setuptools_dist.location log.warn('Setuptools installation detected at %s', setuptools_location) # if --root or --preix was provided, and if # setuptools is not located in them, we don't patch it if not _under_prefix(setuptools_location): log.warn('Not patching, --root or --prefix is installing Distribute' ' in another location') return # let's see if its an egg if not setuptools_location.endswith('.egg'): log.warn('Non-egg installation') res = _remove_flat_installation(setuptools_location) if not res: return else: log.warn('Egg installation') pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO') if (os.path.exists(pkg_info) and _same_content(pkg_info, SETUPTOOLS_PKG_INFO)): log.warn('Already patched.') return log.warn('Patching...') # let's create a fake egg replacing setuptools one res = _patch_egg_dir(setuptools_location) if not res: return log.warn('Patched done.') _relaunch() def _relaunch(): log.warn('Relaunching...') # we have to relaunch the process # pip marker to avoid 
a relaunch bug if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']: sys.argv[0] = 'setup.py' args = [sys.executable] + sys.argv sys.exit(subprocess.call(args)) def _extractall(self, path=".", members=None): """Extract all members from the archive to the current working directory and set owner, modification time and permissions on directories afterwards. `path' specifies a different directory to extract to. `members' is optional and must be a subset of the list returned by getmembers(). """ import copy import operator from tarfile import ExtractError directories = [] if members is None: members = self for tarinfo in members: if tarinfo.isdir(): # Extract directories with a safe mode. directories.append(tarinfo) tarinfo = copy.copy(tarinfo) tarinfo.mode = 448 # decimal for oct 0700 self.extract(tarinfo, path) # Reverse sort directories. if sys.version_info < (2, 4): def sorter(dir1, dir2): return cmp(dir1.name, dir2.name) directories.sort(sorter) directories.reverse() else: directories.sort(key=operator.attrgetter('name'), reverse=True) # Set correct owner, mtime and filemode on directories. for tarinfo in directories: dirpath = os.path.join(path, tarinfo.name) try: self.chown(tarinfo, dirpath) self.utime(tarinfo, dirpath) self.chmod(tarinfo, dirpath) except ExtractError: e = sys.exc_info()[1] if self.errorlevel > 1: raise else: self._dbg(1, "tarfile: %s" % e) def _build_install_args(argv): install_args = [] user_install = '--user' in argv if user_install and sys.version_info < (2,6): log.warn("--user requires Python 2.6 or later") raise SystemExit(1) if user_install: install_args.append('--user') return install_args def main(argv, version=DEFAULT_VERSION): """Install or upgrade setuptools and EasyInstall""" tarball = download_setuptools() _install(tarball, _build_install_args(argv)) if __name__ == '__main__': main(sys.argv[1:])
mit
scottdangelo/RemoveVolumeMangerLocks
cinder/volume/drivers/netapp/dataontap/nfs_7mode.py
1
8344
# Copyright (c) 2012 NetApp, Inc. All rights reserved. # Copyright (c) 2014 Ben Swartzlander. All rights reserved. # Copyright (c) 2014 Navneet Singh. All rights reserved. # Copyright (c) 2014 Clinton Knight. All rights reserved. # Copyright (c) 2014 Alex Meade. All rights reserved. # Copyright (c) 2014 Bob Callaway. All rights reserved. # Copyright (c) 2015 Tom Barron. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Volume driver for NetApp NFS storage. """ import os from oslo_log import log as logging import six from cinder import exception from cinder.i18n import _ from cinder import utils from cinder.volume.drivers.netapp.dataontap.client import client_7mode from cinder.volume.drivers.netapp.dataontap import nfs_base from cinder.volume.drivers.netapp import options as na_opts from cinder.volume.drivers.netapp import utils as na_utils LOG = logging.getLogger(__name__) @six.add_metaclass(utils.TraceWrapperWithABCMetaclass) class NetApp7modeNfsDriver(nfs_base.NetAppNfsDriver): """NetApp NFS driver for Data ONTAP (7-mode).""" def __init__(self, *args, **kwargs): super(NetApp7modeNfsDriver, self).__init__(*args, **kwargs) self.configuration.append_config_values(na_opts.netapp_7mode_opts) def do_setup(self, context): """Do the customized set up on client if any for 7 mode.""" super(NetApp7modeNfsDriver, self).do_setup(context) self.zapi_client = client_7mode.Client( transport_type=self.configuration.netapp_transport_type, 
username=self.configuration.netapp_login, password=self.configuration.netapp_password, hostname=self.configuration.netapp_server_hostname, port=self.configuration.netapp_server_port, vfiler=self.configuration.netapp_vfiler) self.ssc_enabled = False def check_for_setup_error(self): """Checks if setup occurred properly.""" api_version = self.zapi_client.get_ontapi_version() if api_version: major, minor = api_version if major == 1 and minor < 9: msg = _("Unsupported Data ONTAP version." " Data ONTAP version 7.3.1 and above is supported.") raise exception.VolumeBackendAPIException(data=msg) else: msg = _("Data ONTAP API version could not be determined.") raise exception.VolumeBackendAPIException(data=msg) super(NetApp7modeNfsDriver, self).check_for_setup_error() def _clone_backing_file_for_volume(self, volume_name, clone_name, volume_id, share=None): """Clone backing file for Cinder volume.""" (_host_ip, export_path) = self._get_export_ip_path(volume_id, share) storage_path = self.zapi_client.get_actual_path_for_export(export_path) target_path = '%s/%s' % (storage_path, clone_name) self.zapi_client.clone_file('%s/%s' % (storage_path, volume_name), target_path) def _update_volume_stats(self): """Retrieve stats info from vserver.""" self._ensure_shares_mounted() LOG.debug('Updating volume stats') data = {} netapp_backend = 'NetApp_NFS_7mode_direct' backend_name = self.configuration.safe_get('volume_backend_name') data['volume_backend_name'] = backend_name or netapp_backend data['vendor_name'] = 'NetApp' data['driver_version'] = self.VERSION data['storage_protocol'] = 'nfs' data['pools'] = self._get_pool_stats() self._spawn_clean_cache_job() self.zapi_client.provide_ems(self, netapp_backend, self._app_version, server_type="7mode") self._stats = data def _get_pool_stats(self): """Retrieve pool (i.e. 
NFS share) stats info from SSC volumes.""" pools = [] for nfs_share in self._mounted_shares: capacity = self._get_share_capacity_info(nfs_share) pool = dict() pool['pool_name'] = nfs_share pool['QoS_support'] = False pool.update(capacity) thick = not self.configuration.nfs_sparsed_volumes pool['thick_provisioning_support'] = thick pool['thin_provisioning_support'] = not thick pools.append(pool) return pools def _shortlist_del_eligible_files(self, share, old_files): """Prepares list of eligible files to be deleted from cache.""" file_list = [] (_, export_path) = self._get_export_ip_path(share=share) exported_volume = self.zapi_client.get_actual_path_for_export( export_path) for old_file in old_files: path = os.path.join(exported_volume, old_file) u_bytes = self.zapi_client.get_file_usage(path) file_list.append((old_file, u_bytes)) LOG.debug('Shortlisted files eligible for deletion: %s', file_list) return file_list def _is_filer_ip(self, ip): """Checks whether ip is on the same filer.""" try: ifconfig = self.zapi_client.get_ifconfig() if_info = ifconfig.get_child_by_name('interface-config-info') if if_info: ifs = if_info.get_children() for intf in ifs: v4_addr = intf.get_child_by_name('v4-primary-address') if v4_addr: ip_info = v4_addr.get_child_by_name('ip-address-info') if ip_info: address = ip_info.get_child_content('address') if ip == address: return True else: continue except Exception: return False return False def _share_match_for_ip(self, ip, shares): """Returns the share that is served by ip. Multiple shares can have same dir path but can be served using different ips. It finds the share which is served by ip on same nfs server. 
""" if self._is_filer_ip(ip) and shares: for share in shares: ip_sh = share.split(':')[0] if self._is_filer_ip(ip_sh): LOG.debug('Share match found for ip %s', ip) return share LOG.debug('No share match found for ip %s', ip) return None def _is_share_clone_compatible(self, volume, share): """Checks if share is compatible with volume to host its clone.""" thin = self.configuration.nfs_sparsed_volumes return self._share_has_space_for_clone(share, volume['size'], thin) def _check_volume_type(self, volume, share, file_name, extra_specs): """Matches a volume type for share file.""" qos_policy_group = extra_specs.pop('netapp:qos_policy_group', None) \ if extra_specs else None if qos_policy_group: raise exception.ManageExistingVolumeTypeMismatch( reason=(_("Setting file qos policy group is not supported" " on this storage family and ontap version."))) volume_type = na_utils.get_volume_type_from_volume(volume) if volume_type and 'qos_spec_id' in volume_type: raise exception.ManageExistingVolumeTypeMismatch( reason=_("QoS specs are not supported" " on this storage family and ONTAP version.")) def _do_qos_for_volume(self, volume, extra_specs, cleanup=False): """Set QoS policy on backend from volume type information.""" # 7-mode DOT does not support QoS. return
apache-2.0
prds21/repository-barrialTV
lib/mechanize/_testcase.py
136
5086
import os
import shutil
import subprocess
import tempfile
import unittest


class SetupStack(object):
    """Collects teardown callbacks and runs them in LIFO order."""

    def __init__(self):
        self._on_teardown = []

    def add_teardown(self, teardown):
        """Register a zero-argument callable to be run by tear_down()."""
        self._on_teardown.append(teardown)

    def tear_down(self):
        # Reverse order: undo the most recent setup first.
        for func in reversed(self._on_teardown):
            func()


class TearDownConvenience(object):
    """Base class that owns a SetupStack unless one is supplied."""

    def __init__(self, setup_stack=None):
        self._own_setup_stack = setup_stack is None
        if setup_stack is None:
            setup_stack = SetupStack()
        self._setup_stack = setup_stack

    # only call this convenience method if no setup_stack was supplied to c'tor
    def tear_down(self):
        assert self._own_setup_stack
        self._setup_stack.tear_down()


class TempDirMaker(TearDownConvenience):
    """Creates temporary directories that are removed on tear down."""

    def make_temp_dir(self, dir_=None):
        """Create a temp dir (optionally inside dir_) and schedule its removal."""
        temp_dir = tempfile.mkdtemp(prefix="tmp-%s-" % self.__class__.__name__,
                                    dir=dir_)

        def tear_down():
            shutil.rmtree(temp_dir)

        self._setup_stack.add_teardown(tear_down)
        return temp_dir


class MonkeyPatcher(TearDownConvenience):
    """Temporarily replaces attributes / environment entries, restoring them
    on tear down."""

    # Sentinel meaning "this environment variable was / should be absent".
    Unset = object()

    def monkey_patch(self, obj, name, value):
        """Set obj.name = value; the original value is restored on tear down."""
        orig_value = getattr(obj, name)
        setattr(obj, name, value)

        def reverse_patch():
            setattr(obj, name, orig_value)

        self._setup_stack.add_teardown(reverse_patch)

    def _set_environ(self, env, name, value):
        # Treat Unset as "delete the variable" (missing keys are tolerated).
        if value is self.Unset:
            try:
                del env[name]
            except KeyError:
                pass
        else:
            env[name] = value

    def monkey_patch_environ(self, name, value, env=os.environ):
        """Set (or, with value=Unset, remove) an environment variable,
        restoring the previous state on tear down."""
        orig_value = env.get(name, self.Unset)
        self._set_environ(env, name, value)

        def reverse_patch():
            self._set_environ(env, name, orig_value)

        self._setup_stack.add_teardown(reverse_patch)


class FixtureFactory(object):
    """Creates fixtures from registered context managers, optionally caching
    them for the factory's lifetime."""

    def __init__(self):
        self._setup_stack = SetupStack()
        self._context_managers = {}
        self._fixtures = {}

    def register_context_manager(self, name, context_manager):
        self._context_managers[name] = context_manager

    def get_fixture(self, name, add_teardown):
        """Enter the named context manager; its __exit__ is scheduled via
        add_teardown."""
        context_manager = self._context_managers[name]
        fixture = context_manager.__enter__()
        add_teardown(lambda: context_manager.__exit__(None, None, None))
        return fixture

    def get_cached_fixture(self, name):
        """Like get_fixture, but reuse one instance per factory; torn down
        with the factory itself."""
        fixture = self._fixtures.get(name)
        if fixture is None:
            fixture = self.get_fixture(name, self._setup_stack.add_teardown)
            self._fixtures[name] = fixture
        return fixture

    def tear_down(self):
        self._setup_stack.tear_down()


class TestCase(unittest.TestCase):
    """unittest.TestCase with teardown-stack, temp-dir, monkey-patch and
    fixture conveniences."""

    def setUp(self):
        self._setup_stack = SetupStack()
        self._monkey_patcher = MonkeyPatcher(self._setup_stack)

    def tearDown(self):
        self._setup_stack.tear_down()

    # NOTE(review): the fixture methods below expect subclasses to provide a
    # ``fixture_factory`` attribute -- it is not set anywhere in this module.
    def register_context_manager(self, name, context_manager):
        return self.fixture_factory.register_context_manager(
            name, context_manager)

    def get_fixture(self, name):
        return self.fixture_factory.get_fixture(name, self.add_teardown)

    def get_cached_fixture(self, name):
        return self.fixture_factory.get_cached_fixture(name)

    def add_teardown(self, *args, **kwds):
        self._setup_stack.add_teardown(*args, **kwds)

    def make_temp_dir(self, *args, **kwds):
        return TempDirMaker(self._setup_stack).make_temp_dir(*args, **kwds)

    def monkey_patch(self, *args, **kwds):
        return self._monkey_patcher.monkey_patch(*args, **kwds)

    def monkey_patch_environ(self, *args, **kwds):
        return self._monkey_patcher.monkey_patch_environ(*args, **kwds)

    def assert_contains(self, container, containee):
        self.assertTrue(containee in container,
                        "%r not in %r" % (containee, container))

    def assert_less_than(self, got, expected):
        self.assertTrue(got < expected, "%r >= %r" % (got, expected))


# http://lackingrhoticity.blogspot.com/2009/01/testing-using-golden-files-in-python.html
class GoldenTestCase(TestCase):
    """Compares a directory of generated output against a golden directory
    using recursive diff; optionally launches meld to update the goldens."""

    run_meld = False

    def assert_golden(self, dir_got, dir_expect):
        assert os.path.exists(dir_expect), dir_expect
        proc = subprocess.Popen(["diff", "--recursive", "-u", "-N",
                                 "--exclude=.*", dir_expect, dir_got],
                                stdout=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        if len(stdout) > 0:
            if self.run_meld:
                # Put expected output on the right because that is the
                # side we usually edit.
                subprocess.call(["meld", dir_got, dir_expect])
            raise AssertionError(
                "Differences from golden files found.\n"
                "Try running with --meld to update golden files.\n"
                "%s" % stdout)
        # Fix: assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(proc.wait(), 0)
gpl-2.0
mixtile/loftq-linux
tools/perf/util/setup.py
97
1405
#!/usr/bin/python2 from distutils.core import setup, Extension from os import getenv from distutils.command.build_ext import build_ext as _build_ext from distutils.command.install_lib import install_lib as _install_lib class build_ext(_build_ext): def finalize_options(self): _build_ext.finalize_options(self) self.build_lib = build_lib self.build_temp = build_tmp class install_lib(_install_lib): def finalize_options(self): _install_lib.finalize_options(self) self.build_dir = build_lib cflags = ['-fno-strict-aliasing', '-Wno-write-strings'] cflags += getenv('CFLAGS', '').split() build_lib = getenv('PYTHON_EXTBUILD_LIB') build_tmp = getenv('PYTHON_EXTBUILD_TMP') perf = Extension('perf', sources = ['util/python.c', 'util/ctype.c', 'util/evlist.c', 'util/evsel.c', 'util/cpumap.c', 'util/thread_map.c', 'util/util.c', 'util/xyarray.c', 'util/cgroup.c', 'util/debugfs.c'], include_dirs = ['util/include'], extra_compile_args = cflags, ) setup(name='perf', version='0.1', description='Interface with the Linux profiling infrastructure', author='Arnaldo Carvalho de Melo', author_email='acme@redhat.com', license='GPLv2', url='http://perf.wiki.kernel.org', ext_modules=[perf], cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
gpl-2.0
openstack/networking-plumgrid
networking_plumgrid/neutronclient/policy/policy_tag.py
1
5554
# Copyright 2016 OpenStack Foundation.
# All Rights Reserved
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
#

from networking_plumgrid._i18n import _
from neutronclient.common import extension


class PolicyTag(extension.NeutronClientExtension):
    """Client-side resource definition for PLUMgrid policy tags."""

    resource = 'policy_tag'
    resource_plural = 'policy_tags'
    path = 'policy-tags'
    object_path = '/%s' % path
    resource_path = '/%s/%%s' % path
    versions = ['2.0']

    def args2body(self, parsed_args):
        """Translate parsed CLI arguments into a policy-tag request body.

        Validates the tag type and the tag-type/attribute combinations and
        raises on any invalid combination.
        """
        try:
            if parsed_args.name:
                ptag_name = parsed_args.name
                body = {'policy_tag': {'name': ptag_name}}
            else:
                body = {'policy_tag': {}}
            if parsed_args.tag_type:
                if (str(parsed_args.tag_type).lower() == 'fip' or
                        str(parsed_args.tag_type).lower() == 'dot1q' or
                        str(parsed_args.tag_type).lower() == 'nsh'):
                    body['policy_tag']['tag_type'] = parsed_args.tag_type
                else:
                    raise Exception("Supported values for policy tag type are:"
                                    " 'fip', 'dot1q', 'nsh'")
            else:
                raise Exception("Policy tag type is required to be specified. "
                                "Supported values for policy tag type are:"
                                " 'fip', 'dot1q', 'nsh'")
            if parsed_args.tag_id:
                body['policy_tag']['tag_id'] = parsed_args.tag_id
            if parsed_args.router_id:
                body['policy_tag']['router_id'] = parsed_args.router_id
            if parsed_args.floatingip_id:
                body['policy_tag']['floatingip_id'] = parsed_args.floatingip_id
            # Cross-field validation: fip needs a floating IP, dot1q/nsh need
            # a tag id, and router_id/floatingip_id only make sense with fip.
            if (parsed_args.tag_type and
                    parsed_args.tag_type.lower() == 'fip' and
                    not parsed_args.floatingip_id):
                raise Exception("Floating IP UUID must be specified when "
                                "using tag type=fip")
            if (parsed_args.tag_type and
                    (parsed_args.tag_type.lower() == 'dot1q' or
                     parsed_args.tag_type.lower() == 'nsh') and
                    not parsed_args.tag_id):
                raise Exception("ID in range (257-2047) must be specified when "
                                "using tag type=dot1q or type=nsh")
            if (parsed_args.router_id and
                    parsed_args.tag_type.lower() != 'fip'):
                raise Exception("Tag type='fip' must be specified when using "
                                "Router ID")
            if (parsed_args.tag_type.lower() == 'fip' and parsed_args.tag_id):
                raise Exception("Tag type=='fip' does not support tag id.")
            if (parsed_args.floatingip_id and
                    parsed_args.tag_type.lower() != 'fip'):
                raise Exception('Floating ip cannot be associated with tag type:'
                                + parsed_args.tag_type.lower())
            return body
        except KeyError as err:
            raise Exception("KeyError: " + str(err))


class PolicyTagCreate(extension.ClientExtensionCreate, PolicyTag):
    """Create a Policy Tag."""

    shell_command = 'policy-tag-create'

    def add_known_arguments(self, parser):
        parser.add_argument(
            'name', metavar='<POLICY-TAG-NAME>',
            help=_('Descriptive name for policy tag.'))
        parser.add_argument(
            '--type',
            dest='tag_type',
            help=_('Type of policy tag. Options: fip, dot1q, nsh'))
        parser.add_argument(
            '--floating-ip',
            dest='floatingip_id',
            help=_('UUID of Floating IP to associate '
                   ' with the Policy Tag.'))
        parser.add_argument(
            '--tag-id',
            dest='tag_id',
            help=_('ID in range 257-2047 '))
        parser.add_argument(
            '--router-id',
            dest='router_id',
            help=_('Router ID to be specified in case '
                   'of multiple External Gateways, when '
                   'associating a Floating IP.'))

    def args2body(self, parsed_args):
        # Bug fix: the original called a module-level ``args2body`` that does
        # not exist (NameError at runtime); delegate to the base class
        # implementation instead, then add the tenant id.
        body = PolicyTag.args2body(self, parsed_args)
        if parsed_args.tenant_id:
            (body['policy_tag']
             ['tenant_id']) = parsed_args.tenant_id
        return body


class PolicyTagList(extension.ClientExtensionList, PolicyTag):
    """List policy tags that belong to a given tenant."""

    shell_command = 'policy-tag-list'
    list_columns = ['id', 'name', 'tag_type', 'tag_id', 'floating_ip_address']
    pagination_support = True
    sorting_support = True


class PolicyTagShow(extension.ClientExtensionShow, PolicyTag):
    """Show information of a given policy tag."""

    shell_command = 'policy-tag-show'


class PolicyTagDelete(extension.ClientExtensionDelete, PolicyTag):
    """Delete a given policy tag."""

    shell_command = 'policy-tag-delete'


class PolicyTagUpdate(extension.ClientExtensionUpdate, PolicyTag):
    """Update a given policy-tag."""

    shell_command = 'policy-tag-update'
apache-2.0
fredkingham/blog-of-fred
django/template/debug.py
83
3586
from django.template.base import Lexer, Parser, tag_re, NodeList, VariableNode, TemplateSyntaxError
from django.utils.encoding import force_unicode
from django.utils.html import escape
from django.utils.safestring import SafeData, EscapeData
from django.utils.formats import localize
from django.utils.timezone import localtime

# NOTE: Python 2 module ('except Exception, e' syntax). Debug variants of the
# template lexer/parser/nodes that annotate tokens, nodes and exceptions with
# their (origin, (start, end)) source position so errors can point back at the
# offending template text.


class DebugLexer(Lexer):
    def __init__(self, template_string, origin):
        super(DebugLexer, self).__init__(template_string, origin)

    def tokenize(self):
        "Return a list of tokens from a given template_string"
        result, upto = [], 0
        for match in tag_re.finditer(self.template_string):
            start, end = match.span()
            if start > upto:
                # Literal text between the previous tag and this one.
                result.append(self.create_token(self.template_string[upto:start], (upto, start), False))
                upto = start
            result.append(self.create_token(self.template_string[start:end], (start, end), True))
            upto = end
        last_bit = self.template_string[upto:]
        if last_bit:
            # Trailing literal text after the final tag.
            result.append(self.create_token(last_bit, (upto, upto + len(last_bit)), False))
        return result

    def create_token(self, token_string, source, in_tag):
        # Attach the (origin, span) pair so later errors can be located.
        token = super(DebugLexer, self).create_token(token_string, in_tag)
        token.source = self.origin, source
        return token


class DebugParser(Parser):
    def __init__(self, lexer):
        super(DebugParser, self).__init__(lexer)
        # Stack of (command, source) for currently-open block tags.
        self.command_stack = []

    def enter_command(self, command, token):
        self.command_stack.append( (command, token.source) )

    def exit_command(self):
        self.command_stack.pop()

    def error(self, token, msg):
        return self.source_error(token.source, msg)

    def source_error(self, source, msg):
        # Build (not raise) a TemplateSyntaxError carrying the source position.
        e = TemplateSyntaxError(msg)
        e.django_template_source = source
        return e

    def create_nodelist(self):
        return DebugNodeList()

    def create_variable_node(self, contents):
        return DebugVariableNode(contents)

    def extend_nodelist(self, nodelist, node, token):
        node.source = token.source
        super(DebugParser, self).extend_nodelist(nodelist, node, token)

    def unclosed_block_tag(self, parse_until):
        command, source = self.command_stack.pop()
        msg = "Unclosed tag '%s'. Looking for one of: %s " % (command, ', '.join(parse_until))
        raise self.source_error(source, msg)

    def compile_function_error(self, token, e):
        # Annotate only if some inner frame has not already done so.
        if not hasattr(e, 'django_template_source'):
            e.django_template_source = token.source


class DebugNodeList(NodeList):
    def render_node(self, node, context):
        try:
            return node.render(context)
        except Exception, e:
            # Tag the exception with the failing node's position, then re-raise.
            if not hasattr(e, 'django_template_source'):
                e.django_template_source = node.source
            raise


class DebugVariableNode(VariableNode):
    def render(self, context):
        try:
            output = self.filter_expression.resolve(context)
            output = localtime(output, use_tz=context.use_tz)
            output = localize(output, use_l10n=context.use_l10n)
            output = force_unicode(output)
        except UnicodeDecodeError:
            # Undecodable bytes render as empty rather than crashing the page.
            return ''
        except Exception, e:
            if not hasattr(e, 'django_template_source'):
                e.django_template_source = self.source
            raise
        if (context.autoescape and not isinstance(output, SafeData)) or isinstance(output, EscapeData):
            return escape(output)
        else:
            return output
bsd-3-clause
atopuzov/nitro-python
nssrc/com/citrix/netscaler/nitro/resource/config/network/__init__.py
4
1557
# Package index for the NITRO "network" configuration resources: __all__
# enumerates every submodule exported by a star-import of this package.
__all__ = ['Interface', 'arp', 'arpparam', 'bridgegroup', 'bridgegroup_binding', 'bridgegroup_nsip6_binding', 'bridgegroup_nsip_binding', 'bridgegroup_vlan_binding', 'bridgetable', 'channel', 'channel_binding', 'channel_interface_binding', 'ci', 'fis', 'fis_binding', 'fis_channel_binding', 'fis_interface_binding', 'forwardingsession', 'inat', 'inatparam', 'interfacepair', 'ip6tunnel', 'ip6tunnelparam', 'ipset', 'ipset_binding', 'ipset_nsip6_binding', 'ipset_nsip_binding', 'iptunnel', 'iptunnelparam', 'ipv6', 'l2param', 'l3param', 'l4param', 'lacp', 'linkset', 'linkset_binding', 'linkset_channel_binding', 'linkset_interface_binding', 'nat64', 'nd6', 'nd6ravariables', 'nd6ravariables_binding', 'nd6ravariables_onlinkipv6prefix_binding', 'netbridge', 'netbridge_binding', 'netbridge_iptunnel_binding', 'netbridge_nsip6_binding', 'netbridge_nsip_binding', 'netbridge_vlan_binding', 'netprofile', 'onlinkipv6prefix', 'ptp', 'rnat', 'rnat6', 'rnat6_binding', 'rnat6_nsip6_binding', 'rnatglobal_auditsyslogpolicy_binding', 'rnatglobal_binding', 'rnatparam', 'route', 'route6', 'rsskeytype', 'vlan', 'vlan_binding', 'vlan_channel_binding', 'vlan_interface_binding', 'vlan_linkset_binding', 'vlan_nsip6_binding', 'vlan_nsip_binding', 'vpath', 'vpathparam', 'vrid', 'vrid6', 'vrid6_binding', 'vrid6_interface_binding', 'vrid6_nsip6_binding', 'vrid6_nsip_binding', 'vrid_binding', 'vrid_interface_binding', 'vrid_nsip6_binding', 'vrid_nsip_binding', 'vridparam', 'vxlan', 'vxlan_binding', 'vxlan_iptunnel_binding', 'vxlan_nsip6_binding', 'vxlan_nsip_binding']
apache-2.0
rduivenvoorde/QGIS
python/plugins/db_manager/dlg_field_properties.py
30
3597
# -*- coding: utf-8 -*-

"""
***************************************************************************
    dlg_field_properties.py
    ---------------------
    Date                 : April 2012
    Copyright            : (C) 2012 by Giuseppe Sucameli
    Email                : brush dot tyler at gmail dot com
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""
from builtins import str

__author__ = 'Giuseppe Sucameli'
__date__ = 'April 2012'
__copyright__ = '(C) 2012, Giuseppe Sucameli'

from qgis.PyQt.QtWidgets import QDialog, QMessageBox

from .db_plugins.plugin import TableField

from .ui.ui_DlgFieldProperties import Ui_DbManagerDlgFieldProperties as Ui_Dialog


class DlgFieldProperties(QDialog, Ui_Dialog):
    # Dialog for creating/editing a single table field (name, type, length,
    # nullability, default value and, where supported, a comment).

    def __init__(self, parent=None, fld=None, table=None, db=None):
        QDialog.__init__(self, parent)
        self.fld = fld
        # Fall back to the explicit table/db arguments when the field does not
        # carry them itself.
        self.table = self.fld.table() if self.fld and self.fld.table() else table
        self.db = self.table.database() if self.table and self.table.database() else db
        self.setupUi(self)

        # Populate the type combo with the backend's supported field types.
        for item in self.db.connector.fieldTypes():
            self.cboType.addItem(item)

        supportCom = self.db.supportsComment()
        if not supportCom:
            # Hide the comment widgets when the backend cannot store comments.
            self.label_6.setVisible(False)
            self.editCom.setVisible(False)

        self.setField(fld)

        self.buttonBox.accepted.connect(self.onOK)

    def setField(self, fld):
        # Copy an existing field's attributes into the dialog widgets.
        if fld is None:
            return
        self.editName.setText(fld.name)
        self.cboType.setEditText(fld.dataType)
        if fld.modifier:
            self.editLength.setText(str(fld.modifier))
        self.chkNull.setChecked(not fld.notNull)
        if fld.hasDefault:
            self.editDefault.setText(fld.default)
        tab = self.table.name
        field = fld.name
        res = self.db.connector.getComment(tab, field)
        self.editCom.setText(res)  # Set comment value

    def getField(self, newCopy=False):
        # Build a TableField from the widget values; with newCopy=True a fresh
        # object is returned instead of mutating self.fld.
        fld = TableField(self.table) if not self.fld or newCopy else self.fld
        fld.name = self.editName.text()
        fld.dataType = self.cboType.currentText()
        fld.notNull = not self.chkNull.isChecked()
        fld.default = self.editDefault.text()
        fld.hasDefault = fld.default != ""
        fld.comment = self.editCom.text()
        # length field also used for geometry definition, so we should
        # not cast its value to int
        if self.editLength.text() != "":
            fld.modifier = self.editLength.text()
        else:
            fld.modifier = None
        return fld

    def onOK(self):
        """ first check whether everything's fine """
        fld = self.getField(True)  # don't change the original copy
        if fld.name == "":
            QMessageBox.critical(self, self.tr("DB Manager"), self.tr("Field name must not be empty."))
            return
        if fld.dataType == "":
            QMessageBox.critical(self, self.tr("DB Manager"), self.tr("Field type must not be empty."))
            return
        self.accept()
gpl-2.0
RobCranfill/weewx
bin/weedb/__init__.py
6
5597
#
#    Copyright (c) 2009-2015 Tom Keffer <tkeffer@gmail.com>
#
#    See the file LICENSE.txt for your full rights.
#
"""Middleware that sits above DBAPI and makes it a little more database independent.

Weedb generally follows the MySQL exception model. Specifically:
  - Operations on a non-existent database result in a weedb.OperationalError
    exception being raised.
  - Operations on a non-existent table result in a weedb.ProgrammingError
    exception being raised.
  - Select statements requesting non-existing columns result in a
    weedb.OperationalError exception being raised.
  - Attempt to add a duplicate key results in a weedb.IntegrityError
    exception being raised.
"""

import sys

# NOTE(review): this module targets Python 2 -- StandardError does not exist
# on Python 3. Kept as-is to preserve the py2 exception hierarchy.

# The exceptions that the weedb package can raise:
class DatabaseError(StandardError):
    """Base class of all weedb exceptions."""

class OperationalError(DatabaseError):
    """Runtime database errors."""

class ProgrammingError(DatabaseError):
    """SQL or other programming error."""

class DatabaseExists(DatabaseError):
    """Attempt to create a database that already exists"""

class NoDatabase(DatabaseError):
    """Operation attempted on a database that does not exist."""

class IntegrityError(DatabaseError):
    """Operation attempted involving the relational integrity of the database."""

# In what follows, the test whether a database dictionary has function "dict" is
# to get around a bug in ConfigObj. It seems to be unable to unpack (using the
# '**' notation) a ConfigObj dictionary into a function. By calling .dict() a
# regular dictionary is returned, which can be unpacked.

def create(db_dict):
    """Create a database. If it already exists, an exception of type
    weedb.DatabaseExists will be raised."""
    # Dynamically load the driver module named in db_dict and delegate.
    __import__(db_dict['driver'])
    driver_mod = sys.modules[db_dict['driver']]
    # See note above
    if hasattr(db_dict, "dict"):
        return driver_mod.create(**db_dict.dict())
    else:
        return driver_mod.create(**db_dict)

def connect(db_dict):
    """Return a connection to a database. If the database does not exist,
    an exception of type weedb.OperationalError will be raised."""
    __import__(db_dict['driver'])
    driver_mod = sys.modules[db_dict['driver']]
    # See note above
    if hasattr(db_dict, "dict"):
        return driver_mod.connect(**db_dict.dict())
    else:
        return driver_mod.connect(**db_dict)

def drop(db_dict):
    """Drop (delete) a database. If the database does not exist, the
    exception weedb.NoDatabase will be raised."""
    __import__(db_dict['driver'])
    driver_mod = sys.modules[db_dict['driver']]
    # See note above
    if hasattr(db_dict, "dict"):
        return driver_mod.drop(**db_dict.dict())
    else:
        return driver_mod.drop(**db_dict)

class Connection(object):
    """Abstract base class, representing a connection to a database."""

    def __init__(self, connection, database_name, dbtype):
        """Superclass should raise exception of type weedb.OperationalError
        if the database does not exist.

        connection:    the underlying DBAPI connection object
        database_name: name of the database connected to
        dbtype:        driver/database type identifier
        """
        self.connection = connection
        self.database_name = database_name
        self.dbtype = dbtype

    def cursor(self):
        """Returns an appropriate database cursor."""
        raise NotImplementedError

    def execute(self, sql_string, sql_tuple=()):
        """Execute a sql statement. This version does not return a cursor,
        so it can only be used for statements that do not return a result set."""
        cursor = self.cursor()
        try:
            cursor.execute(sql_string, sql_tuple)
        finally:
            # Always release the cursor, even if execute raises.
            cursor.close()

    def tables(self):
        """Returns a list of the tables in the database.
        Returns an empty list if the database has no tables in it."""
        raise NotImplementedError

    def genSchemaOf(self, table):
        """Generator function that returns a summary of the table's schema.
        It returns a 6-way tuple:
        (number, column_name, column_type, can_be_null, default_value, is_primary)

        Example: (2, 'mintime', 'INTEGER', True, None, False)"""
        raise NotImplementedError

    def columnsOf(self, table):
        """Returns a list of the column names in the specified table.
        Implementers should raise an exception of type weedb.ProgrammingError
        if the table does not exist."""
        raise NotImplementedError

    def get_variable(self, var_name):
        """Return a database specific operational variable. Generally, things
        like pragmas, or optimization-related variables.

        It returns a 2-way tuple:
            (variable-name, variable-value)
        If the variable does not exist, it returns None.
        """
        # Bug fix: the original did 'raise NotImplemented', which raises a
        # TypeError (NotImplemented is a constant, not an exception class).
        raise NotImplementedError

    def begin(self):
        raise NotImplementedError

    def commit(self):
        raise NotImplementedError

    def rollback(self):
        raise NotImplementedError

    def close(self):
        try:
            self.connection.close()
        except DatabaseError:
            # Best-effort close: ignore driver errors during shutdown.
            pass

class Transaction(object):
    """Class to be used to wrap transactions in a 'with' clause.

    Commits on clean exit, rolls back when the body raised."""

    def __init__(self, connection):
        self.connection = connection
        self.cursor = self.connection.cursor()

    def __enter__(self):
        self.connection.begin()
        return self.cursor

    def __exit__(self, etyp, einst, etb):
        if etyp is None:
            self.connection.commit()
        else:
            self.connection.rollback()
        try:
            self.cursor.close()
        except DatabaseError:
            pass
gpl-3.0
gohin/django
django/contrib/staticfiles/management/commands/runserver.py
248
1361
from django.conf import settings
from django.contrib.staticfiles.handlers import StaticFilesHandler
from django.core.management.commands.runserver import \
    Command as RunserverCommand


class Command(RunserverCommand):
    """Development ``runserver`` variant that can also serve static files."""

    help = "Starts a lightweight Web server for development and also serves static files."

    def add_arguments(self, parser):
        """Add the static-files-specific flags on top of runserver's own."""
        super(Command, self).add_arguments(parser)
        parser.add_argument(
            '--nostatic', dest='use_static_handler',
            action="store_false", default=True,
            help='Tells Django to NOT automatically serve static files at STATIC_URL.')
        parser.add_argument(
            '--insecure', dest='insecure_serving',
            action="store_true", default=False,
            help='Allows serving static files even if DEBUG is False.')

    def get_handler(self, *args, **options):
        """
        Returns the static files serving handler wrapping the default handler,
        if static files should be served. Otherwise just returns the default
        handler.
        """
        inner_handler = super(Command, self).get_handler(*args, **options)
        # Honour --nostatic: hand back the plain WSGI handler untouched.
        if not options.get('use_static_handler', True):
            return inner_handler
        # Serve static files only in DEBUG, unless --insecure was given.
        if settings.DEBUG or options.get('insecure_serving', False):
            return StaticFilesHandler(inner_handler)
        return inner_handler
bsd-3-clause
nmartensen/pandas
pandas/io/formats/common.py
16
1094
# -*- coding: utf-8 -*-
"""
Common helper methods used in different submodules of pandas.io.formats
"""


def get_level_lengths(levels, sentinel=''):
    """For each index in each level the function returns lengths of indexes.

    Parameters
    ----------
    levels : list of lists
        List of values on for level.
    sentinel : string, optional
        Value which states that no new index starts on there.

    Returns
    ----------
    Returns list of maps. For each level returns map of indexes (key is index
    in row and value is length of index).
    """
    if len(levels) == 0:
        return []

    # A slot stays "open" until some level shows a non-sentinel key there;
    # sentinel keys in open slots extend the current run.
    open_slot = [True for _ in levels[0]]

    result = []
    for level in levels:
        run_start = 0
        lengths = {}
        for i, key in enumerate(level):
            if open_slot[i] and key == sentinel:
                # Continuation of the run that began at run_start.
                continue
            open_slot[i] = False
            lengths[run_start] = i - run_start
            run_start = i
        # Close out the final run of this level.
        lengths[run_start] = len(level) - run_start
        result.append(lengths)

    return result
bsd-3-clause
BorisJeremic/Real-ESSI-Examples
analytic_solution/test_cases/27NodeBrick/cantilever_different_Poisson/NumberOfDivision4/PoissonRatio0.15/compare_essi_version.py
409
1085
#!/usr/bin/python import h5py import sys import numpy as np import os # automatically find the script directory. # script_dir=sys.argv[1] cur_dir=os.getcwd() sep='test_cases' test_DIR=cur_dir.split(sep,1)[0] scriptDIR=test_DIR+'compare_function' sys.path.append(scriptDIR) # import my own command line color function # from essi_max_disp_fun import find_max_disp from mycolor_fun import * print headblankline() print headblankline() print headstart(), "Original ESSI version information:" fin=open("original.log") for line in fin: if 'Version' in line: print headstep(), line, if 'Compiled' in line: print headstep(), line, if 'Time Now' in line: print headstep(), line, if not line: break print headblankline() print headstart(), "New ESSI version information:" fin=open("new.log") for line in fin: if 'Version' in line: print headstep(), line, if 'Compiled' in line: print headstep(), line, if 'Time Now' in line: print headstep(), line, if not line: break print headstart() print headblankline()
cc0-1.0
j-griffith/cinder
cinder/db/sqlalchemy/migrate_repo/versions/102_add_replication_status_to_groups_table.py
4
1082
#    Copyright (C) 2017 Dell Inc. or its subsidiaries.
#    All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from sqlalchemy import Column
from sqlalchemy import MetaData, String, Table


def upgrade(migrate_engine):
    """Add a nullable ``replication_status`` column to the ``groups`` table."""
    meta = MetaData()
    meta.bind = migrate_engine

    groups = Table('groups', meta, autoload=True)
    # Re-run safety: do nothing when the column already exists.
    if hasattr(groups.c, 'replication_status'):
        return
    replication_status = Column('replication_status', String(255),
                                nullable=True)
    groups.create_column(replication_status)
apache-2.0
TeamEOS/external_chromium_org
mojo/tools/pylib/transitive_hash.py
43
2767
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# NOTE: Python 2 script. Computes an order-independent combined hash of an
# executable and all shared libraries it (transitively) links against, by
# shelling out to the sha256sum and ldd utilities.

import logging
import subprocess
import sys

from hashlib import sha256
from os.path import basename, realpath

_logging = logging.getLogger()


# Based on/taken from
# http://code.activestate.com/recipes/578231-probably-the-fastest-memoization-decorator-in-the-/
# (with cosmetic changes).
def _memoize(f):
    """Memoization decorator for a function taking a single argument."""
    class Memoize(dict):
        def __missing__(self, key):
            rv = self[key] = f(key)
            return rv
    return Memoize().__getitem__


@_memoize
def _file_hash(filename):
    """Returns a string representing the hash of the given file."""
    _logging.debug("Hashing %s ...", filename)
    # sha256sum prints "<digest> <name>"; keep only the digest field.
    rv = subprocess.check_output(['sha256sum', '-b', filename]).split(None, 1)[0]
    _logging.debug(" => %s", rv)
    return rv


@_memoize
def _get_dependencies(filename):
    """Returns a list of filenames for files that the given file depends on."""
    _logging.debug("Getting dependencies for %s ...", filename)
    lines = subprocess.check_output(['ldd', filename]).splitlines()
    rv = []
    for line in lines:
        # ldd lines that resolve to a file contain an absolute path; lines
        # without a '/' (e.g. the vdso) are skipped.
        i = line.find('/')
        if i < 0:
            _logging.debug(" => no file found in line: %s", line)
            continue
        rv.append(line[i:].split(None, 1)[0])
    _logging.debug(" => %s", rv)
    return rv


def transitive_hash(filename):
    """Returns a string that represents the "transitive" hash of the given
    file. The transitive hash is a hash of the file and all the shared
    libraries on which it depends (done in an order-independent way)."""
    hashes = set()
    to_hash = [filename]
    # Breadth-unordered walk of the dependency graph; visited files are
    # detected by content hash, so duplicate paths cost nothing extra.
    while to_hash:
        current_filename = realpath(to_hash.pop())
        current_hash = _file_hash(current_filename)
        if current_hash in hashes:
            _logging.debug("Already seen %s (%s) ...", current_filename, current_hash)
            continue
        _logging.debug("Haven't seen %s (%s) ...", current_filename, current_hash)
        hashes.add(current_hash)
        to_hash.extend(_get_dependencies(current_filename))
    # Sorting makes the combined digest independent of traversal order.
    return sha256('|'.join(sorted(hashes))).hexdigest()


def main(argv):
    logging.basicConfig()
    # Uncomment to debug:
    # _logging.setLevel(logging.DEBUG)

    if len(argv) < 2:
        print """\
Usage: %s [file] ...

Prints the \"transitive\" hash of each (executable) file. The transitive hash
is a hash of the file and all the shared libraries on which it depends (done
in an order-independent way).""" % basename(argv[0])
        return 0

    rv = 0
    for filename in argv[1:]:
        try:
            print transitive_hash(filename), filename
        except:
            print "ERROR", filename
            rv = 1
    return rv


if __name__ == '__main__':
    sys.exit(main(sys.argv))
bsd-3-clause
bglassy/OpenBazaar
node/data_uri.py
17
2605
import mimetypes
import re
import urllib

# Matches a MIME type such as "image/png" or "application/svg+xml".
MIMETYPE_REGEX = r'[\w]+\/[\w\-\+\.]+'
_MIMETYPE_RE = re.compile('^{}$'.format(MIMETYPE_REGEX))

# Matches a charset token such as "utf-8".
CHARSET_REGEX = r'[\w\-\+\.]+'
_CHARSET_RE = re.compile('^{}$'.format(CHARSET_REGEX))

# RFC 2397 data URI: data:[<mimetype>][;charset=<charset>][;base64],<data>
DATA_URI_REGEX = (
    r'data:' +
    r'(?P<mimetype>{})?'.format(MIMETYPE_REGEX) +
    r'(?:\;charset\=(?P<charset>{}))?'.format(CHARSET_REGEX) +
    r'(?P<base64>\;base64)?' +
    r',(?P<data>.*)')
# DOTALL so the <data> group may span embedded newlines.
_DATA_URI_RE = re.compile(r'^{}$'.format(DATA_URI_REGEX), re.DOTALL)


class DataURI(str):
    """A str subclass holding an RFC 2397 data URI, with parsed accessors
    for its mimetype, charset, base64 flag and decoded payload.

    NOTE(review): Python 2 only — uses the 'base64' str codec and
    urllib.quote/unquote.
    """

    @classmethod
    def make(cls, mimetype, charset, base64, data):
        """Build a DataURI from its parts.

        mimetype/charset may be None to omit them; `base64` selects base64
        vs percent-encoding for `data`. Raises ValueError on an invalid
        mimetype or charset.
        """
        parts = ['data:']
        if mimetype is not None:
            if not _MIMETYPE_RE.match(mimetype):
                raise ValueError("Invalid mimetype: %r" % mimetype)
            parts.append(mimetype)
        if charset is not None:
            if not _CHARSET_RE.match(charset):
                raise ValueError("Invalid charset: %r" % charset)
            parts.extend([';charset=', charset])
        if base64:
            parts.append(';base64')
            # The base64 codec inserts a newline every 76 chars; strip them.
            encoded_data = data.encode('base64').replace('\n', '')
        else:
            encoded_data = urllib.quote(data)
        parts.extend([',', encoded_data])
        return cls(''.join(parts))

    @classmethod
    def from_file(cls, filename, charset=None, base64=True):
        """Build a DataURI from a file, guessing the mimetype from the
        filename (may be None if unguessable)."""
        mimetype, _ = mimetypes.guess_type(filename, strict=False)
        with open(filename) as fileobject:
            data = fileobject.read()
        return cls.make(mimetype, charset, base64, data)

    def __new__(cls, *args, **kwargs):
        uri = super(DataURI, cls).__new__(cls, *args, **kwargs)
        # Trigger any ValueErrors.
        uri._parse  # pylint: disable=pointless-statement
        return uri

    def __repr__(self):
        return 'DataURI(%s)' % (super(DataURI, self).__repr__(),)

    @property
    def mimetype(self):
        # None when the URI omitted the mimetype.
        return self._parse[0]

    @property
    def charset(self):
        # None when the URI omitted ";charset=".
        return self._parse[1]

    @property
    def is_base64(self):
        return self._parse[2]

    @property
    def data(self):
        # Decoded payload (base64- or percent-decoded as appropriate).
        return self._parse[3]

    @property
    def _parse(self):
        """Parse self into (mimetype, charset, is_base64, data); raises
        ValueError if self is not a valid data URI.

        NOTE(review): re-parses on every property access — not cached.
        """
        match = _DATA_URI_RE.match(self)
        if not match:
            raise ValueError("Not a valid data URI: %r" % self)
        mimetype = match.group('mimetype') or None
        charset = match.group('charset') or None

        if match.group('base64'):
            data = match.group('data').decode('base64')
        else:
            data = urllib.unquote(match.group('data'))

        return mimetype, charset, bool(match.group('base64')), data
mit
TheWardoctor/Wardoctors-repo
script.module.schism.common/lib/bs4/__init__.py
23
20421
"""Beautiful Soup Elixir and Tonic "The Screen-Scraper's Friend" http://www.crummy.com/software/BeautifulSoup/ Beautiful Soup uses a pluggable XML or HTML parser to parse a (possibly invalid) document into a tree representation. Beautiful Soup provides methods and Pythonic idioms that make it easy to navigate, search, and modify the parse tree. Beautiful Soup works with Python 2.7 and up. It works better if lxml and/or html5lib is installed. For more than you ever wanted to know about Beautiful Soup, see the documentation: http://www.crummy.com/software/BeautifulSoup/bs4/doc/ """ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. __author__ = "Leonard Richardson (leonardr@segfault.org)" __version__ = "4.5.1" __copyright__ = "Copyright (c) 2004-2016 Leonard Richardson" __license__ = "MIT" __all__ = ['BeautifulSoup'] import os import re import traceback import warnings from .builder import builder_registry, ParserRejectedMarkup from .dammit import UnicodeDammit from .element import ( CData, Comment, DEFAULT_OUTPUT_ENCODING, Declaration, Doctype, NavigableString, PageElement, ProcessingInstruction, ResultSet, SoupStrainer, Tag, ) # The very first thing we do is give a useful error if someone is # running this code under Python 3 without converting it. 'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work.'<>'You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).' class BeautifulSoup(Tag): """ This class defines the basic interface called by the tree builders. 
These methods will be called by the parser: reset() feed(markup) The tree builder may call these methods from its feed() implementation: handle_starttag(name, attrs) # See note about return value handle_endtag(name) handle_data(data) # Appends to the current data node endData(containerClass=NavigableString) # Ends the current data node No matter how complicated the underlying parser is, you should be able to build a tree using 'start tag' events, 'end tag' events, 'data' events, and "done with data" events. If you encounter an empty-element tag (aka a self-closing tag, like HTML's <br> tag), call handle_starttag and then handle_endtag. """ ROOT_TAG_NAME = u'[document]' # If the end-user gives no indication which tree builder they # want, look for one with these features. DEFAULT_BUILDER_FEATURES = ['html', 'fast'] ASCII_SPACES = '\x20\x0a\x09\x0c\x0d' NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nThe code that caused this warning is on line %(line_number)s of the file %(filename)s. To get rid of this warning, change code that looks like this:\n\n BeautifulSoup([your markup])\n\nto this:\n\n BeautifulSoup([your markup], \"%(parser)s\")\n" def __init__(self, markup="", features=None, builder=None, parse_only=None, from_encoding=None, exclude_encodings=None, **kwargs): """The Soup object is initialized as the 'root tag', and the provided markup (which can be a string or a file-like object) is fed into the underlying parser.""" if 'convertEntities' in kwargs: warnings.warn( "BS4 does not respect the convertEntities argument to the " "BeautifulSoup constructor. 
Entities are always converted " "to Unicode characters.") if 'markupMassage' in kwargs: del kwargs['markupMassage'] warnings.warn( "BS4 does not respect the markupMassage argument to the " "BeautifulSoup constructor. The tree builder is responsible " "for any necessary markup massage.") if 'smartQuotesTo' in kwargs: del kwargs['smartQuotesTo'] warnings.warn( "BS4 does not respect the smartQuotesTo argument to the " "BeautifulSoup constructor. Smart quotes are always converted " "to Unicode characters.") if 'selfClosingTags' in kwargs: del kwargs['selfClosingTags'] warnings.warn( "BS4 does not respect the selfClosingTags argument to the " "BeautifulSoup constructor. The tree builder is responsible " "for understanding self-closing tags.") if 'isHTML' in kwargs: del kwargs['isHTML'] warnings.warn( "BS4 does not respect the isHTML argument to the " "BeautifulSoup constructor. Suggest you use " "features='lxml' for HTML and features='lxml-xml' for " "XML.") def deprecated_argument(old_name, new_name): if old_name in kwargs: warnings.warn( 'The "%s" argument to the BeautifulSoup constructor ' 'has been renamed to "%s."' % (old_name, new_name)) value = kwargs[old_name] del kwargs[old_name] return value return None parse_only = parse_only or deprecated_argument( "parseOnlyThese", "parse_only") from_encoding = from_encoding or deprecated_argument( "fromEncoding", "from_encoding") if from_encoding and isinstance(markup, unicode): warnings.warn("You provided Unicode markup but also provided a value for from_encoding. 
Your from_encoding will be ignored.") from_encoding = None if len(kwargs) > 0: arg = kwargs.keys().pop() raise TypeError( "__init__() got an unexpected keyword argument '%s'" % arg) if builder is None: original_features = features if isinstance(features, basestring): features = [features] if features is None or len(features) == 0: features = self.DEFAULT_BUILDER_FEATURES builder_class = builder_registry.lookup(*features) if builder_class is None: raise FeatureNotFound( "Couldn't find a tree builder with the features you " "requested: %s. Do you need to install a parser library?" % ",".join(features)) builder = builder_class() if not (original_features == builder.NAME or original_features in builder.ALTERNATE_NAMES): if builder.is_xml: markup_type = "XML" else: markup_type = "HTML" caller = traceback.extract_stack()[0] filename = caller[0] line_number = caller[1] warnings.warn(self.NO_PARSER_SPECIFIED_WARNING % dict( filename=filename, line_number=line_number, parser=builder.NAME, markup_type=markup_type)) self.builder = builder self.is_xml = builder.is_xml self.known_xml = self.is_xml self.builder.soup = self self.parse_only = parse_only if hasattr(markup, 'read'): # It's a file-type object. markup = markup.read() elif len(markup) <= 256 and ( (isinstance(markup, bytes) and not b'<' in markup) or (isinstance(markup, unicode) and not u'<' in markup) ): # Print out warnings for a couple beginner problems # involving passing non-markup to Beautiful Soup. # Beautiful Soup will still parse the input as markup, # just in case that's what the user really wants. if (isinstance(markup, unicode) and not os.path.supports_unicode_filenames): possible_filename = markup.encode("utf8") else: possible_filename = markup is_file = False try: is_file = os.path.exists(possible_filename) except Exception, e: # This is almost certainly a problem involving # characters not valid in filenames on this # system. Just let it go. 
pass if is_file: if isinstance(markup, unicode): markup = markup.encode("utf8") warnings.warn( '"%s" looks like a filename, not markup. You should' 'probably open this file and pass the filehandle into' 'Beautiful Soup.' % markup) self._check_markup_is_url(markup) for (self.markup, self.original_encoding, self.declared_html_encoding, self.contains_replacement_characters) in ( self.builder.prepare_markup( markup, from_encoding, exclude_encodings=exclude_encodings)): self.reset() try: self._feed() break except ParserRejectedMarkup: pass # Clear out the markup and remove the builder's circular # reference to this object. self.markup = None self.builder.soup = None def __copy__(self): copy = type(self)( self.encode('utf-8'), builder=self.builder, from_encoding='utf-8' ) # Although we encoded the tree to UTF-8, that may not have # been the encoding of the original markup. Set the copy's # .original_encoding to reflect the original object's # .original_encoding. copy.original_encoding = self.original_encoding return copy def __getstate__(self): # Frequently a tree builder can't be pickled. d = dict(self.__dict__) if 'builder' in d and not self.builder.picklable: d['builder'] = None return d @staticmethod def _check_markup_is_url(markup): """ Check if markup looks like it's actually a url and raise a warning if so. Markup can be unicode or str (py2) / bytes (py3). """ if isinstance(markup, bytes): space = b' ' cant_start_with = (b"http:", b"https:") elif isinstance(markup, unicode): space = u' ' cant_start_with = (u"http:", u"https:") else: return if any(markup.startswith(prefix) for prefix in cant_start_with): if not space in markup: if isinstance(markup, bytes): decoded_markup = markup.decode('utf-8', 'replace') else: decoded_markup = markup warnings.warn( '"%s" looks like a URL. Beautiful Soup is not an' ' HTTP client. You should probably use an HTTP client like' ' requests to get the document behind the URL, and feed' ' that document to Beautiful Soup.' 
% decoded_markup ) def _feed(self): # Convert the document to Unicode. self.builder.reset() self.builder.feed(self.markup) # Close out any unfinished strings and close all the open tags. self.endData() while self.currentTag.name != self.ROOT_TAG_NAME: self.popTag() def reset(self): Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME) self.hidden = 1 self.builder.reset() self.current_data = [] self.currentTag = None self.tagStack = [] self.preserve_whitespace_tag_stack = [] self.pushTag(self) def new_tag(self, name, namespace=None, nsprefix=None, **attrs): """Create a new tag associated with this soup.""" return Tag(None, self.builder, name, namespace, nsprefix, attrs) def new_string(self, s, subclass=NavigableString): """Create a new NavigableString associated with this soup.""" return subclass(s) def insert_before(self, successor): raise NotImplementedError("BeautifulSoup objects don't support insert_before().") def insert_after(self, successor): raise NotImplementedError("BeautifulSoup objects don't support insert_after().") def popTag(self): tag = self.tagStack.pop() if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]: self.preserve_whitespace_tag_stack.pop() #print "Pop", tag.name if self.tagStack: self.currentTag = self.tagStack[-1] return self.currentTag def pushTag(self, tag): #print "Push", tag.name if self.currentTag: self.currentTag.contents.append(tag) self.tagStack.append(tag) self.currentTag = self.tagStack[-1] if tag.name in self.builder.preserve_whitespace_tags: self.preserve_whitespace_tag_stack.append(tag) def endData(self, containerClass=NavigableString): if self.current_data: current_data = u''.join(self.current_data) # If whitespace is not preserved, and this string contains # nothing but ASCII spaces, replace it with a single space # or newline. 
if not self.preserve_whitespace_tag_stack: strippable = True for i in current_data: if i not in self.ASCII_SPACES: strippable = False break if strippable: if '\n' in current_data: current_data = '\n' else: current_data = ' ' # Reset the data collector. self.current_data = [] # Should we add this string to the tree at all? if self.parse_only and len(self.tagStack) <= 1 and \ (not self.parse_only.text or \ not self.parse_only.search(current_data)): return o = containerClass(current_data) self.object_was_parsed(o) def object_was_parsed(self, o, parent=None, most_recent_element=None): """Add an object to the parse tree.""" parent = parent or self.currentTag previous_element = most_recent_element or self._most_recent_element next_element = previous_sibling = next_sibling = None if isinstance(o, Tag): next_element = o.next_element next_sibling = o.next_sibling previous_sibling = o.previous_sibling if not previous_element: previous_element = o.previous_element o.setup(parent, previous_element, next_element, previous_sibling, next_sibling) self._most_recent_element = o parent.contents.append(o) if parent.next_sibling: # This node is being inserted into an element that has # already been parsed. Deal with any dangling references. index = len(parent.contents)-1 while index >= 0: if parent.contents[index] is o: break index -= 1 else: raise ValueError( "Error building tree: supposedly %r was inserted " "into %r after the fact, but I don't see it!" 
% ( o, parent ) ) if index == 0: previous_element = parent previous_sibling = None else: previous_element = previous_sibling = parent.contents[index-1] if index == len(parent.contents)-1: next_element = parent.next_sibling next_sibling = None else: next_element = next_sibling = parent.contents[index+1] o.previous_element = previous_element if previous_element: previous_element.next_element = o o.next_element = next_element if next_element: next_element.previous_element = o o.next_sibling = next_sibling if next_sibling: next_sibling.previous_sibling = o o.previous_sibling = previous_sibling if previous_sibling: previous_sibling.next_sibling = o def _popToTag(self, name, nsprefix=None, inclusivePop=True): """Pops the tag stack up to and including the most recent instance of the given tag. If inclusivePop is false, pops the tag stack up to but *not* including the most recent instqance of the given tag.""" #print "Popping to %s" % name if name == self.ROOT_TAG_NAME: # The BeautifulSoup object itself can never be popped. return most_recently_popped = None stack_size = len(self.tagStack) for i in range(stack_size - 1, 0, -1): t = self.tagStack[i] if (name == t.name and nsprefix == t.prefix): if inclusivePop: most_recently_popped = self.popTag() break most_recently_popped = self.popTag() return most_recently_popped def handle_starttag(self, name, namespace, nsprefix, attrs): """Push a start tag on to the stack. If this method returns None, the tag was rejected by the SoupStrainer. You should proceed as if the tag had not occurred in the document. For instance, if this was a self-closing tag, don't call handle_endtag. 
""" # print "Start tag %s: %s" % (name, attrs) self.endData() if (self.parse_only and len(self.tagStack) <= 1 and (self.parse_only.text or not self.parse_only.search_tag(name, attrs))): return None tag = Tag(self, self.builder, name, namespace, nsprefix, attrs, self.currentTag, self._most_recent_element) if tag is None: return tag if self._most_recent_element: self._most_recent_element.next_element = tag self._most_recent_element = tag self.pushTag(tag) return tag def handle_endtag(self, name, nsprefix=None): #print "End tag: " + name self.endData() self._popToTag(name, nsprefix) def handle_data(self, data): self.current_data.append(data) def decode(self, pretty_print=False, eventual_encoding=DEFAULT_OUTPUT_ENCODING, formatter="minimal"): """Returns a string or Unicode representation of this document. To get Unicode, pass None for encoding.""" if self.is_xml: # Print the XML declaration encoding_part = '' if eventual_encoding != None: encoding_part = ' encoding="%s"' % eventual_encoding prefix = u'<?xml version="1.0"%s?>\n' % encoding_part else: prefix = u'' if not pretty_print: indent_level = None else: indent_level = 0 return prefix + super(BeautifulSoup, self).decode( indent_level, eventual_encoding, formatter) # Alias to make it easier to type import: 'from bs4 import _soup' _s = BeautifulSoup _soup = BeautifulSoup class BeautifulStoneSoup(BeautifulSoup): """Deprecated interface to an XML parser.""" def __init__(self, *args, **kwargs): kwargs['features'] = 'xml' warnings.warn( 'The BeautifulStoneSoup class is deprecated. Instead of using ' 'it, pass features="xml" into the BeautifulSoup constructor.') super(BeautifulStoneSoup, self).__init__(*args, **kwargs) class StopParsing(Exception): pass class FeatureNotFound(ValueError): pass #By default, act as an HTML pretty-printer. if __name__ == '__main__': import sys soup = BeautifulSoup(sys.stdin) print soup.prettify()
apache-2.0
vprime/puuuu
env/lib/python2.7/site-packages/django/contrib/gis/db/backends/postgis/creation.py
117
4498
from django.conf import settings
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation
from django.utils.functional import cached_property


class PostGISCreation(DatabaseCreation):
    """Database creation for the PostGIS backend: spatial-index SQL,
    geometry-column setup, and PostGIS template/extension handling for
    test databases."""

    # Spatial indexes use GiST; the *_OPS opclasses apply to PostGIS 1.5
    # (see the version logic in sql_indexes_for_field).
    geom_index_type = 'GIST'
    geom_index_ops = 'GIST_GEOMETRY_OPS'
    geom_index_ops_nd = 'GIST_GEOMETRY_OPS_ND'

    @cached_property
    def template_postgis(self):
        # Name of a spatially-enabled database template if it exists on the
        # server, else None. Cached for the lifetime of this object.
        template_postgis = getattr(settings, 'POSTGIS_TEMPLATE', 'template_postgis')
        cursor = self.connection.cursor()
        cursor.execute('SELECT 1 FROM pg_database WHERE datname = %s LIMIT 1;', (template_postgis,))
        if cursor.fetchone():
            return template_postgis
        return None

    def sql_indexes_for_field(self, model, f, style):
        "Return any spatial index creation SQL for the field."
        from django.contrib.gis.db.models.fields import GeometryField

        output = super(PostGISCreation, self).sql_indexes_for_field(model, f, style)

        if isinstance(f, GeometryField):
            gqn = self.connection.ops.geo_quote_name
            qn = self.connection.ops.quote_name
            db_table = model._meta.db_table

            if f.geography or self.connection.ops.geometry:
                # Geography and Geometry (PostGIS 2.0+) columns are
                # created normally.
                pass
            else:
                # Geometry columns are created by `AddGeometryColumn`
                # stored procedure.
                output.append(style.SQL_KEYWORD('SELECT ') +
                              style.SQL_TABLE('AddGeometryColumn') + '(' +
                              style.SQL_TABLE(gqn(db_table)) + ', ' +
                              style.SQL_FIELD(gqn(f.column)) + ', ' +
                              style.SQL_FIELD(str(f.srid)) + ', ' +
                              style.SQL_COLTYPE(gqn(f.geom_type)) + ', ' +
                              style.SQL_KEYWORD(str(f.dim)) + ');')

                if not f.null:
                    # Add a NOT NULL constraint to the field
                    output.append(style.SQL_KEYWORD('ALTER TABLE ') +
                                  style.SQL_TABLE(qn(db_table)) +
                                  style.SQL_KEYWORD(' ALTER ') +
                                  style.SQL_FIELD(qn(f.column)) +
                                  style.SQL_KEYWORD(' SET NOT NULL') + ';')

            if f.spatial_index:
                # Spatial indexes created the same way for both Geometry and
                # Geography columns.
                # PostGIS 2.0 does not support GIST_GEOMETRY_OPS. So, on 1.5
                # we use GIST_GEOMETRY_OPS, on 2.0 we use either "nd" ops
                # which are fast on multidimensional cases, or just plain
                # gist index for the 2d case.
                if f.geography:
                    index_ops = ''
                elif self.connection.ops.geometry:
                    if f.dim > 2:
                        index_ops = ' ' + style.SQL_KEYWORD(self.geom_index_ops_nd)
                    else:
                        index_ops = ''
                else:
                    index_ops = ' ' + style.SQL_KEYWORD(self.geom_index_ops)
                output.append(style.SQL_KEYWORD('CREATE INDEX ') +
                              style.SQL_TABLE(qn('%s_%s_id' % (db_table, f.column))) +
                              style.SQL_KEYWORD(' ON ') +
                              style.SQL_TABLE(qn(db_table)) +
                              style.SQL_KEYWORD(' USING ') +
                              style.SQL_COLTYPE(self.geom_index_type) +
                              ' ( ' +
                              style.SQL_FIELD(qn(f.column)) +
                              index_ops + ' );')
        return output

    def sql_table_creation_suffix(self):
        # Clone from the spatially-enabled template when one exists.
        if self.template_postgis is not None:
            return ' TEMPLATE %s' % (
                self.connection.ops.quote_name(self.template_postgis),)
        return ''

    def _create_test_db(self, verbosity, autoclobber):
        test_database_name = super(PostGISCreation, self)._create_test_db(verbosity, autoclobber)
        if self.template_postgis is None:
            # Connect to the test database in order to create the postgis extension
            self.connection.close()
            self.connection.settings_dict["NAME"] = test_database_name
            cursor = self.connection.cursor()
            cursor.execute("CREATE EXTENSION postgis")
            cursor.connection.commit()
        return test_database_name
mit
yencarnacion/jaikuengine
.google_appengine/lib/django-1.5/tests/regressiontests/i18n/contenttypes/tests.py
50
1132
# coding: utf-8 from __future__ import unicode_literals import os from django.contrib.contenttypes.models import ContentType from django.test import TestCase from django.test.utils import override_settings from django.utils._os import upath from django.utils import six from django.utils import translation @override_settings( USE_I18N=True, LOCALE_PATHS=( os.path.join(os.path.dirname(upath(__file__)), 'locale'), ), LANGUAGE_CODE='en', LANGUAGES=( ('en', 'English'), ('fr', 'French'), ), ) class ContentTypeTests(TestCase): def test_verbose_name(self): company_type = ContentType.objects.get(app_label='i18n', model='company') with translation.override('en'): self.assertEqual(six.text_type(company_type), 'Company') with translation.override('fr'): self.assertEqual(six.text_type(company_type), 'Société') def test_field_override(self): company_type = ContentType.objects.get(app_label='i18n', model='company') company_type.name = 'Other' self.assertEqual(six.text_type(company_type), 'Other')
apache-2.0
yasoob/youtube-dl-GUI
youtube_dl/extractor/telegraaf.py
12
2995
# coding: utf-8
from __future__ import unicode_literals

from .common import InfoExtractor
from ..utils import (
    determine_ext,
    int_or_none,
    parse_iso8601,
    try_get,
)


class TelegraafIE(InfoExtractor):
    """Extractor for video pages on www.telegraaf.nl.

    Resolves the article id to a video id via the site's GraphQL endpoint,
    then reads formats (HLS/DASH adaptive plus progressive HTTP) from the
    tmgvideo playlist JSON.
    """
    _VALID_URL = r'https?://(?:www\.)?telegraaf\.nl/video/(?P<id>\d+)'
    _TEST = {
        'url': 'https://www.telegraaf.nl/video/734366489/historisch-scheepswrak-slaat-na-100-jaar-los',
        'info_dict': {
            'id': 'gaMItuoSeUg2',
            'ext': 'mp4',
            'title': 'Historisch scheepswrak slaat na 100 jaar los',
            'description': 'md5:6f53b7c4f55596722ac24d6c0ec00cfb',
            'thumbnail': r're:^https?://.*\.jpg',
            'duration': 55,
            'timestamp': 1572805527,
            'upload_date': '20191103',
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        },
    }

    def _real_extract(self, url):
        article_id = self._match_id(url)

        # Step 1: article id -> video id, via GraphQL.
        video_id = self._download_json(
            'https://www.telegraaf.nl/graphql', article_id, query={
                'query': '''{
  article(uid: %s) {
    videos {
      videoId
    }
  }
}''' % article_id,
            })['data']['article']['videos'][0]['videoId']

        # Step 2: video id -> playlist item with titles/locations.
        item = self._download_json(
            'https://content.tmgvideo.nl/playlist/item=%s/playlist.json' % video_id,
            video_id)['items'][0]
        title = item['title']

        formats = []
        locations = item.get('locations') or {}
        # Adaptive streams: HLS (.m3u8) and DASH (.mpd) manifests.
        for location in locations.get('adaptive', []):
            manifest_url = location.get('src')
            if not manifest_url:
                continue
            ext = determine_ext(manifest_url)
            if ext == 'm3u8':
                formats.extend(self._extract_m3u8_formats(
                    manifest_url, video_id, ext='mp4', m3u8_id='hls', fatal=False))
            elif ext == 'mpd':
                formats.extend(self._extract_mpd_formats(
                    manifest_url, video_id, mpd_id='dash', fatal=False))
            else:
                self.report_warning('Unknown adaptive format %s' % ext)
        # Progressive (direct HTTP) streams.
        for location in locations.get('progressive', []):
            src = try_get(location, lambda x: x['sources'][0]['src'])
            if not src:
                continue
            label = location.get('label')
            formats.append({
                'url': src,
                'width': int_or_none(location.get('width')),
                'height': int_or_none(location.get('height')),
                'format_id': 'http' + ('-%s' % label if label else ''),
            })

        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': title,
            'description': item.get('description'),
            'formats': formats,
            'duration': int_or_none(item.get('duration')),
            'thumbnail': item.get('poster'),
            # NOTE(review): ' ' delimiter — datecreated uses a space between
            # date and time rather than ISO 8601's 'T'.
            'timestamp': parse_iso8601(item.get('datecreated'), ' '),
        }
mit
ianatpn/nupictest
nupic/encoders/scalar.py
2
25504
# ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2013, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- import math import numbers import numpy from nupic.data import SENTINEL_VALUE_FOR_MISSING_DATA from nupic.data.fieldmeta import FieldMetaType from nupic.bindings.math import SM32, GetNTAReal from nupic.encoders.base import Encoder, EncoderResult ############################################################################ class ScalarEncoder(Encoder): """ A scalar encoder encodes a numeric (floating point) value into an array of bits. The output is 0's except for a contiguous block of 1's. The location of this contiguous block varies continuously with the input value. The encoding is linear. If you want a nonlinear encoding, just transform the scalar (e.g. by applying a logarithm function) before encoding. It is not recommended to bin the data as a pre-processing step, e.g. "1" = $0 - $.20, "2" = $.21-$0.80, "3" = $.81-$1.20, etc. as this removes a lot of information and prevents nearby values from overlapping in the output. 
Instead, use a continuous transformation that scales the data (a piecewise transformation is fine). Parameters: ----------------------------------------------------------------------------- w -- The number of bits that are set to encode a single value - the "width" of the output signal restriction: w must be odd to avoid centering problems. minval -- The minimum value of the input signal. maxval -- The upper bound of the input signal periodic -- If true, then the input value "wraps around" such that minval = maxval For a periodic value, the input must be strictly less than maxval, otherwise maxval is a true upper bound. There are three mutually exclusive parameters that determine the overall size of of the output. Only one of these should be specifed to the constructor: n -- The number of bits in the output. Must be greater than or equal to w radius -- Two inputs separated by more than the radius have non-overlapping representations. Two inputs separated by less than the radius will in general overlap in at least some of their bits. You can think of this as the radius of the input. resolution -- Two inputs separated by greater than, or equal to the resolution are guaranteed to have different representations. Note: radius and resolution are specified w.r.t the input, not output. w is specified w.r.t. the output. Example: day of week. w = 3 Minval = 1 (Monday) Maxval = 8 (Monday) periodic = true n = 14 [equivalently: radius = 1.5 or resolution = 0.5] The following values would encode midnight -- the start of the day monday (1) -> 11000000000001 tuesday(2) -> 01110000000000 wednesday(3) -> 00011100000000 ... sunday (7) -> 10000000000011 Since the resolution is 12 hours, we can also encode noon, as monday noon -> 11100000000000 monday midnt-> 01110000000000 tuesday noon -> 00111000000000 etc. It may not be natural to specify "n", especially with non-periodic data. For example, consider encoding an input with a range of 1-10 (inclusive) using an output width of 5. 
If you specify resolution = 1, this means that inputs of 1 and 2 have different outputs, though they overlap, but 1 and 1.5 might not have different outputs. This leads to a 14-bit representation like this: 1 -> 11111000000000 (14 bits total) 2 -> 01111100000000 ... 10-> 00000000011111 [resolution = 1; n=14; radius = 5] You could specify resolution = 0.5, which gives 1 -> 11111000... (22 bits total) 1.5 -> 011111..... 2.0 -> 0011111.... [resolution = 0.5; n=22; radius=2.5] You could specify radius = 1, which gives 1 -> 111110000000.... (50 bits total) 2 -> 000001111100.... 3 -> 000000000011111... ... 10 -> .....000011111 [radius = 1; resolution = 0.2; n=50] An N/M encoding can also be used to encode a binary value, where we want more than one bit to represent each state. For example, we could have: w = 5, minval = 0, maxval = 1, radius = 1 (which is equivalent to n=10) 0 -> 1111100000 1 -> 0000011111 Implementation details: -------------------------------------------------------------------------- range = maxval - minval h = (w-1)/2 (half-width) resolution = radius / w n = w * range/radius (periodic) n = w * range/radius + 2 * h (non-periodic) """ ############################################################################ def __init__(self, w, minval, maxval, periodic=False, n=0, radius=0, resolution=0, name=None, verbosity=0, clipInput=False, forced=False): """ w -- number of bits to set in output minval -- minimum input value maxval -- maximum input value (input is strictly less if periodic == True) Exactly one of n, radius, resolution must be set. "0" is a special value that means "not set". 
n -- number of bits in the representation (must be > w) radius -- inputs separated by more than, or equal to this distance will have non-overlapping representations resolution -- inputs separated by more than, or equal to this distance will have different representations name -- an optional string which will become part of the description clipInput -- if true, non-periodic inputs smaller than minval or greater than maxval will be clipped to minval/maxval forced -- if true, skip some safety checks (for compatibility reasons), default false See class documentation for more information. """ assert isinstance(w, int) self.encoders = None self.verbosity = verbosity self.w = w if (w % 2 == 0): raise Exception("Width must be an odd number (%f)" % w) self.minval = minval self.maxval = maxval self.periodic = periodic self.clipInput = clipInput self.halfwidth = (w - 1) / 2 # For non-periodic inputs, padding is the number of bits "outside" the range, # on each side. I.e. the representation of minval is centered on some bit, and # there are "padding" bits to the left of that centered bit; similarly with # bits to the right of the center bit of maxval if self.periodic: self.padding = 0 else: self.padding = self.halfwidth if (minval is not None and maxval is not None): if (minval >= maxval): raise Exception("The encoder for %s is invalid. minval %s is greater than " "or equal to maxval %s. minval must be strictly less " "than maxval." % (name, minval, maxval)) self.rangeInternal = float(self.maxval - self.minval) # There are three different ways of thinking about the representation. Handle # each case here. self._initEncoder(w, minval, maxval, n, radius, resolution) # nInternal represents the output area excluding the possible padding on each # side self.nInternal = self.n - 2 * self.padding # Our name if name is not None: self.name = name else: self.name = "[%s:%s]" % (self.minval, self.maxval) # This matrix is used for the topDownCompute. 
We build it the first time # topDownCompute is called self._topDownMappingM = None self._topDownValues = None # This list is created by getBucketValues() the first time it is called, # and re-created whenever our buckets would be re-arranged. self._bucketValues = None # checks for likely mistakes in encoder settings if not forced: self._checkReasonableSettings() ############################################################################ def _initEncoder(self, w, minval, maxval, n, radius, resolution): """ (helper function) There are three different ways of thinking about the representation. Handle each case here.""" if n != 0: assert radius == 0 assert resolution == 0 assert n > w self.n = n if (minval is not None and maxval is not None): if not self.periodic: self.resolution = float(self.rangeInternal) / (self.n - self.w) else: self.resolution = float(self.rangeInternal) / (self.n) self.radius = self.w * self.resolution if self.periodic: self.range = self.rangeInternal else: self.range = self.rangeInternal + self.resolution else: if radius != 0: assert resolution == 0 self.radius = radius self.resolution = float(self.radius) / w elif resolution != 0: self.resolution = resolution self.radius = self.resolution * self.w else: raise Exception("One of n, radius, resolution must be specified for a ScalarEncoder") if self.periodic: self.range = self.rangeInternal else: self.range = self.rangeInternal + self.resolution nfloat = self.w * (self.range / self.radius) + 2 * self.padding self.n = int(math.ceil(nfloat)) ############################################################################ def _checkReasonableSettings(self): """(helper function) check if the settings are reasonable for SP to work""" # checks for likely mistakes in encoder settings if self.w < 21: raise ValueError("Number of bits in the SDR (%d) must be greater than 2, and recommended >= 21 (use forced=True to override)" % self.w) ############################################################################ 
def getDecoderOutputFieldTypes(self): """ [Encoder class virtual method override] """ return (FieldMetaType.float, ) ############################################################################ def getWidth(self): return self.n ############################################################################ def _recalcParams(self): self.rangeInternal = float(self.maxval - self.minval) if not self.periodic: self.resolution = float(self.rangeInternal) / (self.n - self.w) else: self.resolution = float(self.rangeInternal) / (self.n) self.radius = self.w * self.resolution if self.periodic: self.range = self.rangeInternal else: self.range = self.rangeInternal + self.resolution name = "[%s:%s]" % (self.minval, self.maxval) ############################################################################ def getDescription(self): return [(self.name, 0)] ############################################################################ def _getFirstOnBit(self, input): """ Return the bit offset of the first bit to be set in the encoder output. For periodic encoders, this can be a negative number when the encoded output wraps around. """ if input == SENTINEL_VALUE_FOR_MISSING_DATA: return [None] else: if input < self.minval: # Don't clip periodic inputs. Out-of-range input is always an error if self.clipInput and not self.periodic: if self.verbosity > 0: print "Clipped input %s=%.2f to minval %.2f" % (self.name, input, self.minval) input = self.minval else: raise Exception('input (%s) less than range (%s - %s)' % (str(input), str(self.minval), str(self.maxval))) if self.periodic: # Don't clip periodic inputs. 
Out-of-range input is always an error if input >= self.maxval: raise Exception('input (%s) greater than periodic range (%s - %s)' % (str(input), str(self.minval), str(self.maxval))) else: if input > self.maxval: if self.clipInput: if self.verbosity > 0: print "Clipped input %s=%.2f to maxval %.2f" % (self.name, input, self.maxval) input = self.maxval else: raise Exception('input (%s) greater than range (%s - %s)' % (str(input), str(self.minval), str(self.maxval))) if self.periodic: centerbin = int((input - self.minval) * self.nInternal / self.range) \ + self.padding else: centerbin = int(((input - self.minval) + self.resolution/2) \ / self.resolution ) + self.padding # We use the first bit to be set in the encoded output as the bucket index minbin = centerbin - self.halfwidth return [minbin] ############################################################################ def getBucketIndices(self, input): """ See method description in base.py """ if type(input) is float and math.isnan(input): input = SENTINEL_VALUE_FOR_MISSING_DATA if input == SENTINEL_VALUE_FOR_MISSING_DATA: return [None] minbin = self._getFirstOnBit(input)[0] # For periodic encoders, the bucket index is the index of the center bit if self.periodic: bucketIdx = minbin + self.halfwidth if bucketIdx < 0: bucketIdx += self.n # for non-periodic encoders, the bucket index is the index of the left bit else: bucketIdx = minbin return [bucketIdx] ############################################################################ def encodeIntoArray(self, input, output, learn=True): """ See method description in base.py """ if input is not None and not isinstance(input, numbers.Number): raise TypeError( "Expected a scalar input but got input of type %s" % type(input)) if type(input) is float and math.isnan(input): input = SENTINEL_VALUE_FOR_MISSING_DATA # Get the bucket index to use bucketIdx = self._getFirstOnBit(input)[0] if bucketIdx is None: # None is returned for missing value output[0:self.n] = 0 #TODO: should 
all 1s, or random SDR be returned instead? else: # The bucket index is the index of the first bit to set in the output output[:self.n] = 0 minbin = bucketIdx maxbin = minbin + 2*self.halfwidth if self.periodic: # Handle the edges by computing wrap-around if maxbin >= self.n: bottombins = maxbin - self.n + 1 output[:bottombins] = 1 maxbin = self.n - 1 if minbin < 0: topbins = -minbin output[self.n - topbins:self.n] = 1 minbin = 0 assert minbin >= 0 assert maxbin < self.n # set the output (except for periodic wraparound) output[minbin:maxbin + 1] = 1 # Debug the decode() method if self.verbosity >= 2: print print "input:", input print "range:", self.minval, "-", self.maxval print "n:", self.n, "w:", self.w, "resolution:", self.resolution, \ "radius", self.radius, "periodic:", self.periodic print "output:", self.pprint(output) print "input desc:", self.decodedToStr(self.decode(output)) ############################################################################ def decode(self, encoded, parentFieldName=''): """ See the function description in base.py """ # For now, we simply assume any top-down output greater than 0 # is ON. Eventually, we will probably want to incorporate the strength # of each top-down output. tmpOutput = numpy.array(encoded[:self.n] > 0).astype(encoded.dtype) if not tmpOutput.any(): return (dict(), []) # ------------------------------------------------------------------------ # First, assume the input pool is not sampled 100%, and fill in the # "holes" in the encoded representation (which are likely to be present # if this is a coincidence that was learned by the SP). # Search for portions of the output that have "holes" maxZerosInARow = self.halfwidth for i in xrange(maxZerosInARow): searchStr = numpy.ones(i + 3, dtype=encoded.dtype) searchStr[1:-1] = 0 subLen = len(searchStr) # Does this search string appear in the output? 
if self.periodic: for j in xrange(self.n): outputIndices = numpy.arange(j, j + subLen) outputIndices %= self.n if numpy.array_equal(searchStr, tmpOutput[outputIndices]): tmpOutput[outputIndices] = 1 else: for j in xrange(self.n - subLen + 1): if numpy.array_equal(searchStr, tmpOutput[j:j + subLen]): tmpOutput[j:j + subLen] = 1 if self.verbosity >= 2: print "raw output:", encoded[:self.n] print "filtered output:", tmpOutput # ------------------------------------------------------------------------ # Find each run of 1's. nz = tmpOutput.nonzero()[0] runs = [] # will be tuples of (startIdx, runLength) run = [nz[0], 1] i = 1 while (i < len(nz)): if nz[i] == run[0] + run[1]: run[1] += 1 else: runs.append(run) run = [nz[i], 1] i += 1 runs.append(run) # If we have a periodic encoder, merge the first and last run if they # both go all the way to the edges if self.periodic and len(runs) > 1: if runs[0][0] == 0 and runs[-1][0] + runs[-1][1] == self.n: runs[-1][1] += runs[0][1] runs = runs[1:] # ------------------------------------------------------------------------ # Now, for each group of 1's, determine the "left" and "right" edges, where # the "left" edge is inset by halfwidth and the "right" edge is inset by # halfwidth. # For a group of width w or less, the "left" and "right" edge are both at # the center position of the group. ranges = [] for run in runs: (start, runLen) = run if runLen <= self.w: left = right = start + runLen / 2 else: left = start + self.halfwidth right = start + runLen - 1 - self.halfwidth # Convert to input space. 
if not self.periodic: inMin = (left - self.padding) * self.resolution + self.minval inMax = (right - self.padding) * self.resolution + self.minval else: inMin = (left - self.padding) * self.range / self.nInternal + self.minval inMax = (right - self.padding) * self.range / self.nInternal + self.minval # Handle wrap-around if periodic if self.periodic: if inMin >= self.maxval: inMin -= self.range inMax -= self.range # Clip low end if inMin < self.minval: inMin = self.minval if inMax < self.minval: inMax = self.minval # If we have a periodic encoder, and the max is past the edge, break into # 2 separate ranges if self.periodic and inMax >= self.maxval: ranges.append([inMin, self.maxval]) ranges.append([self.minval, inMax - self.range]) else: if inMax > self.maxval: inMax = self.maxval if inMin > self.maxval: inMin = self.maxval ranges.append([inMin, inMax]) desc = self._generateRangeDescription(ranges) # Return result if parentFieldName != '': fieldName = "%s.%s" % (parentFieldName, self.name) else: fieldName = self.name return ({fieldName: (ranges, desc)}, [fieldName]) ############################################################################# def _generateRangeDescription(self, ranges): """generate description from a text description of the ranges""" desc = "" numRanges = len(ranges) for i in xrange(numRanges): if ranges[i][0] != ranges[i][1]: desc += "%.2f-%.2f" % (ranges[i][0], ranges[i][1]) else: desc += "%.2f" % (ranges[i][0]) if i < numRanges - 1: desc += ", " return desc ############################################################################ def _getTopDownMapping(self): """ Return the interal _topDownMappingM matrix used for handling the bucketInfo() and topDownCompute() methods. This is a matrix, one row per category (bucket) where each row contains the encoded output for that category. """ # Do we need to build up our reverse mapping table? 
if self._topDownMappingM is None: # The input scalar value corresponding to each possible output encoding if self.periodic: self._topDownValues = numpy.arange(self.minval + self.resolution / 2.0, self.maxval, self.resolution) else: #Number of values is (max-min)/resolutions self._topDownValues = numpy.arange(self.minval, self.maxval + self.resolution / 2.0, self.resolution) # Each row represents an encoded output pattern numCategories = len(self._topDownValues) self._topDownMappingM = SM32(numCategories, self.n) outputSpace = numpy.zeros(self.n, dtype=GetNTAReal()) for i in xrange(numCategories): value = self._topDownValues[i] value = max(value, self.minval) value = min(value, self.maxval) self.encodeIntoArray(value, outputSpace, learn=False) self._topDownMappingM.setRowFromDense(i, outputSpace) return self._topDownMappingM ############################################################################ def getBucketValues(self): """ See the function description in base.py """ # Need to re-create? if self._bucketValues is None: topDownMappingM = self._getTopDownMapping() numBuckets = topDownMappingM.nRows() self._bucketValues = [] for bucketIdx in range(numBuckets): self._bucketValues.append(self.getBucketInfo([bucketIdx])[0].value) return self._bucketValues ############################################################################ def getBucketInfo(self, buckets): """ See the function description in base.py """ # Get/generate the topDown mapping table #NOTE: although variable topDownMappingM is unused, some (bad-style) actions #are executed during _getTopDownMapping() so this line must stay here topDownMappingM = self._getTopDownMapping() # The "category" is simply the bucket index category = buckets[0] encoding = self._topDownMappingM.getRow(category) # Which input value does this correspond to? 
if self.periodic: inputVal = self.minval + self.resolution/2 + category * self.resolution else: inputVal = self.minval + category * self.resolution return [EncoderResult(value=inputVal, scalar=inputVal, encoding=encoding)] ############################################################################ def topDownCompute(self, encoded): """ See the function description in base.py """ # Get/generate the topDown mapping table topDownMappingM = self._getTopDownMapping() # See which "category" we match the closest. category = topDownMappingM.rightVecProd(encoded).argmax() # Return that bucket info return self.getBucketInfo([category]) ############################################################################ def closenessScores(self, expValues, actValues, fractional=True): """ See the function description in base.py """ expValue = expValues[0] actValue = actValues[0] if self.periodic: expValue = expValue % self.maxval actValue = actValue % self.maxval err = abs(expValue - actValue) if self.periodic: err = min(err, self.maxval - err) if fractional: pctErr = float(err) / (self.maxval - self.minval) pctErr = min(1.0, pctErr) closeness = 1.0 - pctErr else: closeness = err return numpy.array([closeness]) ############################################################################ def dump(self): print "ScalarEncoder:" print " min: %f" % self.minval print " max: %f" % self.maxval print " w: %d" % self.w print " n: %d" % self.n print " resolution: %f" % self.resolution print " radius: %f" % self.radius print " periodic: %s" % self.periodic print " nInternal: %d" % self.nInternal print " rangeInternal: %f" % self.rangeInternal print " padding: %d" % self.padding
gpl-3.0
zmike/servo
tests/wpt/web-platform-tests/tools/pywebsocket/src/test/test_msgutil.py
413
54959
#!/usr/bin/env python # # Copyright 2012, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Tests for msgutil module.""" import array import Queue import random import struct import unittest import zlib import set_sys_path # Update sys.path to locate mod_pywebsocket module. 
from mod_pywebsocket import common from mod_pywebsocket.extensions import DeflateFrameExtensionProcessor from mod_pywebsocket.extensions import PerMessageCompressExtensionProcessor from mod_pywebsocket.extensions import PerMessageDeflateExtensionProcessor from mod_pywebsocket import msgutil from mod_pywebsocket.stream import InvalidUTF8Exception from mod_pywebsocket.stream import Stream from mod_pywebsocket.stream import StreamHixie75 from mod_pywebsocket.stream import StreamOptions from mod_pywebsocket import util from test import mock # We use one fixed nonce for testing instead of cryptographically secure PRNG. _MASKING_NONCE = 'ABCD' def _mask_hybi(frame): frame_key = map(ord, _MASKING_NONCE) frame_key_len = len(frame_key) result = array.array('B') result.fromstring(frame) count = 0 for i in xrange(len(result)): result[i] ^= frame_key[count] count = (count + 1) % frame_key_len return _MASKING_NONCE + result.tostring() def _install_extension_processor(processor, request, stream_options): response = processor.get_extension_response() if response is not None: processor.setup_stream_options(stream_options) request.ws_extension_processors.append(processor) def _create_request_from_rawdata( read_data, deflate_frame_request=None, permessage_compression_request=None, permessage_deflate_request=None): req = mock.MockRequest(connection=mock.MockConn(''.join(read_data))) req.ws_version = common.VERSION_HYBI_LATEST req.ws_extension_processors = [] processor = None if deflate_frame_request is not None: processor = DeflateFrameExtensionProcessor(deflate_frame_request) elif permessage_compression_request is not None: processor = PerMessageCompressExtensionProcessor( permessage_compression_request) elif permessage_deflate_request is not None: processor = PerMessageDeflateExtensionProcessor( permessage_deflate_request) stream_options = StreamOptions() if processor is not None: _install_extension_processor(processor, req, stream_options) req.ws_stream = Stream(req, 
stream_options) return req def _create_request(*frames): """Creates MockRequest using data given as frames. frames will be returned on calling request.connection.read() where request is MockRequest returned by this function. """ read_data = [] for (header, body) in frames: read_data.append(header + _mask_hybi(body)) return _create_request_from_rawdata(read_data) def _create_blocking_request(): """Creates MockRequest. Data written to a MockRequest can be read out by calling request.connection.written_data(). """ req = mock.MockRequest(connection=mock.MockBlockingConn()) req.ws_version = common.VERSION_HYBI_LATEST stream_options = StreamOptions() req.ws_stream = Stream(req, stream_options) return req def _create_request_hixie75(read_data=''): req = mock.MockRequest(connection=mock.MockConn(read_data)) req.ws_stream = StreamHixie75(req) return req def _create_blocking_request_hixie75(): req = mock.MockRequest(connection=mock.MockBlockingConn()) req.ws_stream = StreamHixie75(req) return req class BasicMessageTest(unittest.TestCase): """Basic tests for Stream.""" def test_send_message(self): request = _create_request() msgutil.send_message(request, 'Hello') self.assertEqual('\x81\x05Hello', request.connection.written_data()) payload = 'a' * 125 request = _create_request() msgutil.send_message(request, payload) self.assertEqual('\x81\x7d' + payload, request.connection.written_data()) def test_send_medium_message(self): payload = 'a' * 126 request = _create_request() msgutil.send_message(request, payload) self.assertEqual('\x81\x7e\x00\x7e' + payload, request.connection.written_data()) payload = 'a' * ((1 << 16) - 1) request = _create_request() msgutil.send_message(request, payload) self.assertEqual('\x81\x7e\xff\xff' + payload, request.connection.written_data()) def test_send_large_message(self): payload = 'a' * (1 << 16) request = _create_request() msgutil.send_message(request, payload) self.assertEqual('\x81\x7f\x00\x00\x00\x00\x00\x01\x00\x00' + payload, 
request.connection.written_data()) def test_send_message_unicode(self): request = _create_request() msgutil.send_message(request, u'\u65e5') # U+65e5 is encoded as e6,97,a5 in UTF-8 self.assertEqual('\x81\x03\xe6\x97\xa5', request.connection.written_data()) def test_send_message_fragments(self): request = _create_request() msgutil.send_message(request, 'Hello', False) msgutil.send_message(request, ' ', False) msgutil.send_message(request, 'World', False) msgutil.send_message(request, '!', True) self.assertEqual('\x01\x05Hello\x00\x01 \x00\x05World\x80\x01!', request.connection.written_data()) def test_send_fragments_immediate_zero_termination(self): request = _create_request() msgutil.send_message(request, 'Hello World!', False) msgutil.send_message(request, '', True) self.assertEqual('\x01\x0cHello World!\x80\x00', request.connection.written_data()) def test_receive_message(self): request = _create_request( ('\x81\x85', 'Hello'), ('\x81\x86', 'World!')) self.assertEqual('Hello', msgutil.receive_message(request)) self.assertEqual('World!', msgutil.receive_message(request)) payload = 'a' * 125 request = _create_request(('\x81\xfd', payload)) self.assertEqual(payload, msgutil.receive_message(request)) def test_receive_medium_message(self): payload = 'a' * 126 request = _create_request(('\x81\xfe\x00\x7e', payload)) self.assertEqual(payload, msgutil.receive_message(request)) payload = 'a' * ((1 << 16) - 1) request = _create_request(('\x81\xfe\xff\xff', payload)) self.assertEqual(payload, msgutil.receive_message(request)) def test_receive_large_message(self): payload = 'a' * (1 << 16) request = _create_request( ('\x81\xff\x00\x00\x00\x00\x00\x01\x00\x00', payload)) self.assertEqual(payload, msgutil.receive_message(request)) def test_receive_length_not_encoded_using_minimal_number_of_bytes(self): # Log warning on receiving bad payload length field that doesn't use # minimal number of bytes but continue processing. 
payload = 'a' # 1 byte can be represented without extended payload length field. request = _create_request( ('\x81\xff\x00\x00\x00\x00\x00\x00\x00\x01', payload)) self.assertEqual(payload, msgutil.receive_message(request)) def test_receive_message_unicode(self): request = _create_request(('\x81\x83', '\xe6\x9c\xac')) # U+672c is encoded as e6,9c,ac in UTF-8 self.assertEqual(u'\u672c', msgutil.receive_message(request)) def test_receive_message_erroneous_unicode(self): # \x80 and \x81 are invalid as UTF-8. request = _create_request(('\x81\x82', '\x80\x81')) # Invalid characters should raise InvalidUTF8Exception self.assertRaises(InvalidUTF8Exception, msgutil.receive_message, request) def test_receive_fragments(self): request = _create_request( ('\x01\x85', 'Hello'), ('\x00\x81', ' '), ('\x00\x85', 'World'), ('\x80\x81', '!')) self.assertEqual('Hello World!', msgutil.receive_message(request)) def test_receive_fragments_unicode(self): # UTF-8 encodes U+6f22 into e6bca2 and U+5b57 into e5ad97. 
request = _create_request( ('\x01\x82', '\xe6\xbc'), ('\x00\x82', '\xa2\xe5'), ('\x80\x82', '\xad\x97')) self.assertEqual(u'\u6f22\u5b57', msgutil.receive_message(request)) def test_receive_fragments_immediate_zero_termination(self): request = _create_request( ('\x01\x8c', 'Hello World!'), ('\x80\x80', '')) self.assertEqual('Hello World!', msgutil.receive_message(request)) def test_receive_fragments_duplicate_start(self): request = _create_request( ('\x01\x85', 'Hello'), ('\x01\x85', 'World')) self.assertRaises(msgutil.InvalidFrameException, msgutil.receive_message, request) def test_receive_fragments_intermediate_but_not_started(self): request = _create_request(('\x00\x85', 'Hello')) self.assertRaises(msgutil.InvalidFrameException, msgutil.receive_message, request) def test_receive_fragments_end_but_not_started(self): request = _create_request(('\x80\x85', 'Hello')) self.assertRaises(msgutil.InvalidFrameException, msgutil.receive_message, request) def test_receive_message_discard(self): request = _create_request( ('\x8f\x86', 'IGNORE'), ('\x81\x85', 'Hello'), ('\x8f\x89', 'DISREGARD'), ('\x81\x86', 'World!')) self.assertRaises(msgutil.UnsupportedFrameException, msgutil.receive_message, request) self.assertEqual('Hello', msgutil.receive_message(request)) self.assertRaises(msgutil.UnsupportedFrameException, msgutil.receive_message, request) self.assertEqual('World!', msgutil.receive_message(request)) def test_receive_close(self): request = _create_request( ('\x88\x8a', struct.pack('!H', 1000) + 'Good bye')) self.assertEqual(None, msgutil.receive_message(request)) self.assertEqual(1000, request.ws_close_code) self.assertEqual('Good bye', request.ws_close_reason) def test_send_longest_close(self): reason = 'a' * 123 request = _create_request( ('\x88\xfd', struct.pack('!H', common.STATUS_NORMAL_CLOSURE) + reason)) request.ws_stream.close_connection(common.STATUS_NORMAL_CLOSURE, reason) self.assertEqual(request.ws_close_code, common.STATUS_NORMAL_CLOSURE) 
self.assertEqual(request.ws_close_reason, reason) def test_send_close_too_long(self): request = _create_request() self.assertRaises(msgutil.BadOperationException, Stream.close_connection, request.ws_stream, common.STATUS_NORMAL_CLOSURE, 'a' * 124) def test_send_close_inconsistent_code_and_reason(self): request = _create_request() # reason parameter must not be specified when code is None. self.assertRaises(msgutil.BadOperationException, Stream.close_connection, request.ws_stream, None, 'a') def test_send_ping(self): request = _create_request() msgutil.send_ping(request, 'Hello World!') self.assertEqual('\x89\x0cHello World!', request.connection.written_data()) def test_send_longest_ping(self): request = _create_request() msgutil.send_ping(request, 'a' * 125) self.assertEqual('\x89\x7d' + 'a' * 125, request.connection.written_data()) def test_send_ping_too_long(self): request = _create_request() self.assertRaises(msgutil.BadOperationException, msgutil.send_ping, request, 'a' * 126) def test_receive_ping(self): """Tests receiving a ping control frame.""" def handler(request, message): request.called = True # Stream automatically respond to ping with pong without any action # by application layer. 
request = _create_request( ('\x89\x85', 'Hello'), ('\x81\x85', 'World')) self.assertEqual('World', msgutil.receive_message(request)) self.assertEqual('\x8a\x05Hello', request.connection.written_data()) request = _create_request( ('\x89\x85', 'Hello'), ('\x81\x85', 'World')) request.on_ping_handler = handler self.assertEqual('World', msgutil.receive_message(request)) self.assertTrue(request.called) def test_receive_longest_ping(self): request = _create_request( ('\x89\xfd', 'a' * 125), ('\x81\x85', 'World')) self.assertEqual('World', msgutil.receive_message(request)) self.assertEqual('\x8a\x7d' + 'a' * 125, request.connection.written_data()) def test_receive_ping_too_long(self): request = _create_request(('\x89\xfe\x00\x7e', 'a' * 126)) self.assertRaises(msgutil.InvalidFrameException, msgutil.receive_message, request) def test_receive_pong(self): """Tests receiving a pong control frame.""" def handler(request, message): request.called = True request = _create_request( ('\x8a\x85', 'Hello'), ('\x81\x85', 'World')) request.on_pong_handler = handler msgutil.send_ping(request, 'Hello') self.assertEqual('\x89\x05Hello', request.connection.written_data()) # Valid pong is received, but receive_message won't return for it. self.assertEqual('World', msgutil.receive_message(request)) # Check that nothing was written after receive_message call. self.assertEqual('\x89\x05Hello', request.connection.written_data()) self.assertTrue(request.called) def test_receive_unsolicited_pong(self): # Unsolicited pong is allowed from HyBi 07. request = _create_request( ('\x8a\x85', 'Hello'), ('\x81\x85', 'World')) msgutil.receive_message(request) request = _create_request( ('\x8a\x85', 'Hello'), ('\x81\x85', 'World')) msgutil.send_ping(request, 'Jumbo') # Body mismatch. 
msgutil.receive_message(request) def test_ping_cannot_be_fragmented(self): request = _create_request(('\x09\x85', 'Hello')) self.assertRaises(msgutil.InvalidFrameException, msgutil.receive_message, request) def test_ping_with_too_long_payload(self): request = _create_request(('\x89\xfe\x01\x00', 'a' * 256)) self.assertRaises(msgutil.InvalidFrameException, msgutil.receive_message, request) class DeflateFrameTest(unittest.TestCase): """Tests for checking deflate-frame extension.""" def test_send_message(self): compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION) request = _create_request_from_rawdata( '', deflate_frame_request=extension) msgutil.send_message(request, 'Hello') msgutil.send_message(request, 'World') expected = '' compressed_hello = compress.compress('Hello') compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH) compressed_hello = compressed_hello[:-4] expected += '\xc1%c' % len(compressed_hello) expected += compressed_hello compressed_world = compress.compress('World') compressed_world += compress.flush(zlib.Z_SYNC_FLUSH) compressed_world = compressed_world[:-4] expected += '\xc1%c' % len(compressed_world) expected += compressed_world self.assertEqual(expected, request.connection.written_data()) def test_send_message_bfinal(self): extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION) request = _create_request_from_rawdata( '', deflate_frame_request=extension) self.assertEquals(1, len(request.ws_extension_processors)) deflate_frame_processor = request.ws_extension_processors[0] deflate_frame_processor.set_bfinal(True) msgutil.send_message(request, 'Hello') msgutil.send_message(request, 'World') expected = '' compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) compressed_hello = compress.compress('Hello') compressed_hello += compress.flush(zlib.Z_FINISH) compressed_hello = compressed_hello + chr(0) 
expected += '\xc1%c' % len(compressed_hello) expected += compressed_hello compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) compressed_world = compress.compress('World') compressed_world += compress.flush(zlib.Z_FINISH) compressed_world = compressed_world + chr(0) expected += '\xc1%c' % len(compressed_world) expected += compressed_world self.assertEqual(expected, request.connection.written_data()) def test_send_message_comp_bit(self): compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION) request = _create_request_from_rawdata( '', deflate_frame_request=extension) self.assertEquals(1, len(request.ws_extension_processors)) deflate_frame_processor = request.ws_extension_processors[0] msgutil.send_message(request, 'Hello') deflate_frame_processor.disable_outgoing_compression() msgutil.send_message(request, 'Hello') deflate_frame_processor.enable_outgoing_compression() msgutil.send_message(request, 'Hello') expected = '' compressed_hello = compress.compress('Hello') compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH) compressed_hello = compressed_hello[:-4] expected += '\xc1%c' % len(compressed_hello) expected += compressed_hello expected += '\x81\x05Hello' compressed_2nd_hello = compress.compress('Hello') compressed_2nd_hello += compress.flush(zlib.Z_SYNC_FLUSH) compressed_2nd_hello = compressed_2nd_hello[:-4] expected += '\xc1%c' % len(compressed_2nd_hello) expected += compressed_2nd_hello self.assertEqual(expected, request.connection.written_data()) def test_send_message_no_context_takeover_parameter(self): compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION) extension.add_parameter('no_context_takeover', None) request = _create_request_from_rawdata( '', deflate_frame_request=extension) for i in xrange(3): 
msgutil.send_message(request, 'Hello') compressed_message = compress.compress('Hello') compressed_message += compress.flush(zlib.Z_SYNC_FLUSH) compressed_message = compressed_message[:-4] expected = '\xc1%c' % len(compressed_message) expected += compressed_message self.assertEqual( expected + expected + expected, request.connection.written_data()) def test_bad_request_parameters(self): """Tests that if there's anything wrong with deflate-frame extension request, deflate-frame is rejected. """ extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION) # max_window_bits less than 8 is illegal. extension.add_parameter('max_window_bits', '7') processor = DeflateFrameExtensionProcessor(extension) self.assertEqual(None, processor.get_extension_response()) extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION) # max_window_bits greater than 15 is illegal. extension.add_parameter('max_window_bits', '16') processor = DeflateFrameExtensionProcessor(extension) self.assertEqual(None, processor.get_extension_response()) extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION) # Non integer max_window_bits is illegal. extension.add_parameter('max_window_bits', 'foobar') processor = DeflateFrameExtensionProcessor(extension) self.assertEqual(None, processor.get_extension_response()) extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION) # no_context_takeover must not have any value. 
extension.add_parameter('no_context_takeover', 'foobar') processor = DeflateFrameExtensionProcessor(extension) self.assertEqual(None, processor.get_extension_response()) def test_response_parameters(self): extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION) processor = DeflateFrameExtensionProcessor(extension) processor.set_response_window_bits(8) response = processor.get_extension_response() self.assertTrue(response.has_parameter('max_window_bits')) self.assertEqual('8', response.get_parameter_value('max_window_bits')) extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION) processor = DeflateFrameExtensionProcessor(extension) processor.set_response_no_context_takeover(True) response = processor.get_extension_response() self.assertTrue(response.has_parameter('no_context_takeover')) self.assertTrue( response.get_parameter_value('no_context_takeover') is None) def test_receive_message(self): compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) data = '' compressed_hello = compress.compress('Hello') compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH) compressed_hello = compressed_hello[:-4] data += '\xc1%c' % (len(compressed_hello) | 0x80) data += _mask_hybi(compressed_hello) compressed_websocket = compress.compress('WebSocket') compressed_websocket += compress.flush(zlib.Z_FINISH) compressed_websocket += '\x00' data += '\xc1%c' % (len(compressed_websocket) | 0x80) data += _mask_hybi(compressed_websocket) compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) compressed_world = compress.compress('World') compressed_world += compress.flush(zlib.Z_SYNC_FLUSH) compressed_world = compressed_world[:-4] data += '\xc1%c' % (len(compressed_world) | 0x80) data += _mask_hybi(compressed_world) # Close frame data += '\x88\x8a' + _mask_hybi(struct.pack('!H', 1000) + 'Good bye') extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION) request = 
_create_request_from_rawdata( data, deflate_frame_request=extension) self.assertEqual('Hello', msgutil.receive_message(request)) self.assertEqual('WebSocket', msgutil.receive_message(request)) self.assertEqual('World', msgutil.receive_message(request)) self.assertEqual(None, msgutil.receive_message(request)) def test_receive_message_client_using_smaller_window(self): """Test that frames coming from a client which is using smaller window size that the server are correctly received. """ # Using the smallest window bits of 8 for generating input frames. compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -8) data = '' # Use a frame whose content is bigger than the clients' DEFLATE window # size before compression. The content mainly consists of 'a' but # repetition of 'b' is put at the head and tail so that if the window # size is big, the head is back-referenced but if small, not. payload = 'b' * 64 + 'a' * 1024 + 'b' * 64 compressed_hello = compress.compress(payload) compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH) compressed_hello = compressed_hello[:-4] data += '\xc1%c' % (len(compressed_hello) | 0x80) data += _mask_hybi(compressed_hello) # Close frame data += '\x88\x8a' + _mask_hybi(struct.pack('!H', 1000) + 'Good bye') extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION) request = _create_request_from_rawdata( data, deflate_frame_request=extension) self.assertEqual(payload, msgutil.receive_message(request)) self.assertEqual(None, msgutil.receive_message(request)) def test_receive_message_comp_bit(self): compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) data = '' compressed_hello = compress.compress('Hello') compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH) compressed_hello = compressed_hello[:-4] data += '\xc1%c' % (len(compressed_hello) | 0x80) data += _mask_hybi(compressed_hello) data += '\x81\x85' + _mask_hybi('Hello') compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, 
zlib.DEFLATED, -zlib.MAX_WBITS) compressed_2nd_hello = compress.compress('Hello') compressed_2nd_hello += compress.flush(zlib.Z_SYNC_FLUSH) compressed_2nd_hello = compressed_2nd_hello[:-4] data += '\xc1%c' % (len(compressed_2nd_hello) | 0x80) data += _mask_hybi(compressed_2nd_hello) extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION) request = _create_request_from_rawdata( data, deflate_frame_request=extension) for i in xrange(3): self.assertEqual('Hello', msgutil.receive_message(request)) def test_receive_message_various_btype(self): compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) data = '' compressed_hello = compress.compress('Hello') compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH) compressed_hello = compressed_hello[:-4] data += '\xc1%c' % (len(compressed_hello) | 0x80) data += _mask_hybi(compressed_hello) compressed_websocket = compress.compress('WebSocket') compressed_websocket += compress.flush(zlib.Z_FINISH) compressed_websocket += '\x00' data += '\xc1%c' % (len(compressed_websocket) | 0x80) data += _mask_hybi(compressed_websocket) compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) compressed_world = compress.compress('World') compressed_world += compress.flush(zlib.Z_SYNC_FLUSH) compressed_world = compressed_world[:-4] data += '\xc1%c' % (len(compressed_world) | 0x80) data += _mask_hybi(compressed_world) # Close frame data += '\x88\x8a' + _mask_hybi(struct.pack('!H', 1000) + 'Good bye') extension = common.ExtensionParameter(common.DEFLATE_FRAME_EXTENSION) request = _create_request_from_rawdata( data, deflate_frame_request=extension) self.assertEqual('Hello', msgutil.receive_message(request)) self.assertEqual('WebSocket', msgutil.receive_message(request)) self.assertEqual('World', msgutil.receive_message(request)) self.assertEqual(None, msgutil.receive_message(request)) class PerMessageDeflateTest(unittest.TestCase): """Tests for permessage-deflate 
extension.""" def test_send_message(self): extension = common.ExtensionParameter( common.PERMESSAGE_DEFLATE_EXTENSION) request = _create_request_from_rawdata( '', permessage_deflate_request=extension) msgutil.send_message(request, 'Hello') compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) compressed_hello = compress.compress('Hello') compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH) compressed_hello = compressed_hello[:-4] expected = '\xc1%c' % len(compressed_hello) expected += compressed_hello self.assertEqual(expected, request.connection.written_data()) def test_send_empty_message(self): """Test that an empty message is compressed correctly.""" extension = common.ExtensionParameter( common.PERMESSAGE_DEFLATE_EXTENSION) request = _create_request_from_rawdata( '', permessage_deflate_request=extension) msgutil.send_message(request, '') # Payload in binary: 0b00000010 0b00000000 # From LSB, # - 1 bit of BFINAL (0) # - 2 bits of BTYPE (01 that means fixed Huffman) # - 7 bits of the first code (0000000 that is the code for the # end-of-block) # - 1 bit of BFINAL (0) # - 2 bits of BTYPE (no compression) # - 3 bits of padding self.assertEqual('\xc1\x02\x02\x00', request.connection.written_data()) def test_send_message_with_null_character(self): """Test that a simple payload (one null) is framed correctly.""" extension = common.ExtensionParameter( common.PERMESSAGE_DEFLATE_EXTENSION) request = _create_request_from_rawdata( '', permessage_deflate_request=extension) msgutil.send_message(request, '\x00') # Payload in binary: 0b01100010 0b00000000 0b00000000 # From LSB, # - 1 bit of BFINAL (0) # - 2 bits of BTYPE (01 that means fixed Huffman) # - 8 bits of the first code (00110000 that is the code for the literal # alphabet 0x00) # - 7 bits of the second code (0000000 that is the code for the # end-of-block) # - 1 bit of BFINAL (0) # - 2 bits of BTYPE (no compression) # - 2 bits of padding self.assertEqual('\xc1\x03\x62\x00\x00', 
request.connection.written_data()) def test_send_two_messages(self): extension = common.ExtensionParameter( common.PERMESSAGE_DEFLATE_EXTENSION) request = _create_request_from_rawdata( '', permessage_deflate_request=extension) msgutil.send_message(request, 'Hello') msgutil.send_message(request, 'World') compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) expected = '' compressed_hello = compress.compress('Hello') compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH) compressed_hello = compressed_hello[:-4] expected += '\xc1%c' % len(compressed_hello) expected += compressed_hello compressed_world = compress.compress('World') compressed_world += compress.flush(zlib.Z_SYNC_FLUSH) compressed_world = compressed_world[:-4] expected += '\xc1%c' % len(compressed_world) expected += compressed_world self.assertEqual(expected, request.connection.written_data()) def test_send_message_fragmented(self): extension = common.ExtensionParameter( common.PERMESSAGE_DEFLATE_EXTENSION) request = _create_request_from_rawdata( '', permessage_deflate_request=extension) msgutil.send_message(request, 'Hello', end=False) msgutil.send_message(request, 'Goodbye', end=False) msgutil.send_message(request, 'World') compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) compressed_hello = compress.compress('Hello') compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH) expected = '\x41%c' % len(compressed_hello) expected += compressed_hello compressed_goodbye = compress.compress('Goodbye') compressed_goodbye += compress.flush(zlib.Z_SYNC_FLUSH) expected += '\x00%c' % len(compressed_goodbye) expected += compressed_goodbye compressed_world = compress.compress('World') compressed_world += compress.flush(zlib.Z_SYNC_FLUSH) compressed_world = compressed_world[:-4] expected += '\x80%c' % len(compressed_world) expected += compressed_world self.assertEqual(expected, request.connection.written_data()) def 
test_send_message_fragmented_empty_first_frame(self): extension = common.ExtensionParameter( common.PERMESSAGE_DEFLATE_EXTENSION) request = _create_request_from_rawdata( '', permessage_deflate_request=extension) msgutil.send_message(request, '', end=False) msgutil.send_message(request, 'Hello') compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) compressed_hello = compress.compress('') compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH) expected = '\x41%c' % len(compressed_hello) expected += compressed_hello compressed_empty = compress.compress('Hello') compressed_empty += compress.flush(zlib.Z_SYNC_FLUSH) compressed_empty = compressed_empty[:-4] expected += '\x80%c' % len(compressed_empty) expected += compressed_empty print '%r' % expected self.assertEqual(expected, request.connection.written_data()) def test_send_message_fragmented_empty_last_frame(self): extension = common.ExtensionParameter( common.PERMESSAGE_DEFLATE_EXTENSION) request = _create_request_from_rawdata( '', permessage_deflate_request=extension) msgutil.send_message(request, 'Hello', end=False) msgutil.send_message(request, '') compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) compressed_hello = compress.compress('Hello') compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH) expected = '\x41%c' % len(compressed_hello) expected += compressed_hello compressed_empty = compress.compress('') compressed_empty += compress.flush(zlib.Z_SYNC_FLUSH) compressed_empty = compressed_empty[:-4] expected += '\x80%c' % len(compressed_empty) expected += compressed_empty self.assertEqual(expected, request.connection.written_data()) def test_send_message_using_small_window(self): common_part = 'abcdefghijklmnopqrstuvwxyz' test_message = common_part + '-' * 30000 + common_part extension = common.ExtensionParameter( common.PERMESSAGE_DEFLATE_EXTENSION) extension.add_parameter('server_max_window_bits', '8') request = 
_create_request_from_rawdata( '', permessage_deflate_request=extension) msgutil.send_message(request, test_message) expected_websocket_header_size = 2 expected_websocket_payload_size = 91 actual_frame = request.connection.written_data() self.assertEqual(expected_websocket_header_size + expected_websocket_payload_size, len(actual_frame)) actual_header = actual_frame[0:expected_websocket_header_size] actual_payload = actual_frame[expected_websocket_header_size:] self.assertEqual( '\xc1%c' % expected_websocket_payload_size, actual_header) decompress = zlib.decompressobj(-8) decompressed_message = decompress.decompress( actual_payload + '\x00\x00\xff\xff') decompressed_message += decompress.flush() self.assertEqual(test_message, decompressed_message) self.assertEqual(0, len(decompress.unused_data)) self.assertEqual(0, len(decompress.unconsumed_tail)) def test_send_message_no_context_takeover_parameter(self): extension = common.ExtensionParameter( common.PERMESSAGE_DEFLATE_EXTENSION) extension.add_parameter('server_no_context_takeover', None) request = _create_request_from_rawdata( '', permessage_deflate_request=extension) for i in xrange(3): msgutil.send_message(request, 'Hello', end=False) msgutil.send_message(request, 'Hello', end=True) compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) first_hello = compress.compress('Hello') first_hello += compress.flush(zlib.Z_SYNC_FLUSH) expected = '\x41%c' % len(first_hello) expected += first_hello second_hello = compress.compress('Hello') second_hello += compress.flush(zlib.Z_SYNC_FLUSH) second_hello = second_hello[:-4] expected += '\x80%c' % len(second_hello) expected += second_hello self.assertEqual( expected + expected + expected, request.connection.written_data()) def test_send_message_fragmented_bfinal(self): extension = common.ExtensionParameter( common.PERMESSAGE_DEFLATE_EXTENSION) request = _create_request_from_rawdata( '', permessage_deflate_request=extension) self.assertEquals(1, 
len(request.ws_extension_processors)) request.ws_extension_processors[0].set_bfinal(True) msgutil.send_message(request, 'Hello', end=False) msgutil.send_message(request, 'World', end=True) expected = '' compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) compressed_hello = compress.compress('Hello') compressed_hello += compress.flush(zlib.Z_FINISH) compressed_hello = compressed_hello + chr(0) expected += '\x41%c' % len(compressed_hello) expected += compressed_hello compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) compressed_world = compress.compress('World') compressed_world += compress.flush(zlib.Z_FINISH) compressed_world = compressed_world + chr(0) expected += '\x80%c' % len(compressed_world) expected += compressed_world self.assertEqual(expected, request.connection.written_data()) def test_receive_message_deflate(self): compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) compressed_hello = compress.compress('Hello') compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH) compressed_hello = compressed_hello[:-4] data = '\xc1%c' % (len(compressed_hello) | 0x80) data += _mask_hybi(compressed_hello) # Close frame data += '\x88\x8a' + _mask_hybi(struct.pack('!H', 1000) + 'Good bye') extension = common.ExtensionParameter( common.PERMESSAGE_DEFLATE_EXTENSION) request = _create_request_from_rawdata( data, permessage_deflate_request=extension) self.assertEqual('Hello', msgutil.receive_message(request)) self.assertEqual(None, msgutil.receive_message(request)) def test_receive_message_random_section(self): """Test that a compressed message fragmented into lots of chunks is correctly received. 
""" random.seed(a=0) payload = ''.join( [chr(random.randint(0, 255)) for i in xrange(1000)]) compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) compressed_payload = compress.compress(payload) compressed_payload += compress.flush(zlib.Z_SYNC_FLUSH) compressed_payload = compressed_payload[:-4] # Fragment the compressed payload into lots of frames. bytes_chunked = 0 data = '' frame_count = 0 chunk_sizes = [] while bytes_chunked < len(compressed_payload): # Make sure that # - the length of chunks are equal or less than 125 so that we can # use 1 octet length header format for all frames. # - at least 10 chunks are created. chunk_size = random.randint( 1, min(125, len(compressed_payload) / 10, len(compressed_payload) - bytes_chunked)) chunk_sizes.append(chunk_size) chunk = compressed_payload[ bytes_chunked:bytes_chunked + chunk_size] bytes_chunked += chunk_size first_octet = 0x00 if len(data) == 0: first_octet = first_octet | 0x42 if bytes_chunked == len(compressed_payload): first_octet = first_octet | 0x80 data += '%c%c' % (first_octet, chunk_size | 0x80) data += _mask_hybi(chunk) frame_count += 1 print "Chunk sizes: %r" % chunk_sizes self.assertTrue(len(chunk_sizes) > 10) # Close frame data += '\x88\x8a' + _mask_hybi(struct.pack('!H', 1000) + 'Good bye') extension = common.ExtensionParameter( common.PERMESSAGE_DEFLATE_EXTENSION) request = _create_request_from_rawdata( data, permessage_deflate_request=extension) self.assertEqual(payload, msgutil.receive_message(request)) self.assertEqual(None, msgutil.receive_message(request)) def test_receive_two_messages(self): compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) data = '' compressed_hello = compress.compress('HelloWebSocket') compressed_hello += compress.flush(zlib.Z_SYNC_FLUSH) compressed_hello = compressed_hello[:-4] split_position = len(compressed_hello) / 2 data += '\x41%c' % (split_position | 0x80) data += 
_mask_hybi(compressed_hello[:split_position]) data += '\x80%c' % ((len(compressed_hello) - split_position) | 0x80) data += _mask_hybi(compressed_hello[split_position:]) compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) compressed_world = compress.compress('World') compressed_world += compress.flush(zlib.Z_SYNC_FLUSH) compressed_world = compressed_world[:-4] data += '\xc1%c' % (len(compressed_world) | 0x80) data += _mask_hybi(compressed_world) # Close frame data += '\x88\x8a' + _mask_hybi(struct.pack('!H', 1000) + 'Good bye') extension = common.ExtensionParameter( common.PERMESSAGE_DEFLATE_EXTENSION) request = _create_request_from_rawdata( data, permessage_deflate_request=extension) self.assertEqual('HelloWebSocket', msgutil.receive_message(request)) self.assertEqual('World', msgutil.receive_message(request)) self.assertEqual(None, msgutil.receive_message(request)) def test_receive_message_mixed_btype(self): """Test that a message compressed using lots of DEFLATE blocks with various flush mode is correctly received. """ random.seed(a=0) payload = ''.join( [chr(random.randint(0, 255)) for i in xrange(1000)]) compress = None # Fragment the compressed payload into lots of frames. bytes_chunked = 0 compressed_payload = '' chunk_sizes = [] methods = [] sync_used = False finish_used = False while bytes_chunked < len(payload): # Make sure at least 10 chunks are created. 
chunk_size = random.randint( 1, min(100, len(payload) - bytes_chunked)) chunk_sizes.append(chunk_size) chunk = payload[bytes_chunked:bytes_chunked + chunk_size] bytes_chunked += chunk_size if compress is None: compress = zlib.compressobj( zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -zlib.MAX_WBITS) if bytes_chunked == len(payload): compressed_payload += compress.compress(chunk) compressed_payload += compress.flush(zlib.Z_SYNC_FLUSH) compressed_payload = compressed_payload[:-4] else: method = random.randint(0, 1) methods.append(method) if method == 0: compressed_payload += compress.compress(chunk) compressed_payload += compress.flush(zlib.Z_SYNC_FLUSH) sync_used = True else: compressed_payload += compress.compress(chunk) compressed_payload += compress.flush(zlib.Z_FINISH) compress = None finish_used = True print "Chunk sizes: %r" % chunk_sizes self.assertTrue(len(chunk_sizes) > 10) print "Methods: %r" % methods self.assertTrue(sync_used) self.assertTrue(finish_used) self.assertTrue(125 < len(compressed_payload)) self.assertTrue(len(compressed_payload) < 65536) data = '\xc2\xfe' + struct.pack('!H', len(compressed_payload)) data += _mask_hybi(compressed_payload) # Close frame data += '\x88\x8a' + _mask_hybi(struct.pack('!H', 1000) + 'Good bye') extension = common.ExtensionParameter( common.PERMESSAGE_DEFLATE_EXTENSION) request = _create_request_from_rawdata( data, permessage_deflate_request=extension) self.assertEqual(payload, msgutil.receive_message(request)) self.assertEqual(None, msgutil.receive_message(request)) class PerMessageCompressTest(unittest.TestCase): """Tests for checking permessage-compression extension.""" def test_deflate_response_parameters(self): extension = common.ExtensionParameter( common.PERMESSAGE_COMPRESSION_EXTENSION) extension.add_parameter('method', 'deflate') processor = PerMessageCompressExtensionProcessor(extension) response = processor.get_extension_response() self.assertEqual('deflate', response.get_parameter_value('method')) extension 
= common.ExtensionParameter( common.PERMESSAGE_COMPRESSION_EXTENSION) extension.add_parameter('method', 'deflate') processor = PerMessageCompressExtensionProcessor(extension) def _compression_processor_hook(compression_processor): compression_processor.set_client_max_window_bits(8) compression_processor.set_client_no_context_takeover(True) processor.set_compression_processor_hook( _compression_processor_hook) response = processor.get_extension_response() self.assertEqual( 'deflate; client_max_window_bits=8; client_no_context_takeover', response.get_parameter_value('method')) class MessageTestHixie75(unittest.TestCase): """Tests for draft-hixie-thewebsocketprotocol-76 stream class.""" def test_send_message(self): request = _create_request_hixie75() msgutil.send_message(request, 'Hello') self.assertEqual('\x00Hello\xff', request.connection.written_data()) def test_send_message_unicode(self): request = _create_request_hixie75() msgutil.send_message(request, u'\u65e5') # U+65e5 is encoded as e6,97,a5 in UTF-8 self.assertEqual('\x00\xe6\x97\xa5\xff', request.connection.written_data()) def test_receive_message(self): request = _create_request_hixie75('\x00Hello\xff\x00World!\xff') self.assertEqual('Hello', msgutil.receive_message(request)) self.assertEqual('World!', msgutil.receive_message(request)) def test_receive_message_unicode(self): request = _create_request_hixie75('\x00\xe6\x9c\xac\xff') # U+672c is encoded as e6,9c,ac in UTF-8 self.assertEqual(u'\u672c', msgutil.receive_message(request)) def test_receive_message_erroneous_unicode(self): # \x80 and \x81 are invalid as UTF-8. 
request = _create_request_hixie75('\x00\x80\x81\xff') # Invalid characters should be replaced with # U+fffd REPLACEMENT CHARACTER self.assertEqual(u'\ufffd\ufffd', msgutil.receive_message(request)) def test_receive_message_discard(self): request = _create_request_hixie75('\x80\x06IGNORE\x00Hello\xff' '\x01DISREGARD\xff\x00World!\xff') self.assertEqual('Hello', msgutil.receive_message(request)) self.assertEqual('World!', msgutil.receive_message(request)) class MessageReceiverTest(unittest.TestCase): """Tests the Stream class using MessageReceiver.""" def test_queue(self): request = _create_blocking_request() receiver = msgutil.MessageReceiver(request) self.assertEqual(None, receiver.receive_nowait()) request.connection.put_bytes('\x81\x86' + _mask_hybi('Hello!')) self.assertEqual('Hello!', receiver.receive()) def test_onmessage(self): onmessage_queue = Queue.Queue() def onmessage_handler(message): onmessage_queue.put(message) request = _create_blocking_request() receiver = msgutil.MessageReceiver(request, onmessage_handler) request.connection.put_bytes('\x81\x86' + _mask_hybi('Hello!')) self.assertEqual('Hello!', onmessage_queue.get()) class MessageReceiverHixie75Test(unittest.TestCase): """Tests the StreamHixie75 class using MessageReceiver.""" def test_queue(self): request = _create_blocking_request_hixie75() receiver = msgutil.MessageReceiver(request) self.assertEqual(None, receiver.receive_nowait()) request.connection.put_bytes('\x00Hello!\xff') self.assertEqual('Hello!', receiver.receive()) def test_onmessage(self): onmessage_queue = Queue.Queue() def onmessage_handler(message): onmessage_queue.put(message) request = _create_blocking_request_hixie75() receiver = msgutil.MessageReceiver(request, onmessage_handler) request.connection.put_bytes('\x00Hello!\xff') self.assertEqual('Hello!', onmessage_queue.get()) class MessageSenderTest(unittest.TestCase): """Tests the Stream class using MessageSender.""" def test_send(self): request = _create_blocking_request() 
sender = msgutil.MessageSender(request) sender.send('World') self.assertEqual('\x81\x05World', request.connection.written_data()) def test_send_nowait(self): # Use a queue to check the bytes written by MessageSender. # request.connection.written_data() cannot be used here because # MessageSender runs in a separate thread. send_queue = Queue.Queue() def write(bytes): send_queue.put(bytes) request = _create_blocking_request() request.connection.write = write sender = msgutil.MessageSender(request) sender.send_nowait('Hello') sender.send_nowait('World') self.assertEqual('\x81\x05Hello', send_queue.get()) self.assertEqual('\x81\x05World', send_queue.get()) class MessageSenderHixie75Test(unittest.TestCase): """Tests the StreamHixie75 class using MessageSender.""" def test_send(self): request = _create_blocking_request_hixie75() sender = msgutil.MessageSender(request) sender.send('World') self.assertEqual('\x00World\xff', request.connection.written_data()) def test_send_nowait(self): # Use a queue to check the bytes written by MessageSender. # request.connection.written_data() cannot be used here because # MessageSender runs in a separate thread. send_queue = Queue.Queue() def write(bytes): send_queue.put(bytes) request = _create_blocking_request_hixie75() request.connection.write = write sender = msgutil.MessageSender(request) sender.send_nowait('Hello') sender.send_nowait('World') self.assertEqual('\x00Hello\xff', send_queue.get()) self.assertEqual('\x00World\xff', send_queue.get()) if __name__ == '__main__': unittest.main() # vi:sts=4 sw=4 et
mpl-2.0
axinging/chromium-crosswalk
tools/perf/measurements/repaint_unittest.py
14
2776
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

from telemetry import decorators
from telemetry.page import page as page_module
from telemetry.testing import options_for_unittests
from telemetry.testing import page_test_test_case
from telemetry.util import wpr_modes

from measurements import smoothness
from page_sets import repaint_helpers


class TestRepaintPage(page_module.Page):
  """Minimal blank page whose only interaction is forcing repaints."""

  def __init__(self, page_set, base_dir):
    super(TestRepaintPage, self).__init__('file://blank.html', page_set,
                                          base_dir)

  def RunPageInteractions(self, action_runner):
    # Delegates to the shared helper so repaint behavior matches real
    # page sets that use repaint_helpers.
    repaint_helpers.Repaint(action_runner)


class RepaintUnitTest(page_test_test_case.PageTestTestCase):
  """Smoke test for repaint measurement

     Runs repaint measurement on a simple page and verifies
     that all metrics were added to the results. The test is purely functional,
     i.e. it only checks if the metrics are present and non-zero.
  """

  def setUp(self):
    self._options = options_for_unittests.GetCopy()
    # Disable web-page-replay so the test never touches a replay archive.
    self._options.browser_options.wpr_mode = wpr_modes.WPR_OFF

  @decorators.Disabled('chromeos')  # crbug.com/483212
  def testRepaint(self):
    ps = self.CreateEmptyPageSet()
    ps.AddStory(TestRepaintPage(ps, ps.base_dir))
    measurement = smoothness.Repaint()
    results = self.RunMeasurement(measurement, ps, options=self._options)
    # assertEqual is the non-deprecated spelling of assertEquals.
    self.assertEqual(0, len(results.failures))

    frame_times = results.FindAllPageSpecificValuesNamed('frame_times')
    self.assertEqual(len(frame_times), 1)
    self.assertGreater(frame_times[0].GetRepresentativeNumber(), 0)

    mean_frame_time = results.FindAllPageSpecificValuesNamed('mean_frame_time')
    self.assertEqual(len(mean_frame_time), 1)
    self.assertGreater(mean_frame_time[0].GetRepresentativeNumber(), 0)

    frame_time_discrepancy = results.FindAllPageSpecificValuesNamed(
        'frame_time_discrepancy')
    self.assertEqual(len(frame_time_discrepancy), 1)
    self.assertGreater(frame_time_discrepancy[0].GetRepresentativeNumber(), 0)

    # percentage_smooth may legitimately be zero, hence GreaterEqual.
    percentage_smooth = results.FindAllPageSpecificValuesNamed(
        'percentage_smooth')
    self.assertEqual(len(percentage_smooth), 1)
    self.assertGreaterEqual(percentage_smooth[0].GetRepresentativeNumber(), 0)

    # Make sure that we don't have extra timeline based metrics that are not
    # related to smoothness.
    mainthread_jank = results.FindAllPageSpecificValuesNamed(
        'responsive-total_big_jank_thread_time')
    self.assertEqual(len(mainthread_jank), 0)

  @decorators.Disabled('android')
  def testCleanUpTrace(self):
    self.TestTracingCleanedUp(smoothness.Repaint, self._options)
bsd-3-clause
JoshRosen/spark
examples/src/main/python/ml/polynomial_expansion_example.py
123
1522
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from __future__ import print_function # $example on$ from pyspark.ml.feature import PolynomialExpansion from pyspark.ml.linalg import Vectors # $example off$ from pyspark.sql import SparkSession if __name__ == "__main__": spark = SparkSession\ .builder\ .appName("PolynomialExpansionExample")\ .getOrCreate() # $example on$ df = spark.createDataFrame([ (Vectors.dense([2.0, 1.0]),), (Vectors.dense([0.0, 0.0]),), (Vectors.dense([3.0, -1.0]),) ], ["features"]) polyExpansion = PolynomialExpansion(degree=3, inputCol="features", outputCol="polyFeatures") polyDF = polyExpansion.transform(df) polyDF.show(truncate=False) # $example off$ spark.stop()
apache-2.0
friend0/tower
tower/map/space.py
1
4589
# coding=utf-8
"""
Region will serve as an abstract base class (ABC) to implement a standard interface amongst both Map and
Surface objects
"""

from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

import abc
import collections

from future.utils import with_metaclass
from builtins import *


class Space(with_metaclass(abc.ABCMeta, object)):
    """
    `Space` represents a base class for flat, three-dimensional space. Concrete implementations of
    Space will implement abstractions like curved space (geodetic map) while exposing only fundamental
    abstractions of flat space to planning algorithms.

    Space defines as attribute the notion of a Point. Concrete implementations of Space may extend
    this idea to geographic coordinates, and etc., for example by making `lat` a property of the class
    `Map,` implementing Space. Eg. Space has a class attribute `Point`, which provides the Cartesian
    idea of a point in a plane.

    For our purposes, we'd like to align Optitrack's origin with that of the Surface defined by
    height-field or function. For Maps, we'd like to align the origin to some coordinate representing
    the center of the geographic region covered by the Tower.

    We take standard world coordinates as our convention. This means delta(y) is proportional to
    delta(lat) and that delta(x) corresponds to delta(lon). The relations between these quantities is
    abstracted.
    """

    # todo: include auto conversion dictionary, i.e. enable user to request target unit conversion
    # from base unit

    # NOTE: the original code used `abc.abstractproperty`, which is deprecated since Python 3.3.
    # The stacked `@property` + `@abc.abstractmethod` form below is the documented replacement and
    # behaves identically for subclasses (they still override with a plain `@property`).

    @property
    @abc.abstractmethod
    def units(self):
        """
        The units of this space.

        A point should travel with its units, in case it needs to be converted.
        """

    @property
    @abc.abstractmethod
    def x(self):
        """
        Define how we refer to the x axis in a concrete implementation.

        :return: A string corresponding to the x axis in the concrete implementation. For example, a
            map implementation could expose a point's longitude through the x variable by
            returning 'lon'.
        """

    @property
    @abc.abstractmethod
    def y(self):
        """
        Define how we refer to the y axis in a concrete implementation.

        :return: A string corresponding to the y axis in the concrete implementation. For example, a
            map implementation could expose a point's latitude through the y variable by
            returning 'lat'.
        """

    @property
    @abc.abstractmethod
    def name(self):
        """The name given to the named-tuple type produced by :meth:`point`."""

    @abc.abstractmethod
    def point(self):
        """
        Named-tuple factory that wraps the underlying `point` abstraction of a space into a universal
        container with x first, followed by y, and then the units. This gives users of the Space ABC a
        way to define x and y once, then retrieve a custom named-tuple object that is universally
        indexed by [x, y, units], allowing points to be passed around with well-defined compatibility
        criteria.

        A Map implementation of a space might do::

            Coord = some_map.point()

        With `x` and `y` defined appropriately as 'lon' and 'lat' respectively, we could do::

            point_a = Coord(lon=-122.0264, lat=36.9741)

        The tuple's type name comes from :attr:`name` (not from a parameter).

        :return: A named-tuple class with fields corresponding to x, y, and units for the concrete
            implementation of Space.
        """
        return collections.namedtuple(self.name, [self.x, self.y, self.units])

    @property
    @abc.abstractmethod
    def origin(self):
        """
        The reference point of the space — per the class docstring, Optitrack's origin for a Surface,
        or a coordinate representing the center of the covered geographic region for a Map.
        """

    @abc.abstractmethod
    def get_point_elevation(self):
        """Return the elevation at a single point (argument signature defined by implementations)."""

    @abc.abstractmethod
    def get_distance_between(self, point_a, point_b, *args, **kwargs):
        """
        :return: The distance between two points.
        """

    @abc.abstractmethod
    def get_edge(self, from_, to):
        """
        Sample data between two points.

        :return: An array of points.
        """

    @abc.abstractmethod
    def get_elevation_along_edge(self, from_, to):
        """
        Take as input an edge, which is an iterable of points, and get a set of elevations
        corresponding to the elevations at those points.

        :return: An iterable of the same length as the input, where each output corresponds to the
            input coordinate given in the sequence.
        """

    @abc.abstractmethod
    def get_surrounding_elevation(self):
        """Return elevation data around a point (argument signature defined by implementations)."""
isc
cewood/ansible-modules-core
utilities/helper/accelerate.py
7
27481
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2013, James Cammarata <jcammarata@ansible.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. DOCUMENTATION = ''' --- module: accelerate short_description: Enable accelerated mode on remote node description: - This modules launches an ephemeral I(accelerate) daemon on the remote node which Ansible can use to communicate with nodes at high speed. - The daemon listens on a configurable port for a configurable amount of time. - Fireball mode is AES encrypted version_added: "1.3" options: port: description: - TCP port for the socket connection required: false default: 5099 aliases: [] timeout: description: - The number of seconds the socket will wait for data. If none is received when the timeout value is reached, the connection will be closed. required: false default: 300 aliases: [] minutes: description: - The I(accelerate) listener daemon is started on nodes and will stay around for this number of minutes before turning itself off. required: false default: 30 ipv6: description: - The listener daemon on the remote host will bind to the ipv6 localhost socket if this parameter is set to true. 
required: false default: false multi_key: description: - When enabled, the daemon will open a local socket file which can be used by future daemon executions to upload a new key to the already running daemon, so that multiple users can connect using different keys. This access still requires an ssh connection as the uid for which the daemon is currently running. required: false default: no version_added: "1.6" notes: - See the advanced playbooks chapter for more about using accelerated mode. requirements: - "python >= 2.4" - "python-keyczar" author: "James Cammarata (@jimi-c)" ''' EXAMPLES = ''' # To use accelerate mode, simply add "accelerate: true" to your play. The initial # key exchange and starting up of the daemon will occur over SSH, but all commands and # subsequent actions will be conducted over the raw socket connection using AES encryption - hosts: devservers accelerate: true tasks: - command: /usr/bin/anything ''' import base64 import errno import getpass import json import os import os.path import pwd import signal import socket import struct import sys import syslog import tempfile import time import traceback import SocketServer import datetime from threading import Thread, Lock # import module snippets # we must import this here at the top so we can use get_module_path() from ansible.module_utils.basic import * # the chunk size to read and send, assuming mtu 1500 and # leaving room for base64 (+33%) encoding and header (100 bytes) # 4 * (975/3) + 100 = 1400 # which leaves room for the TCP/IP header CHUNK_SIZE=10240 # FIXME: this all should be moved to module_common, as it's # pretty much a copy from the callbacks/util code DEBUG_LEVEL=0 def log(msg, cap=0): global DEBUG_LEVEL if DEBUG_LEVEL >= cap: syslog.syslog(syslog.LOG_NOTICE|syslog.LOG_DAEMON, msg) def v(msg): log(msg, cap=1) def vv(msg): log(msg, cap=2) def vvv(msg): log(msg, cap=3) def vvvv(msg): log(msg, cap=4) HAS_KEYCZAR = False try: from keyczar.keys import AesKey HAS_KEYCZAR = True 
except ImportError: pass SOCKET_FILE = os.path.join(get_module_path(), '.ansible-accelerate', ".local.socket") def get_pid_location(module): """ Try to find a pid directory in the common locations, falling back to the user's home directory if no others exist """ for dir in ['/var/run', '/var/lib/run', '/run', os.path.expanduser("~/")]: try: if os.path.isdir(dir) and os.access(dir, os.R_OK|os.W_OK): return os.path.join(dir, '.accelerate.pid') except: pass module.fail_json(msg="couldn't find any valid directory to use for the accelerate pid file") # NOTE: this shares a fair amount of code in common with async_wrapper, if async_wrapper were a new module we could move # this into utils.module_common and probably should anyway def daemonize_self(module, password, port, minutes, pid_file): # daemonizing code: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66012 try: pid = os.fork() if pid > 0: vvv("exiting pid %s" % pid) # exit first parent module.exit_json(msg="daemonized accelerate on port %s for %s minutes with pid %s" % (port, minutes, str(pid))) except OSError: e = get_exception() message = "fork #1 failed: %d (%s)" % (e.errno, e.strerror) module.fail_json(msg=message) # decouple from parent environment os.chdir("/") os.setsid() os.umask(int('O22', 8)) # do second fork try: pid = os.fork() if pid > 0: log("daemon pid %s, writing %s" % (pid, pid_file)) pid_file = open(pid_file, "w") pid_file.write("%s" % pid) pid_file.close() vvv("pid file written") sys.exit(0) except OSError: e = get_exception() log('fork #2 failed: %d (%s)' % (e.errno, e.strerror)) sys.exit(1) dev_null = file('/dev/null','rw') os.dup2(dev_null.fileno(), sys.stdin.fileno()) os.dup2(dev_null.fileno(), sys.stdout.fileno()) os.dup2(dev_null.fileno(), sys.stderr.fileno()) log("daemonizing successful") class LocalSocketThread(Thread): server = None terminated = False def __init__(self, group=None, target=None, name=None, args=(), kwargs={}, Verbose=None): self.server = kwargs.get('server') 
Thread.__init__(self, group, target, name, args, kwargs, Verbose) def run(self): try: if os.path.exists(SOCKET_FILE): os.remove(SOCKET_FILE) else: dir = os.path.dirname(SOCKET_FILE) if os.path.exists(dir): if not os.path.isdir(dir): log("The socket file path (%s) exists, but is not a directory. No local connections will be available" % dir) return else: # make sure the directory is accessible only to this # user, as socket files derive their permissions from # the directory that contains them os.chmod(dir, int('0700', 8)) elif not os.path.exists(dir): os.makedirs(dir, int('O700', 8)) except OSError: pass self.s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) self.s.bind(SOCKET_FILE) self.s.listen(5) while not self.terminated: try: conn, addr = self.s.accept() vv("received local connection") data = "" while "\n" not in data: data += conn.recv(2048) try: try: new_key = AesKey.Read(data.strip()) found = False for key in self.server.key_list: try: new_key.Decrypt(key.Encrypt("foo")) found = True break except: pass if not found: vv("adding new key to the key list") self.server.key_list.append(new_key) conn.sendall("OK\n") else: vv("key already exists in the key list, ignoring") conn.sendall("EXISTS\n") # update the last event time so the server doesn't # shutdown sooner than expected for new cliets try: self.server.last_event_lock.acquire() self.server.last_event = datetime.datetime.now() finally: self.server.last_event_lock.release() except Exception: e = get_exception() vv("key loaded locally was invalid, ignoring (%s)" % e) conn.sendall("BADKEY\n") finally: try: conn.close() except: pass except: pass def terminate(self): super(LocalSocketThread, self).terminate() self.terminated = True self.s.shutdown(socket.SHUT_RDWR) self.s.close() class ThreadWithReturnValue(Thread): def __init__(self, group=None, target=None, name=None, args=(), kwargs={}, Verbose=None): Thread.__init__(self, group, target, name, args, kwargs, Verbose) self._return = None def run(self): if 
self._Thread__target is not None: self._return = self._Thread__target(*self._Thread__args, **self._Thread__kwargs) def join(self,timeout=None): Thread.join(self, timeout=timeout) return self._return class ThreadedTCPServer(SocketServer.ThreadingTCPServer): key_list = [] last_event = datetime.datetime.now() last_event_lock = Lock() def __init__(self, server_address, RequestHandlerClass, module, password, timeout, use_ipv6=False): self.module = module self.key_list.append(AesKey.Read(password)) self.allow_reuse_address = True self.timeout = timeout if use_ipv6: self.address_family = socket.AF_INET6 if self.module.params.get('multi_key', False): vv("starting thread to handle local connections for multiple keys") self.local_thread = LocalSocketThread(kwargs=dict(server=self)) self.local_thread.start() SocketServer.ThreadingTCPServer.__init__(self, server_address, RequestHandlerClass) def shutdown(self): self.running = False SocketServer.ThreadingTCPServer.shutdown(self) class ThreadedTCPRequestHandler(SocketServer.BaseRequestHandler): # the key to use for this connection active_key = None def send_data(self, data): try: self.server.last_event_lock.acquire() self.server.last_event = datetime.datetime.now() finally: self.server.last_event_lock.release() packed_len = struct.pack('!Q', len(data)) return self.request.sendall(packed_len + data) def recv_data(self): header_len = 8 # size of a packed unsigned long long data = "" vvvv("in recv_data(), waiting for the header") while len(data) < header_len: try: d = self.request.recv(header_len - len(data)) if not d: vvv("received nothing, bailing out") return None data += d except: # probably got a connection reset vvvv("exception received while waiting for recv(), returning None") return None vvvv("in recv_data(), got the header, unpacking") data_len = struct.unpack('!Q',data[:header_len])[0] data = data[header_len:] vvvv("data received so far (expecting %d): %d" % (data_len,len(data))) while len(data) < data_len: try: d = 
self.request.recv(data_len - len(data)) if not d: vvv("received nothing, bailing out") return None data += d vvvv("data received so far (expecting %d): %d" % (data_len,len(data))) except: # probably got a connection reset vvvv("exception received while waiting for recv(), returning None") return None vvvv("received all of the data, returning") try: self.server.last_event_lock.acquire() self.server.last_event = datetime.datetime.now() finally: self.server.last_event_lock.release() return data def handle(self): try: while True: vvvv("waiting for data") data = self.recv_data() if not data: vvvv("received nothing back from recv_data(), breaking out") break vvvv("got data, decrypting") if not self.active_key: for key in self.server.key_list: try: data = key.Decrypt(data) self.active_key = key break except: pass else: vv("bad decrypt, exiting the connection handler") return else: try: data = self.active_key.Decrypt(data) except: vv("bad decrypt, exiting the connection handler") return vvvv("decryption done, loading json from the data") data = json.loads(data) mode = data['mode'] response = {} last_pong = datetime.datetime.now() if mode == 'command': vvvv("received a command request, running it") twrv = ThreadWithReturnValue(target=self.command, args=(data,)) twrv.start() response = None while twrv.is_alive(): if (datetime.datetime.now() - last_pong).seconds >= 15: last_pong = datetime.datetime.now() vvvv("command still running, sending keepalive packet") data2 = json.dumps(dict(pong=True)) data2 = self.active_key.Encrypt(data2) self.send_data(data2) time.sleep(0.1) response = twrv._return vvvv("thread is done, response from join was %s" % response) elif mode == 'put': vvvv("received a put request, putting it") response = self.put(data) elif mode == 'fetch': vvvv("received a fetch request, getting it") response = self.fetch(data) elif mode == 'validate_user': vvvv("received a request to validate the user id") response = self.validate_user(data) vvvv("response result is 
%s" % str(response)) json_response = json.dumps(response) vvvv("dumped json is %s" % json_response) data2 = self.active_key.Encrypt(json_response) vvvv("sending the response back to the controller") self.send_data(data2) vvvv("done sending the response") if mode == 'validate_user' and response.get('rc') == 1: vvvv("detected a uid mismatch, shutting down") self.server.shutdown() except: tb = traceback.format_exc() log("encountered an unhandled exception in the handle() function") log("error was:\n%s" % tb) if self.active_key: data2 = json.dumps(dict(rc=1, failed=True, msg="unhandled error in the handle() function")) data2 = self.active_key.Encrypt(data2) self.send_data(data2) def validate_user(self, data): if 'username' not in data: return dict(failed=True, msg='No username specified') vvvv("validating we're running as %s" % data['username']) # get the current uid c_uid = os.getuid() try: # the target uid t_uid = pwd.getpwnam(data['username']).pw_uid except: vvvv("could not find user %s" % data['username']) return dict(failed=True, msg='could not find user %s' % data['username']) # and return rc=0 for success, rc=1 for failure if c_uid == t_uid: return dict(rc=0) else: return dict(rc=1) def command(self, data): if 'cmd' not in data: return dict(failed=True, msg='internal error: cmd is required') vvvv("executing: %s" % data['cmd']) use_unsafe_shell = False executable = data.get('executable') if executable: use_unsafe_shell = True rc, stdout, stderr = self.server.module.run_command(data['cmd'], executable=executable, use_unsafe_shell=use_unsafe_shell, close_fds=True) if stdout is None: stdout = '' if stderr is None: stderr = '' vvvv("got stdout: %s" % stdout) vvvv("got stderr: %s" % stderr) return dict(rc=rc, stdout=stdout, stderr=stderr) def fetch(self, data): if 'in_path' not in data: return dict(failed=True, msg='internal error: in_path is required') try: fd = file(data['in_path'], 'rb') fstat = os.stat(data['in_path']) vvv("FETCH file is %d bytes" % fstat.st_size) 
while fd.tell() < fstat.st_size: data = fd.read(CHUNK_SIZE) last = False if fd.tell() >= fstat.st_size: last = True data = dict(data=base64.b64encode(data), last=last) data = json.dumps(data) data = self.active_key.Encrypt(data) if self.send_data(data): return dict(failed=True, stderr="failed to send data") response = self.recv_data() if not response: log("failed to get a response, aborting") return dict(failed=True, stderr="Failed to get a response from %s" % self.host) response = self.active_key.Decrypt(response) response = json.loads(response) if response.get('failed',False): log("got a failed response from the master") return dict(failed=True, stderr="Master reported failure, aborting transfer") except Exception: e = get_exception() fd.close() tb = traceback.format_exc() log("failed to fetch the file: %s" % tb) return dict(failed=True, stderr="Could not fetch the file: %s" % str(e)) fd.close() return dict() def put(self, data): if 'data' not in data: return dict(failed=True, msg='internal error: data is required') if 'out_path' not in data: return dict(failed=True, msg='internal error: out_path is required') final_path = None if 'user' in data and data.get('user') != getpass.getuser(): vvv("the target user doesn't match this user, we'll move the file into place via sudo") tmp_path = os.path.expanduser('~/.ansible/tmp/') if not os.path.exists(tmp_path): try: os.makedirs(tmp_path, int('O700', 8)) except: return dict(failed=True, msg='could not create a temporary directory at %s' % tmp_path) (fd,out_path) = tempfile.mkstemp(prefix='ansible.', dir=tmp_path) out_fd = os.fdopen(fd, 'w', 0) final_path = data['out_path'] else: out_path = data['out_path'] out_fd = open(out_path, 'w') try: bytes=0 while True: out = base64.b64decode(data['data']) bytes += len(out) out_fd.write(out) response = json.dumps(dict()) response = self.active_key.Encrypt(response) self.send_data(response) if data['last']: break data = self.recv_data() if not data: raise "" data = 
self.active_key.Decrypt(data) data = json.loads(data) except: out_fd.close() tb = traceback.format_exc() log("failed to put the file: %s" % tb) return dict(failed=True, stdout="Could not write the file") vvvv("wrote %d bytes" % bytes) out_fd.close() if final_path: vvv("moving %s to %s" % (out_path, final_path)) self.server.module.atomic_move(out_path, final_path) return dict() def daemonize(module, password, port, timeout, minutes, use_ipv6, pid_file): try: daemonize_self(module, password, port, minutes, pid_file) def timer_handler(signum, _): try: try: server.last_event_lock.acquire() td = datetime.datetime.now() - server.last_event # older python timedelta objects don't have total_seconds(), # so we use the formula from the docs to calculate it total_seconds = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6 if total_seconds >= minutes * 60: log("server has been idle longer than the timeout, shutting down") server.running = False server.shutdown() else: # reschedule the check signal.alarm(1) except: pass finally: server.last_event_lock.release() signal.signal(signal.SIGALRM, timer_handler) signal.alarm(1) tries = 5 while tries > 0: try: if use_ipv6: address = ("::", port) else: address = ("0.0.0.0", port) server = ThreadedTCPServer(address, ThreadedTCPRequestHandler, module, password, timeout, use_ipv6=use_ipv6) server.allow_reuse_address = True break except Exception: e = get_exception() vv("Failed to create the TCP server (tries left = %d) (error: %s) " % (tries,e)) tries -= 1 time.sleep(0.2) if tries == 0: vv("Maximum number of attempts to create the TCP server reached, bailing out") raise Exception("max # of attempts to serve reached") # run the server in a separate thread to make signal handling work server_thread = Thread(target=server.serve_forever, kwargs=dict(poll_interval=0.1)) server_thread.start() server.running = True v("serving!") while server.running: time.sleep(1) # wait for the thread to exit fully server_thread.join() 
v("server thread terminated, exiting!") sys.exit(0) except Exception: e = get_exception() tb = traceback.format_exc() log("exception caught, exiting accelerated mode: %s\n%s" % (e, tb)) sys.exit(0) def main(): global DEBUG_LEVEL module = AnsibleModule( argument_spec = dict( port=dict(required=False, default=5099), ipv6=dict(required=False, default=False, type='bool'), multi_key=dict(required=False, default=False, type='bool'), timeout=dict(required=False, default=300), password=dict(required=True), minutes=dict(required=False, default=30), debug=dict(required=False, default=0, type='int') ), supports_check_mode=True ) syslog.openlog('ansible-%s' % module._name) password = base64.b64decode(module.params['password']) port = int(module.params['port']) timeout = int(module.params['timeout']) minutes = int(module.params['minutes']) debug = int(module.params['debug']) ipv6 = module.params['ipv6'] multi_key = module.params['multi_key'] if not HAS_KEYCZAR: module.fail_json(msg="keyczar is not installed (on the remote side)") DEBUG_LEVEL=debug pid_file = get_pid_location(module) daemon_pid = None daemon_running = False if os.path.exists(pid_file): try: daemon_pid = int(open(pid_file).read()) try: # sending signal 0 doesn't do anything to the # process, other than tell the calling program # whether other signals can be sent os.kill(daemon_pid, 0) except OSError: e = get_exception() message = 'the accelerate daemon appears to be running' message += 'as a different user that this user cannot access' message += 'pid=%s' % daemon_pid if e.errno == errno.EPERM: # no permissions means the pid is probably # running, but as a different user, so fail module.fail_json(msg=message) else: daemon_running = True except ValueError: # invalid pid file, unlink it - otherwise we don't care try: os.unlink(pid_file) except: pass if daemon_running and multi_key: # try to connect to the file socket for the daemon if it exists s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) try: try: 
s.connect(SOCKET_FILE) s.sendall(password + '\n') data = "" while '\n' not in data: data += s.recv(2048) res = data.strip() except: module.fail_json(msg="failed to connect to the local socket file") finally: try: s.close() except: pass if res in ("OK", "EXISTS"): module.exit_json(msg="transferred new key to the existing daemon") else: module.fail_json(msg="could not transfer new key: %s" % data.strip()) else: # try to start up the daemon daemonize(module, password, port, timeout, minutes, ipv6, pid_file) main()
gpl-3.0
louietsai/python-for-android
python-modules/twisted/twisted/manhole/telnet.py
81
3504
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.

"""Telnet-based shell."""

# twisted imports
from twisted.protocols import telnet
from twisted.internet import protocol
from twisted.python import log, failure

# system imports
import string, copy, sys
from cStringIO import StringIO


class Shell(telnet.Telnet):
    """A Python command-line shell.

    Presents an interactive ``>>>`` prompt over a telnet connection and
    evaluates received lines in the factory-provided namespace.  Multi-line
    statements are accumulated in ``self.lineBuffer`` until a blank line
    terminates them.
    """

    def connectionMade(self):
        # Reset the pending multi-line buffer for each new connection.
        telnet.Telnet.connectionMade(self)
        self.lineBuffer = []

    def loggedIn(self):
        """Show the initial interpreter prompt once authentication succeeds."""
        self.transport.write(">>> ")

    def checkUserAndPass(self, username, password):
        """Return true iff the credentials match the factory's username/password."""
        return ((self.factory.username == username) and (password == self.factory.password))

    def write(self, data):
        """Write some data to the transport.

        Also lets this object stand in for sys.stdout while user code runs
        (see doCommand), so print output goes back to the telnet client.
        """
        self.transport.write(data)

    def telnet_Command(self, cmd):
        """Handle one received line; returns the next telnet mode ("Command")."""
        if self.lineBuffer:
            if not cmd:
                # Blank line terminates a buffered multi-line block: join the
                # buffered lines and execute them as a single statement.
                cmd = string.join(self.lineBuffer, '\n') + '\n\n\n'
                self.doCommand(cmd)
                self.lineBuffer = []
                return "Command"
            else:
                # Still inside a multi-line block; keep buffering and show
                # the continuation prompt.
                self.lineBuffer.append(cmd)
                self.transport.write("... ")
                return "Command"
        else:
            self.doCommand(cmd)
            return "Command"

    def doCommand(self, cmd):
        """Compile and run *cmd* in the factory namespace, echoing the result.

        Tries 'eval' first (so expressions print their repr); falls back to
        'exec' for statements.  An "unexpected EOF" SyntaxError means the
        user has started a multi-line construct, so buffering begins instead
        of reporting an error.
        """
        # TODO -- refactor this, Reality.author.Author, and the manhole shell
        #to use common functionality (perhaps a twisted.python.code module?)
        fn = '$telnet$'
        result = None
        try:
            # Temporarily redirect stdout to this protocol so print output
            # from user code reaches the telnet client.
            out = sys.stdout
            sys.stdout = self
            try:
                code = compile(cmd,fn,'eval')
                result = eval(code, self.factory.namespace)
            except:
                try:
                    code = compile(cmd, fn, 'exec')
                    exec code in self.factory.namespace
                except SyntaxError, e:
                    if not self.lineBuffer and str(e)[:14] == "unexpected EOF":
                        # Incomplete statement: start multi-line buffering.
                        self.lineBuffer.append(cmd)
                        self.transport.write("... ")
                        return
                    else:
                        # Genuine syntax error: show the traceback and re-prompt.
                        failure.Failure().printTraceback(file=self)
                        log.deferr()
                        self.write('\r\n>>> ')
                        return
                except:
                    # Runtime error in user code: report and re-prompt.
                    # NOTE(review): `io` is assigned but never used here.
                    io = StringIO()
                    failure.Failure().printTraceback(file=self)
                    log.deferr()
                    self.write('\r\n>>> ')
                    return
        finally:
            # Always restore the real stdout, even on error/early return.
            sys.stdout = out
        # Mimic the interactive interpreter: bind '_' to the last result and
        # echo non-None results.
        self.factory.namespace['_'] = result
        if result is not None:
            self.transport.write(repr(result))
            self.transport.write('\r\n')
        self.transport.write(">>> ")


class ShellFactory(protocol.Factory):
    """Factory producing Shell protocols sharing a single eval namespace."""

    # Default credentials checked by Shell.checkUserAndPass.
    username = "admin"
    password = "admin"
    protocol = Shell
    service = None

    def __init__(self):
        # Namespace shared by all shells this factory creates; '_' mirrors
        # the interactive interpreter's last-result convention.
        self.namespace = {
            'factory': self,
            'service': None,
            '_': None
        }

    def setService(self, service):
        """Expose *service* both as an attribute and inside the namespace."""
        self.namespace['service'] = self.service = service

    def __getstate__(self):
        """This returns the persistent state of this shell factory.

        Copies the namespace and strips '__builtins__' so the pickled state
        stays small and portable.
        """
        dict = self.__dict__
        ns = copy.copy(dict['namespace'])
        dict['namespace'] = ns
        if ns.has_key('__builtins__'):
            del ns['__builtins__']
        return dict
apache-2.0
pcu4dros/pandora-core
workspace/lib/python3.5/site-packages/flask_cors/decorator.py
4
4937
# -*- coding: utf-8 -*-
"""
    decorator
    ~~~~
    This unit exposes a single decorator which should be used to wrap a
    Flask route with. It accepts all parameters and options as
    the CORS extension.

    :copyright: (c) 2016 by Cory Dolphin.
    :license: MIT, see LICENSE for more details.
"""
# Explicit import: `logging` was previously only available via the star
# import from .core below, which is fragile.
import logging
from functools import update_wrapper
from flask import make_response, request, current_app

from .core import *

LOG = logging.getLogger(__name__)


def cross_origin(*args, **kwargs):
    """
    This function is the decorator which is used to wrap a Flask route with.
    In the simplest case, simply use the default parameters to allow all
    origins in what is the most permissive configuration. If this method
    modifies state or performs authentication which may be brute-forced, you
    should add some degree of protection, such as Cross Site Forgery
    Request protection.

    :param origins:
        The origin, or list of origins to allow requests from.
        The origin(s) may be regular expressions, case-sensitive strings,
        or else an asterisk.

        Default : '*'
    :type origins: list, string or regex

    :param methods:
        The method or list of methods which the allowed origins are allowed
        to access for non-simple requests.

        Default : [GET, HEAD, POST, OPTIONS, PUT, PATCH, DELETE]
    :type methods: list or string

    :param expose_headers:
        The header or list which are safe to expose to the API of a CORS API
        specification.

        Default : None
    :type expose_headers: list or string

    :param allow_headers:
        The header or list of header field names which can be used when this
        resource is accessed by allowed origins. The header(s) may be regular
        expressions, case-sensitive strings, or else an asterisk.

        Default : '*', allow all headers
    :type allow_headers: list, string or regex

    :param supports_credentials:
        Allows users to make authenticated requests. If true, injects the
        `Access-Control-Allow-Credentials` header in responses. This allows
        cookies and credentials to be submitted across domains.

        :note: This option cannot be used in conjuction with a '*' origin

        Default : False
    :type supports_credentials: bool

    :param max_age:
        The maximum time for which this CORS request maybe cached. This value
        is set as the `Access-Control-Max-Age` header.

        Default : None
    :type max_age: timedelta, integer, string or None

    :param send_wildcard:
        If True, and the origins parameter is `*`, a wildcard
        `Access-Control-Allow-Origin` header is sent, rather than the
        request's `Origin` header.

        Default : False
    :type send_wildcard: bool

    :param vary_header:
        If True, the header Vary: Origin will be returned as per the W3
        implementation guidelines.

        Setting this header when the `Access-Control-Allow-Origin` is
        dynamically generated (e.g. when there is more than one allowed
        origin, and an Origin than '*' is returned) informs CDNs and other
        caches that the CORS headers are dynamic, and cannot be cached.

        If False, the Vary header will never be injected or altered.

        Default : True
    :type vary_header: bool

    :param automatic_options:
        Only applies to the `cross_origin` decorator. If True, Flask-CORS
        will override Flask's default OPTIONS handling to return CORS headers
        for OPTIONS requests.

        Default : True
    :type automatic_options: bool
    """
    _options = kwargs

    def decorator(f):
        LOG.debug("Enabling %s for cross_origin using options:%s", f, _options)

        # If True, intercept OPTIONS requests by modifying the view function,
        # replicating Flask's default behavior, and wrapping the response with
        # CORS headers.
        #
        # If f.provide_automatic_options is unset or True, Flask's route
        # decorator (which is actually wraps the function object we return)
        # intercepts OPTIONS handling, and requests will not have CORS headers
        if _options.get('automatic_options', True):
            f.required_methods = getattr(f, 'required_methods', set())
            f.required_methods.add('OPTIONS')
            f.provide_automatic_options = False

        def wrapped_function(*args, **kwargs):
            # Handle setting of Flask-Cors parameters at request time, so
            # app-level configuration is honoured.
            options = get_cors_options(current_app, _options)

            if options.get('automatic_options') and request.method == 'OPTIONS':
                resp = current_app.make_default_options_response()
            else:
                resp = make_response(f(*args, **kwargs))

            set_cors_headers(resp, options)
            # Mark the response so the extension does not apply CORS headers
            # a second time.
            setattr(resp, FLASK_CORS_EVALUATED, True)
            return resp

        return update_wrapper(wrapped_function, f)
    return decorator
mit
sarahn/ganeti
lib/utils/__init__.py
2
22871
# # # Copyright (C) 2006, 2007, 2010, 2011 Google Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. """Ganeti utility module. This module holds functions that can be used in both daemons (all) and the command line scripts. """ # Allow wildcard import in pylint: disable=W0401 import os import re import errno import pwd import time import itertools import select import logging import signal from ganeti import errors from ganeti import constants from ganeti import compat from ganeti import pathutils from ganeti.utils.algo import * from ganeti.utils.filelock import * from ganeti.utils.hash import * from ganeti.utils.io import * from ganeti.utils.log import * from ganeti.utils.lvm import * from ganeti.utils.mlock import * from ganeti.utils.nodesetup import * from ganeti.utils.process import * from ganeti.utils.retry import * from ganeti.utils.text import * from ganeti.utils.wrapper import * from ganeti.utils.x509 import * _VALID_SERVICE_NAME_RE = re.compile("^[-_.a-zA-Z0-9]{1,128}$") UUID_RE = re.compile(constants.UUID_REGEX) def ForceDictType(target, key_types, allowed_values=None): """Force the values of a dict to have certain types. 
@type target: dict @param target: the dict to update @type key_types: dict @param key_types: dict mapping target dict keys to types in constants.ENFORCEABLE_TYPES @type allowed_values: list @keyword allowed_values: list of specially allowed values """ if allowed_values is None: allowed_values = [] if not isinstance(target, dict): msg = "Expected dictionary, got '%s'" % target raise errors.TypeEnforcementError(msg) for key in target: if key not in key_types: msg = "Unknown parameter '%s'" % key raise errors.TypeEnforcementError(msg) if target[key] in allowed_values: continue ktype = key_types[key] if ktype not in constants.ENFORCEABLE_TYPES: msg = "'%s' has non-enforceable type %s" % (key, ktype) raise errors.ProgrammerError(msg) if ktype in (constants.VTYPE_STRING, constants.VTYPE_MAYBE_STRING): if target[key] is None and ktype == constants.VTYPE_MAYBE_STRING: pass elif not isinstance(target[key], basestring): if isinstance(target[key], bool) and not target[key]: target[key] = "" else: msg = "'%s' (value %s) is not a valid string" % (key, target[key]) raise errors.TypeEnforcementError(msg) elif ktype == constants.VTYPE_BOOL: if isinstance(target[key], basestring) and target[key]: if target[key].lower() == constants.VALUE_FALSE: target[key] = False elif target[key].lower() == constants.VALUE_TRUE: target[key] = True else: msg = "'%s' (value %s) is not a valid boolean" % (key, target[key]) raise errors.TypeEnforcementError(msg) elif target[key]: target[key] = True else: target[key] = False elif ktype == constants.VTYPE_SIZE: try: target[key] = ParseUnit(target[key]) except errors.UnitParseError, err: msg = "'%s' (value %s) is not a valid size. 
error: %s" % \ (key, target[key], err) raise errors.TypeEnforcementError(msg) elif ktype == constants.VTYPE_INT: try: target[key] = int(target[key]) except (ValueError, TypeError): msg = "'%s' (value %s) is not a valid integer" % (key, target[key]) raise errors.TypeEnforcementError(msg) def ValidateServiceName(name): """Validate the given service name. @type name: number or string @param name: Service name or port specification """ try: numport = int(name) except (ValueError, TypeError): # Non-numeric service name valid = _VALID_SERVICE_NAME_RE.match(name) else: # Numeric port (protocols other than TCP or UDP might need adjustments # here) valid = (numport >= 0 and numport < (1 << 16)) if not valid: raise errors.OpPrereqError("Invalid service name '%s'" % name, errors.ECODE_INVAL) return name def _ComputeMissingKeys(key_path, options, defaults): """Helper functions to compute which keys a invalid. @param key_path: The current key path (if any) @param options: The user provided options @param defaults: The default dictionary @return: A list of invalid keys """ defaults_keys = frozenset(defaults.keys()) invalid = [] for key, value in options.items(): if key_path: new_path = "%s/%s" % (key_path, key) else: new_path = key if key not in defaults_keys: invalid.append(new_path) elif isinstance(value, dict): invalid.extend(_ComputeMissingKeys(new_path, value, defaults[key])) return invalid def VerifyDictOptions(options, defaults): """Verify a dict has only keys set which also are in the defaults dict. 
@param options: The user provided options @param defaults: The default dictionary @raise error.OpPrereqError: If one of the keys is not supported """ invalid = _ComputeMissingKeys("", options, defaults) if invalid: raise errors.OpPrereqError("Provided option keys not supported: %s" % CommaJoin(invalid), errors.ECODE_INVAL) def ListVolumeGroups(): """List volume groups and their size @rtype: dict @return: Dictionary with keys volume name and values the size of the volume """ command = "vgs --noheadings --units m --nosuffix -o name,size" result = RunCmd(command) retval = {} if result.failed: return retval for line in result.stdout.splitlines(): try: name, size = line.split() size = int(float(size)) except (IndexError, ValueError), err: logging.error("Invalid output from vgs (%s): %s", err, line) continue retval[name] = size return retval def BridgeExists(bridge): """Check whether the given bridge exists in the system @type bridge: str @param bridge: the bridge name to check @rtype: boolean @return: True if it does """ return os.path.isdir("/sys/class/net/%s/bridge" % bridge) def TryConvert(fn, val): """Try to convert a value ignoring errors. This function tries to apply function I{fn} to I{val}. If no C{ValueError} or C{TypeError} exceptions are raised, it will return the result, else it will return the original value. Any other exceptions are propagated to the caller. @type fn: callable @param fn: function to apply to the value @param val: the value to be converted @return: The converted value if the conversion was successful, otherwise the original value. """ try: nv = fn(val) except (ValueError, TypeError): nv = val return nv def ParseCpuMask(cpu_mask): """Parse a CPU mask definition and return the list of CPU IDs. 
CPU mask format: comma-separated list of CPU IDs or dash-separated ID ranges Example: "0-2,5" -> "0,1,2,5" @type cpu_mask: str @param cpu_mask: CPU mask definition @rtype: list of int @return: list of CPU IDs """ if not cpu_mask: return [] cpu_list = [] for range_def in cpu_mask.split(","): boundaries = range_def.split("-") n_elements = len(boundaries) if n_elements > 2: raise errors.ParseError("Invalid CPU ID range definition" " (only one hyphen allowed): %s" % range_def) try: lower = int(boundaries[0]) except (ValueError, TypeError), err: raise errors.ParseError("Invalid CPU ID value for lower boundary of" " CPU ID range: %s" % str(err)) try: higher = int(boundaries[-1]) except (ValueError, TypeError), err: raise errors.ParseError("Invalid CPU ID value for higher boundary of" " CPU ID range: %s" % str(err)) if lower > higher: raise errors.ParseError("Invalid CPU ID range definition" " (%d > %d): %s" % (lower, higher, range_def)) cpu_list.extend(range(lower, higher + 1)) return cpu_list def ParseMultiCpuMask(cpu_mask): """Parse a multiple CPU mask definition and return the list of CPU IDs. CPU mask format: colon-separated list of comma-separated list of CPU IDs or dash-separated ID ranges, with optional "all" as CPU value Example: "0-2,5:all:1,5,6:2" -> [ [ 0,1,2,5 ], [ -1 ], [ 1, 5, 6 ], [ 2 ] ] @type cpu_mask: str @param cpu_mask: multiple CPU mask definition @rtype: list of lists of int @return: list of lists of CPU IDs """ if not cpu_mask: return [] cpu_list = [] for range_def in cpu_mask.split(constants.CPU_PINNING_SEP): if range_def == constants.CPU_PINNING_ALL: cpu_list.append([constants.CPU_PINNING_ALL_VAL, ]) else: # Uniquify and sort the list before adding cpu_list.append(sorted(set(ParseCpuMask(range_def)))) return cpu_list def GetHomeDir(user, default=None): """Try to get the homedir of the given user. The user can be passed either as a string (denoting the name) or as an integer (denoting the user id). 
If the user is not found, the C{default} argument is returned, which defaults to C{None}. """ try: if isinstance(user, basestring): result = pwd.getpwnam(user) elif isinstance(user, (int, long)): result = pwd.getpwuid(user) else: raise errors.ProgrammerError("Invalid type passed to GetHomeDir (%s)" % type(user)) except KeyError: return default return result.pw_dir def FirstFree(seq, base=0): """Returns the first non-existing integer from seq. The seq argument should be a sorted list of positive integers. The first time the index of an element is smaller than the element value, the index will be returned. The base argument is used to start at a different offset, i.e. C{[3, 4, 6]} with I{offset=3} will return 5. Example: C{[0, 1, 3]} will return I{2}. @type seq: sequence @param seq: the sequence to be analyzed. @type base: int @param base: use this value as the base index of the sequence @rtype: int @return: the first non-used index in the sequence """ for idx, elem in enumerate(seq): assert elem >= base, "Passed element is higher than base offset" if elem > idx + base: # idx is not used return idx + base return None def SingleWaitForFdCondition(fdobj, event, timeout): """Waits for a condition to occur on the socket. Immediately returns at the first interruption. @type fdobj: integer or object supporting a fileno() method @param fdobj: entity to wait for events on @type event: integer @param event: ORed condition (see select module) @type timeout: float or None @param timeout: Timeout in seconds @rtype: int or None @return: None for timeout, otherwise occured conditions """ check = (event | select.POLLPRI | select.POLLNVAL | select.POLLHUP | select.POLLERR) if timeout is not None: # Poller object expects milliseconds timeout *= 1000 poller = select.poll() poller.register(fdobj, event) try: # TODO: If the main thread receives a signal and we have no timeout, we # could wait forever. This should check a global "quit" flag or something # every so often. 
io_events = poller.poll(timeout) except select.error, err: if err[0] != errno.EINTR: raise io_events = [] if io_events and io_events[0][1] & check: return io_events[0][1] else: return None class FdConditionWaiterHelper(object): """Retry helper for WaitForFdCondition. This class contains the retried and wait functions that make sure WaitForFdCondition can continue waiting until the timeout is actually expired. """ def __init__(self, timeout): self.timeout = timeout def Poll(self, fdobj, event): result = SingleWaitForFdCondition(fdobj, event, self.timeout) if result is None: raise RetryAgain() else: return result def UpdateTimeout(self, timeout): self.timeout = timeout def WaitForFdCondition(fdobj, event, timeout): """Waits for a condition to occur on the socket. Retries until the timeout is expired, even if interrupted. @type fdobj: integer or object supporting a fileno() method @param fdobj: entity to wait for events on @type event: integer @param event: ORed condition (see select module) @type timeout: float or None @param timeout: Timeout in seconds @rtype: int or None @return: None for timeout, otherwise occured conditions """ if timeout is not None: retrywaiter = FdConditionWaiterHelper(timeout) try: result = Retry(retrywaiter.Poll, RETRY_REMAINING_TIME, timeout, args=(fdobj, event), wait_fn=retrywaiter.UpdateTimeout) except RetryTimeout: result = None else: result = None while result is None: result = SingleWaitForFdCondition(fdobj, event, timeout) return result def EnsureDaemon(name): """Check for and start daemon if not alive. 
""" result = RunCmd([pathutils.DAEMON_UTIL, "check-and-start", name]) if result.failed: logging.error("Can't start daemon '%s', failure %s, output: %s", name, result.fail_reason, result.output) return False return True def StopDaemon(name): """Stop daemon """ result = RunCmd([pathutils.DAEMON_UTIL, "stop", name]) if result.failed: logging.error("Can't stop daemon '%s', failure %s, output: %s", name, result.fail_reason, result.output) return False return True def SplitTime(value): """Splits time as floating point number into a tuple. @param value: Time in seconds @type value: int or float @return: Tuple containing (seconds, microseconds) """ (seconds, microseconds) = divmod(int(value * 1000000), 1000000) assert 0 <= seconds, \ "Seconds must be larger than or equal to 0, but are %s" % seconds assert 0 <= microseconds <= 999999, \ "Microseconds must be 0-999999, but are %s" % microseconds return (int(seconds), int(microseconds)) def MergeTime(timetuple): """Merges a tuple into time as a floating point number. @param timetuple: Time as tuple, (seconds, microseconds) @type timetuple: tuple @return: Time as a floating point number expressed in seconds """ (seconds, microseconds) = timetuple assert 0 <= seconds, \ "Seconds must be larger than or equal to 0, but are %s" % seconds assert 0 <= microseconds <= 999999, \ "Microseconds must be 0-999999, but are %s" % microseconds return float(seconds) + (float(microseconds) * 0.000001) def FindMatch(data, name): """Tries to find an item in a dictionary matching a name. Callers have to ensure the data names aren't contradictory (e.g. a regexp that matches a string). If the name isn't a direct key, all regular expression objects in the dictionary are matched against it. 
@type data: dict @param data: Dictionary containing data @type name: string @param name: Name to look for @rtype: tuple; (value in dictionary, matched groups as list) """ if name in data: return (data[name], []) for key, value in data.items(): # Regex objects if hasattr(key, "match"): m = key.match(name) if m: return (value, list(m.groups())) return None def GetMounts(filename=constants.PROC_MOUNTS): """Returns the list of mounted filesystems. This function is Linux-specific. @param filename: path of mounts file (/proc/mounts by default) @rtype: list of tuples @return: list of mount entries (device, mountpoint, fstype, options) """ # TODO(iustin): investigate non-Linux options (e.g. via mount output) data = [] mountlines = ReadFile(filename).splitlines() for line in mountlines: device, mountpoint, fstype, options, _ = line.split(None, 4) data.append((device, mountpoint, fstype, options)) return data def SignalHandled(signums): """Signal Handled decoration. This special decorator installs a signal handler and then calls the target function. The function must accept a 'signal_handlers' keyword argument, which will contain a dict indexed by signal number, with SignalHandler objects as values. The decorator can be safely stacked with iself, to handle multiple signals with different handlers. 
@type signums: list @param signums: signals to intercept """ def wrap(fn): def sig_function(*args, **kwargs): assert "signal_handlers" not in kwargs or \ kwargs["signal_handlers"] is None or \ isinstance(kwargs["signal_handlers"], dict), \ "Wrong signal_handlers parameter in original function call" if "signal_handlers" in kwargs and kwargs["signal_handlers"] is not None: signal_handlers = kwargs["signal_handlers"] else: signal_handlers = {} kwargs["signal_handlers"] = signal_handlers sighandler = SignalHandler(signums) try: for sig in signums: signal_handlers[sig] = sighandler return fn(*args, **kwargs) finally: sighandler.Reset() return sig_function return wrap def TimeoutExpired(epoch, timeout, _time_fn=time.time): """Checks whether a timeout has expired. """ return _time_fn() > (epoch + timeout) class SignalWakeupFd(object): try: # This is only supported in Python 2.5 and above (some distributions # backported it to Python 2.4) _set_wakeup_fd_fn = signal.set_wakeup_fd except AttributeError: # Not supported def _SetWakeupFd(self, _): # pylint: disable=R0201 return -1 else: def _SetWakeupFd(self, fd): return self._set_wakeup_fd_fn(fd) def __init__(self): """Initializes this class. """ (read_fd, write_fd) = os.pipe() # Once these succeeded, the file descriptors will be closed automatically. # Buffer size 0 is important, otherwise .read() with a specified length # might buffer data and the file descriptors won't be marked readable. self._read_fh = os.fdopen(read_fd, "r", 0) self._write_fh = os.fdopen(write_fd, "w", 0) self._previous = self._SetWakeupFd(self._write_fh.fileno()) # Utility functions self.fileno = self._read_fh.fileno self.read = self._read_fh.read def Reset(self): """Restores the previous wakeup file descriptor. """ if hasattr(self, "_previous") and self._previous is not None: self._SetWakeupFd(self._previous) self._previous = None def Notify(self): """Notifies the wakeup file descriptor. 
""" self._write_fh.write("\0") def __del__(self): """Called before object deletion. """ self.Reset() class SignalHandler(object): """Generic signal handler class. It automatically restores the original handler when deconstructed or when L{Reset} is called. You can either pass your own handler function in or query the L{called} attribute to detect whether the signal was sent. @type signum: list @ivar signum: the signals we handle @type called: boolean @ivar called: tracks whether any of the signals have been raised """ def __init__(self, signum, handler_fn=None, wakeup=None): """Constructs a new SignalHandler instance. @type signum: int or list of ints @param signum: Single signal number or set of signal numbers @type handler_fn: callable @param handler_fn: Signal handling function """ assert handler_fn is None or callable(handler_fn) self.signum = set(signum) self.called = False self._handler_fn = handler_fn self._wakeup = wakeup self._previous = {} try: for signum in self.signum: # Setup handler prev_handler = signal.signal(signum, self._HandleSignal) try: self._previous[signum] = prev_handler except: # Restore previous handler signal.signal(signum, prev_handler) raise except: # Reset all handlers self.Reset() # Here we have a race condition: a handler may have already been called, # but there's not much we can do about it at this point. raise def __del__(self): self.Reset() def Reset(self): """Restore previous handler. This will reset all the signals to their previous handlers. """ for signum, prev_handler in self._previous.items(): signal.signal(signum, prev_handler) # If successful, remove from dict del self._previous[signum] def Clear(self): """Unsets the L{called} flag. This function can be used in case a signal may arrive several times. """ self.called = False def _HandleSignal(self, signum, frame): """Actual signal handling function. 
""" # This is not nice and not absolutely atomic, but it appears to be the only # solution in Python -- there are no atomic types. self.called = True if self._wakeup: # Notify whoever is interested in signals self._wakeup.Notify() if self._handler_fn: self._handler_fn(signum, frame) class FieldSet(object): """A simple field set. Among the features are: - checking if a string is among a list of static string or regex objects - checking if a whole list of string matches - returning the matching groups from a regex match Internally, all fields are held as regular expression objects. """ def __init__(self, *items): self.items = [re.compile("^%s$" % value) for value in items] def Extend(self, other_set): """Extend the field set with the items from another one""" self.items.extend(other_set.items) def Matches(self, field): """Checks if a field matches the current set @type field: str @param field: the string to match @return: either None or a regular expression match object """ for m in itertools.ifilter(None, (val.match(field) for val in self.items)): return m return None def NonMatching(self, items): """Returns the list of fields not matching the current set @type items: list @param items: the list of fields to check @rtype: list @return: list of non-matching fields """ return [val for val in items if not self.Matches(val)]
gpl-2.0
pengshp/GitPython
git/test/test_config.py
7
8370
# test_config.py # Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors # # This module is part of GitPython and is released under # the BSD License: http://www.opensource.org/licenses/bsd-license.php from git.test.lib import ( TestCase, fixture_path, assert_equal, ) from gitdb.test.lib import with_rw_directory from git import ( GitConfigParser ) from git.compat import ( string_types, ) import io import os from git.config import cp class TestBase(TestCase): def _to_memcache(self, file_path): fp = open(file_path, "rb") sio = io.BytesIO(fp.read()) sio.name = file_path return sio def test_read_write(self): # writer must create the exact same file as the one read before for filename in ("git_config", "git_config_global"): file_obj = self._to_memcache(fixture_path(filename)) w_config = GitConfigParser(file_obj, read_only=False) w_config.read() # enforce reading assert w_config._sections w_config.write() # enforce writing # we stripped lines when reading, so the results differ assert file_obj.getvalue() self.assertEqual(file_obj.getvalue(), self._to_memcache(fixture_path(filename)).getvalue()) # creating an additional config writer must fail due to exclusive access self.failUnlessRaises(IOError, GitConfigParser, file_obj, read_only=False) # should still have a lock and be able to make changes assert w_config._lock._has_lock() # changes should be written right away sname = "my_section" oname = "mykey" val = "myvalue" w_config.add_section(sname) assert w_config.has_section(sname) w_config.set(sname, oname, val) assert w_config.has_option(sname, oname) assert w_config.get(sname, oname) == val sname_new = "new_section" oname_new = "new_key" ival = 10 w_config.set_value(sname_new, oname_new, ival) assert w_config.get_value(sname_new, oname_new) == ival file_obj.seek(0) r_config = GitConfigParser(file_obj, read_only=True) assert r_config.has_section(sname) assert r_config.has_option(sname, oname) assert r_config.get(sname, oname) == val w_config.release() 
# END for each filename def test_multi_line_config(self): file_obj = self._to_memcache(fixture_path("git_config_with_comments")) config = GitConfigParser(file_obj, read_only=False) ev = "ruby -e '\n" ev += " system %(git), %(merge-file), %(--marker-size=%L), %(%A), %(%O), %(%B)\n" ev += " b = File.read(%(%A))\n" ev += " b.sub!(/^<+ .*\\nActiveRecord::Schema\\.define.:version => (\\d+). do\\n=+\\nActiveRecord::Schema\\." ev += "define.:version => (\\d+). do\\n>+ .*/) do\n" ev += " %(ActiveRecord::Schema.define(:version => #{[$1, $2].max}) do)\n" ev += " end\n" ev += " File.open(%(%A), %(w)) {|f| f.write(b)}\n" ev += " exit 1 if b.include?(%(<)*%L)'" assert_equal(config.get('merge "railsschema"', 'driver'), ev) assert_equal(config.get('alias', 'lg'), "log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr)%Creset'" " --abbrev-commit --date=relative") assert len(config.sections()) == 23 def test_base(self): path_repo = fixture_path("git_config") path_global = fixture_path("git_config_global") r_config = GitConfigParser([path_repo, path_global], read_only=True) assert r_config.read_only num_sections = 0 num_options = 0 # test reader methods assert r_config._is_initialized is False for section in r_config.sections(): num_sections += 1 for option in r_config.options(section): num_options += 1 val = r_config.get(section, option) val_typed = r_config.get_value(section, option) assert isinstance(val_typed, (bool, int, float, ) + string_types) assert val assert "\n" not in option assert "\n" not in val # writing must fail self.failUnlessRaises(IOError, r_config.set, section, option, None) self.failUnlessRaises(IOError, r_config.remove_option, section, option) # END for each option self.failUnlessRaises(IOError, r_config.remove_section, section) # END for each section assert num_sections and num_options assert r_config._is_initialized is True # get value which doesnt exist, with default default = "my default value" assert r_config.get_value("doesnt", 
"exist", default) == default # it raises if there is no default though self.failUnlessRaises(cp.NoSectionError, r_config.get_value, "doesnt", "exist") @with_rw_directory def test_config_include(self, rw_dir): def write_test_value(cw, value): cw.set_value(value, 'value', value) # end def check_test_value(cr, value): assert cr.get_value(value, 'value') == value # end # PREPARE CONFIG FILE A fpa = os.path.join(rw_dir, 'a') cw = GitConfigParser(fpa, read_only=False) write_test_value(cw, 'a') fpb = os.path.join(rw_dir, 'b') fpc = os.path.join(rw_dir, 'c') cw.set_value('include', 'relative_path_b', 'b') cw.set_value('include', 'doesntexist', 'foobar') cw.set_value('include', 'relative_cycle_a_a', 'a') cw.set_value('include', 'absolute_cycle_a_a', fpa) cw.release() assert os.path.exists(fpa) # PREPARE CONFIG FILE B cw = GitConfigParser(fpb, read_only=False) write_test_value(cw, 'b') cw.set_value('include', 'relative_cycle_b_a', 'a') cw.set_value('include', 'absolute_cycle_b_a', fpa) cw.set_value('include', 'relative_path_c', 'c') cw.set_value('include', 'absolute_path_c', fpc) cw.release() # PREPARE CONFIG FILE C cw = GitConfigParser(fpc, read_only=False) write_test_value(cw, 'c') cw.release() cr = GitConfigParser(fpa, read_only=True) for tv in ('a', 'b', 'c'): check_test_value(cr, tv) # end for each test to verify assert len(cr.items('include')) == 8, "Expected all include sections to be merged" cr.release() # test writable config writers - assure write-back doesn't involve includes cw = GitConfigParser(fpa, read_only=False, merge_includes=True) tv = 'x' write_test_value(cw, tv) cw.release() cr = GitConfigParser(fpa, read_only=True) self.failUnlessRaises(cp.NoSectionError, check_test_value, cr, tv) cr.release() # But can make it skip includes alltogether, and thus allow write-backs cw = GitConfigParser(fpa, read_only=False, merge_includes=False) write_test_value(cw, tv) cw.release() cr = GitConfigParser(fpa, read_only=True) check_test_value(cr, tv) cr.release() def 
test_rename(self): file_obj = self._to_memcache(fixture_path('git_config')) cw = GitConfigParser(file_obj, read_only=False, merge_includes=False) self.failUnlessRaises(ValueError, cw.rename_section, "doesntexist", "foo") self.failUnlessRaises(ValueError, cw.rename_section, "core", "include") nn = "bee" assert cw.rename_section('core', nn) is cw assert not cw.has_section('core') assert len(cw.items(nn)) == 4 cw.release() def test_complex_aliases(self): file_obj = self._to_memcache(fixture_path('.gitconfig')) w_config = GitConfigParser(file_obj, read_only=False) self.assertEqual(w_config.get('alias', 'rbi'), '"!g() { git rebase -i origin/${1:-master} ; } ; g"') w_config.release() self.assertEqual(file_obj.getvalue(), self._to_memcache(fixture_path('.gitconfig')).getvalue())
bsd-3-clause
tensorflow/models
official/nlp/data/create_xlnet_pretraining_data_test.py
1
10930
# Copyright 2021 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for official.nlp.data.create_xlnet_pretraining_data.""" import os import tempfile from typing import List from absl import logging from absl.testing import parameterized import numpy as np import tensorflow as tf from official.nlp.data import create_xlnet_pretraining_data as cpd _VOCAB_WORDS = ["vocab_1", "vocab_2"] # pylint: disable=invalid-name def _create_files( temp_dir: str, file_contents: List[List[str]]) -> List[str]: """Writes arbitrary documents into files.""" root_dir = tempfile.mkdtemp(dir=temp_dir) files = [] for i, file_content in enumerate(file_contents): destination = os.path.join(root_dir, "%d.txt" % i) with open(destination, "wb") as f: for line in file_content: f.write(line.encode("utf-8")) files.append(destination) return files def _get_mock_tokenizer(): """Creates a mock tokenizer.""" class MockSpieceModel: """Mock Spiece model for testing.""" def __init__(self): self._special_piece_to_id = { "<unk>": 0, } for piece in set(list('!"#$%&\"()*+,-./:;?@[\\]^_`{|}~')): self._special_piece_to_id[piece] = 1 def EncodeAsPieces(self, inputs: str) -> List[str]: return inputs def SampleEncodeAsPieces(self, inputs: str, nbest_size: int, theta: float) -> List[str]: del nbest_size, theta return inputs def PieceToId(self, piece: str) -> int: return ord(piece[0]) def IdToPiece(self, id_: int) -> str: return chr(id_) * 3 class Tokenizer: """Mock Tokenizer for 
testing.""" def __init__(self): self.sp_model = MockSpieceModel() def convert_ids_to_tokens(self, ids: List[int]) -> List[str]: return [self.sp_model.IdToPiece(id_) for id_ in ids] return Tokenizer() class PreprocessDataTest(tf.test.TestCase): def test_remove_extraneous_space(self): line = " abc " output = cpd._preprocess_line(line) self.assertEqual(output, "abc") def test_symbol_replacements(self): self.assertEqual(cpd._preprocess_line("``abc``"), "\"abc\"") self.assertEqual(cpd._preprocess_line("''abc''"), "\"abc\"") def test_accent_replacements(self): self.assertEqual(cpd._preprocess_line("åbc"), "abc") def test_lower_case(self): self.assertEqual(cpd._preprocess_line("ABC", do_lower_case=True), "abc") def test_end_to_end(self): self.assertEqual( cpd._preprocess_line("HelLo ``wórLd``", do_lower_case=True), "hello \"world\"") class PreprocessAndTokenizeFilesTest(tf.test.TestCase): def test_basic_end_to_end(self): documents = [ [ "This is sentence 1.\n", "This is sentence 2.\n", "Sentence 3 is what this is.\n", ], [ "This is the second document.\n", "This is the second line of the second document.\n" ], ] input_files = _create_files(temp_dir=self.get_temp_dir(), file_contents=documents) all_data = cpd.preprocess_and_tokenize_input_files( input_files=input_files, tokenizer=_get_mock_tokenizer(), log_example_freq=1) self.assertEqual(len(all_data), len(documents)) for token_ids, sentence_ids in all_data: self.assertEqual(len(token_ids), len(sentence_ids)) def test_basic_correctness(self): documents = [["a\n", "b\n", "c\n"]] input_files = _create_files(temp_dir=self.get_temp_dir(), file_contents=documents) all_data = cpd.preprocess_and_tokenize_input_files( input_files=input_files, tokenizer=_get_mock_tokenizer(), log_example_freq=1) token_ids, sentence_ids = all_data[0] self.assertAllClose(token_ids, [97, 98, 99]) self.assertAllClose(sentence_ids, [True, False, True]) def test_correctness_with_spaces_and_accents(self): documents = [[ " å \n", "b \n", " c \n", ]] 
input_files = _create_files(temp_dir=self.get_temp_dir(), file_contents=documents) all_data = cpd.preprocess_and_tokenize_input_files( input_files=input_files, tokenizer=_get_mock_tokenizer(), log_example_freq=1) token_ids, sentence_ids = all_data[0] self.assertAllClose(token_ids, [97, 98, 99]) self.assertAllClose(sentence_ids, [True, False, True]) class BatchReshapeTests(tf.test.TestCase): def test_basic_functionality(self): per_host_batch_size = 3 mock_shape = (20,) # Should truncate and reshape. expected_result_shape = (3, 6) tokens = np.zeros(mock_shape) sentence_ids = np.zeros(mock_shape) reshaped_data = cpd._reshape_to_batch_dimensions( tokens=tokens, sentence_ids=sentence_ids, per_host_batch_size=per_host_batch_size) for values in reshaped_data: self.assertEqual(len(values.flatten()) % per_host_batch_size, 0) self.assertAllClose(values.shape, expected_result_shape) class CreateSegmentsTest(tf.test.TestCase): def test_basic_functionality(self): data_length = 10 tokens = np.arange(data_length) sentence_ids = np.concatenate([np.zeros(data_length // 2), np.ones(data_length // 2)]) begin_index = 0 total_length = 8 a_data, b_data, label = cpd._create_a_and_b_segments( tokens=tokens, sentence_ids=sentence_ids, begin_index=begin_index, total_length=total_length, no_cut_probability=0.) self.assertAllClose(a_data, [0, 1, 2, 3]) self.assertAllClose(b_data, [5, 6, 7, 8]) self.assertEqual(label, 1) def test_no_cut(self): data_length = 10 tokens = np.arange(data_length) sentence_ids = np.zeros(data_length) begin_index = 0 total_length = 8 a_data, b_data, label = cpd._create_a_and_b_segments( tokens=tokens, sentence_ids=sentence_ids, begin_index=begin_index, total_length=total_length, no_cut_probability=0.) 
self.assertGreater(len(a_data), 0) self.assertGreater(len(b_data), 0) self.assertEqual(label, 0) def test_no_cut_with_probability(self): data_length = 10 tokens = np.arange(data_length) sentence_ids = np.concatenate([np.zeros(data_length // 2), np.ones(data_length // 2)]) begin_index = 0 total_length = 8 a_data, b_data, label = cpd._create_a_and_b_segments( tokens=tokens, sentence_ids=sentence_ids, begin_index=begin_index, total_length=total_length, no_cut_probability=1.) self.assertGreater(len(a_data), 0) self.assertGreater(len(b_data), 0) self.assertEqual(label, 0) class CreateInstancesTest(tf.test.TestCase): """Tests conversions of Token/Sentence IDs to training instances.""" def test_basic(self): data_length = 12 tokens = np.arange(data_length) sentence_ids = np.zeros(data_length) seq_length = 8 instances = cpd._convert_tokens_to_instances( tokens=tokens, sentence_ids=sentence_ids, per_host_batch_size=2, seq_length=seq_length, reuse_length=4, tokenizer=_get_mock_tokenizer(), bi_data=False, num_cores_per_host=1, logging_frequency=1) for instance in instances: self.assertEqual(len(instance.data), seq_length) self.assertEqual(len(instance.segment_ids), seq_length) self.assertIsInstance(instance.label, int) self.assertIsInstance(instance.boundary_indices, list) class TFRecordPathTests(tf.test.TestCase): def test_basic(self): base_kwargs = dict( per_host_batch_size=1, num_cores_per_host=1, seq_length=2, reuse_length=1) config1 = dict( prefix="test", suffix="", bi_data=True, use_eod_token=False, do_lower_case=True) config1.update(base_kwargs) expectation1 = "test_seqlen-2_reuse-1_bs-1_cores-1_uncased_bi.tfrecord" self.assertEqual(cpd.get_tfrecord_name(**config1), expectation1) config2 = dict( prefix="", suffix="test", bi_data=False, use_eod_token=False, do_lower_case=False) config2.update(base_kwargs) expectation2 = "seqlen-2_reuse-1_bs-1_cores-1_cased_uni_test.tfrecord" self.assertEqual(cpd.get_tfrecord_name(**config2), expectation2) config3 = dict( prefix="", 
suffix="", use_eod_token=True, bi_data=False, do_lower_case=True) config3.update(base_kwargs) expectation3 = "seqlen-2_reuse-1_bs-1_cores-1_uncased_eod_uni.tfrecord" self.assertEqual(cpd.get_tfrecord_name(**config3), expectation3) class TestCreateTFRecords(parameterized.TestCase, tf.test.TestCase): @parameterized.named_parameters( ("bi_data_only", True, False, False), ("eod_token_only", False, True, True), ("lower_case_only", False, False, True), ("all_enabled", True, True, True), ) def test_end_to_end(self, bi_data: bool, use_eod_token: bool, do_lower_case: bool): tokenizer = _get_mock_tokenizer() num_documents = 5 sentences_per_document = 10 document_length = 50 documents = [ ["a " * document_length for _ in range(sentences_per_document)] for _ in range(num_documents)] save_dir = tempfile.mkdtemp(dir=self.get_temp_dir()) files = _create_files(temp_dir=self.get_temp_dir(), file_contents=documents) cpd.create_tfrecords( tokenizer=tokenizer, input_file_or_files=",".join(files), use_eod_token=use_eod_token, do_lower_case=do_lower_case, per_host_batch_size=8, seq_length=8, reuse_length=4, bi_data=bi_data, num_cores_per_host=2, save_dir=save_dir) self.assertTrue(any(filter(lambda x: x.endswith(".json"), os.listdir(save_dir)))) self.assertTrue(any(filter(lambda x: x.endswith(".tfrecord"), os.listdir(save_dir)))) if __name__ == "__main__": np.random.seed(0) logging.set_verbosity(logging.INFO) tf.test.main()
apache-2.0
getavalon/core
docs/source/conf.py
3
5010
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # Avalon documentation build configuration file, created by # sphinx-quickstart on Tue Jun 20 20:12:06 2017. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # # import os # import sys # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.coverage', 'sphinx.ext.autosummary', # 'sphinx.ext.viewcode', 'sphinxcontrib.napoleon' ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. project = 'Avalon' copyright = '2017, Marcus Ottosson' author = 'Marcus Ottosson' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '2.0.0' # The full version, including alpha/beta/rc tags. release = '2.0.0' # The language for content autogenerated by Sphinx. 
Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'alabaster' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. htmlhelp_basename = 'Avalondoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ (master_doc, 'Avalon.tex', 'Avalon Documentation', 'Marcus Ottosson', 'manual'), ] # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'avalon', 'Avalon Documentation', [author], 1) ] # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'Avalon', 'Avalon Documentation', author, 'Avalon', 'One line description of project.', 'Miscellaneous'), ] def setup(app): app.add_stylesheet('css/avalon.css') app.add_stylesheet('css/pygments.css') # add_module_names = False html_show_sourcelink = False
mit
phdowling/scikit-learn
sklearn/preprocessing/label.py
137
27165
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr> # Mathieu Blondel <mathieu@mblondel.org> # Olivier Grisel <olivier.grisel@ensta.org> # Andreas Mueller <amueller@ais.uni-bonn.de> # Joel Nothman <joel.nothman@gmail.com> # Hamzeh Alsalhi <ha258@cornell.edu> # License: BSD 3 clause from collections import defaultdict import itertools import array import numpy as np import scipy.sparse as sp from ..base import BaseEstimator, TransformerMixin from ..utils.fixes import np_version from ..utils.fixes import sparse_min_max from ..utils.fixes import astype from ..utils.fixes import in1d from ..utils import column_or_1d from ..utils.validation import check_array from ..utils.validation import check_is_fitted from ..utils.validation import _num_samples from ..utils.multiclass import unique_labels from ..utils.multiclass import type_of_target from ..externals import six zip = six.moves.zip map = six.moves.map __all__ = [ 'label_binarize', 'LabelBinarizer', 'LabelEncoder', 'MultiLabelBinarizer', ] def _check_numpy_unicode_bug(labels): """Check that user is not subject to an old numpy bug Fixed in master before 1.7.0: https://github.com/numpy/numpy/pull/243 """ if np_version[:3] < (1, 7, 0) and labels.dtype.kind == 'U': raise RuntimeError("NumPy < 1.7.0 does not implement searchsorted" " on unicode data correctly. Please upgrade" " NumPy to use LabelEncoder with unicode inputs.") class LabelEncoder(BaseEstimator, TransformerMixin): """Encode labels with value between 0 and n_classes-1. Read more in the :ref:`User Guide <preprocessing_targets>`. Attributes ---------- classes_ : array of shape (n_class,) Holds the label for each class. Examples -------- `LabelEncoder` can be used to normalize labels. >>> from sklearn import preprocessing >>> le = preprocessing.LabelEncoder() >>> le.fit([1, 2, 2, 6]) LabelEncoder() >>> le.classes_ array([1, 2, 6]) >>> le.transform([1, 1, 2, 6]) #doctest: +ELLIPSIS array([0, 0, 1, 2]...) 
>>> le.inverse_transform([0, 0, 1, 2]) array([1, 1, 2, 6]) It can also be used to transform non-numerical labels (as long as they are hashable and comparable) to numerical labels. >>> le = preprocessing.LabelEncoder() >>> le.fit(["paris", "paris", "tokyo", "amsterdam"]) LabelEncoder() >>> list(le.classes_) ['amsterdam', 'paris', 'tokyo'] >>> le.transform(["tokyo", "tokyo", "paris"]) #doctest: +ELLIPSIS array([2, 2, 1]...) >>> list(le.inverse_transform([2, 2, 1])) ['tokyo', 'tokyo', 'paris'] """ def fit(self, y): """Fit label encoder Parameters ---------- y : array-like of shape (n_samples,) Target values. Returns ------- self : returns an instance of self. """ y = column_or_1d(y, warn=True) _check_numpy_unicode_bug(y) self.classes_ = np.unique(y) return self def fit_transform(self, y): """Fit label encoder and return encoded labels Parameters ---------- y : array-like of shape [n_samples] Target values. Returns ------- y : array-like of shape [n_samples] """ y = column_or_1d(y, warn=True) _check_numpy_unicode_bug(y) self.classes_, y = np.unique(y, return_inverse=True) return y def transform(self, y): """Transform labels to normalized encoding. Parameters ---------- y : array-like of shape [n_samples] Target values. Returns ------- y : array-like of shape [n_samples] """ check_is_fitted(self, 'classes_') classes = np.unique(y) _check_numpy_unicode_bug(classes) if len(np.intersect1d(classes, self.classes_)) < len(classes): diff = np.setdiff1d(classes, self.classes_) raise ValueError("y contains new labels: %s" % str(diff)) return np.searchsorted(self.classes_, y) def inverse_transform(self, y): """Transform labels back to original encoding. Parameters ---------- y : numpy array of shape [n_samples] Target values. 
Returns ------- y : numpy array of shape [n_samples] """ check_is_fitted(self, 'classes_') diff = np.setdiff1d(y, np.arange(len(self.classes_))) if diff: raise ValueError("y contains new labels: %s" % str(diff)) y = np.asarray(y) return self.classes_[y] class LabelBinarizer(BaseEstimator, TransformerMixin): """Binarize labels in a one-vs-all fashion Several regression and binary classification algorithms are available in the scikit. A simple way to extend these algorithms to the multi-class classification case is to use the so-called one-vs-all scheme. At learning time, this simply consists in learning one regressor or binary classifier per class. In doing so, one needs to convert multi-class labels to binary labels (belong or does not belong to the class). LabelBinarizer makes this process easy with the transform method. At prediction time, one assigns the class for which the corresponding model gave the greatest confidence. LabelBinarizer makes this easy with the inverse_transform method. Read more in the :ref:`User Guide <preprocessing_targets>`. Parameters ---------- neg_label : int (default: 0) Value with which negative labels must be encoded. pos_label : int (default: 1) Value with which positive labels must be encoded. sparse_output : boolean (default: False) True if the returned array from transform is desired to be in sparse CSR format. Attributes ---------- classes_ : array of shape [n_class] Holds the label for each class. y_type_ : str, Represents the type of the target data as evaluated by utils.multiclass.type_of_target. Possible type are 'continuous', 'continuous-multioutput', 'binary', 'multiclass', 'mutliclass-multioutput', 'multilabel-indicator', and 'unknown'. multilabel_ : boolean True if the transformer was fitted on a multilabel rather than a multiclass set of labels. The ``multilabel_`` attribute is deprecated and will be removed in 0.18 sparse_input_ : boolean, True if the input data to transform is given as a sparse matrix, False otherwise. 
indicator_matrix_ : str 'sparse' when the input data to tansform is a multilable-indicator and is sparse, None otherwise. The ``indicator_matrix_`` attribute is deprecated as of version 0.16 and will be removed in 0.18 Examples -------- >>> from sklearn import preprocessing >>> lb = preprocessing.LabelBinarizer() >>> lb.fit([1, 2, 6, 4, 2]) LabelBinarizer(neg_label=0, pos_label=1, sparse_output=False) >>> lb.classes_ array([1, 2, 4, 6]) >>> lb.transform([1, 6]) array([[1, 0, 0, 0], [0, 0, 0, 1]]) Binary targets transform to a column vector >>> lb = preprocessing.LabelBinarizer() >>> lb.fit_transform(['yes', 'no', 'no', 'yes']) array([[1], [0], [0], [1]]) Passing a 2D matrix for multilabel classification >>> import numpy as np >>> lb.fit(np.array([[0, 1, 1], [1, 0, 0]])) LabelBinarizer(neg_label=0, pos_label=1, sparse_output=False) >>> lb.classes_ array([0, 1, 2]) >>> lb.transform([0, 1, 2, 1]) array([[1, 0, 0], [0, 1, 0], [0, 0, 1], [0, 1, 0]]) See also -------- label_binarize : function to perform the transform operation of LabelBinarizer with fixed classes. """ def __init__(self, neg_label=0, pos_label=1, sparse_output=False): if neg_label >= pos_label: raise ValueError("neg_label={0} must be strictly less than " "pos_label={1}.".format(neg_label, pos_label)) if sparse_output and (pos_label == 0 or neg_label != 0): raise ValueError("Sparse binarization is only supported with non " "zero pos_label and zero neg_label, got " "pos_label={0} and neg_label={1}" "".format(pos_label, neg_label)) self.neg_label = neg_label self.pos_label = pos_label self.sparse_output = sparse_output def fit(self, y): """Fit label binarizer Parameters ---------- y : numpy array of shape (n_samples,) or (n_samples, n_classes) Target values. The 2-d matrix should only contain 0 and 1, represents multilabel classification. Returns ------- self : returns an instance of self. 
""" self.y_type_ = type_of_target(y) if 'multioutput' in self.y_type_: raise ValueError("Multioutput target data is not supported with " "label binarization") if _num_samples(y) == 0: raise ValueError('y has 0 samples: %r' % y) self.sparse_input_ = sp.issparse(y) self.classes_ = unique_labels(y) return self def transform(self, y): """Transform multi-class labels to binary labels The output of transform is sometimes referred to by some authors as the 1-of-K coding scheme. Parameters ---------- y : numpy array or sparse matrix of shape (n_samples,) or (n_samples, n_classes) Target values. The 2-d matrix should only contain 0 and 1, represents multilabel classification. Sparse matrix can be CSR, CSC, COO, DOK, or LIL. Returns ------- Y : numpy array or CSR matrix of shape [n_samples, n_classes] Shape will be [n_samples, 1] for binary problems. """ check_is_fitted(self, 'classes_') y_is_multilabel = type_of_target(y).startswith('multilabel') if y_is_multilabel and not self.y_type_.startswith('multilabel'): raise ValueError("The object was not fitted with multilabel" " input.") return label_binarize(y, self.classes_, pos_label=self.pos_label, neg_label=self.neg_label, sparse_output=self.sparse_output) def inverse_transform(self, Y, threshold=None): """Transform binary labels back to multi-class labels Parameters ---------- Y : numpy array or sparse matrix with shape [n_samples, n_classes] Target values. All sparse matrices are converted to CSR before inverse transformation. threshold : float or None Threshold used in the binary and multi-label cases. Use 0 when: - Y contains the output of decision_function (classifier) Use 0.5 when: - Y contains the output of predict_proba If None, the threshold is assumed to be half way between neg_label and pos_label. Returns ------- y : numpy array or CSR matrix of shape [n_samples] Target values. 
Notes ----- In the case when the binary labels are fractional (probabilistic), inverse_transform chooses the class with the greatest value. Typically, this allows to use the output of a linear model's decision_function method directly as the input of inverse_transform. """ check_is_fitted(self, 'classes_') if threshold is None: threshold = (self.pos_label + self.neg_label) / 2. if self.y_type_ == "multiclass": y_inv = _inverse_binarize_multiclass(Y, self.classes_) else: y_inv = _inverse_binarize_thresholding(Y, self.y_type_, self.classes_, threshold) if self.sparse_input_: y_inv = sp.csr_matrix(y_inv) elif sp.issparse(y_inv): y_inv = y_inv.toarray() return y_inv def label_binarize(y, classes, neg_label=0, pos_label=1, sparse_output=False): """Binarize labels in a one-vs-all fashion Several regression and binary classification algorithms are available in the scikit. A simple way to extend these algorithms to the multi-class classification case is to use the so-called one-vs-all scheme. This function makes it possible to compute this transformation for a fixed set of class labels known ahead of time. Parameters ---------- y : array-like Sequence of integer labels or multilabel data to encode. classes : array-like of shape [n_classes] Uniquely holds the label for each class. neg_label : int (default: 0) Value with which negative labels must be encoded. pos_label : int (default: 1) Value with which positive labels must be encoded. sparse_output : boolean (default: False), Set to true if output binary array is desired in CSR sparse format Returns ------- Y : numpy array or CSR matrix of shape [n_samples, n_classes] Shape will be [n_samples, 1] for binary problems. 
Examples -------- >>> from sklearn.preprocessing import label_binarize >>> label_binarize([1, 6], classes=[1, 2, 4, 6]) array([[1, 0, 0, 0], [0, 0, 0, 1]]) The class ordering is preserved: >>> label_binarize([1, 6], classes=[1, 6, 4, 2]) array([[1, 0, 0, 0], [0, 1, 0, 0]]) Binary targets transform to a column vector >>> label_binarize(['yes', 'no', 'no', 'yes'], classes=['no', 'yes']) array([[1], [0], [0], [1]]) See also -------- LabelBinarizer : class used to wrap the functionality of label_binarize and allow for fitting to classes independently of the transform operation """ if not isinstance(y, list): # XXX Workaround that will be removed when list of list format is # dropped y = check_array(y, accept_sparse='csr', ensure_2d=False, dtype=None) else: if _num_samples(y) == 0: raise ValueError('y has 0 samples: %r' % y) if neg_label >= pos_label: raise ValueError("neg_label={0} must be strictly less than " "pos_label={1}.".format(neg_label, pos_label)) if (sparse_output and (pos_label == 0 or neg_label != 0)): raise ValueError("Sparse binarization is only supported with non " "zero pos_label and zero neg_label, got " "pos_label={0} and neg_label={1}" "".format(pos_label, neg_label)) # To account for pos_label == 0 in the dense case pos_switch = pos_label == 0 if pos_switch: pos_label = -neg_label y_type = type_of_target(y) if 'multioutput' in y_type: raise ValueError("Multioutput target data is not supported with label " "binarization") if y_type == 'unknown': raise ValueError("The type of target data is not known") n_samples = y.shape[0] if sp.issparse(y) else len(y) n_classes = len(classes) classes = np.asarray(classes) if y_type == "binary": if len(classes) == 1: Y = np.zeros((len(y), 1), dtype=np.int) Y += neg_label return Y elif len(classes) >= 3: y_type = "multiclass" sorted_class = np.sort(classes) if (y_type == "multilabel-indicator" and classes.size != y.shape[1]): raise ValueError("classes {0} missmatch with the labels {1}" "found in the 
data".format(classes, unique_labels(y))) if y_type in ("binary", "multiclass"): y = column_or_1d(y) # pick out the known labels from y y_in_classes = in1d(y, classes) y_seen = y[y_in_classes] indices = np.searchsorted(sorted_class, y_seen) indptr = np.hstack((0, np.cumsum(y_in_classes))) data = np.empty_like(indices) data.fill(pos_label) Y = sp.csr_matrix((data, indices, indptr), shape=(n_samples, n_classes)) elif y_type == "multilabel-indicator": Y = sp.csr_matrix(y) if pos_label != 1: data = np.empty_like(Y.data) data.fill(pos_label) Y.data = data else: raise ValueError("%s target data is not supported with label " "binarization" % y_type) if not sparse_output: Y = Y.toarray() Y = astype(Y, int, copy=False) if neg_label != 0: Y[Y == 0] = neg_label if pos_switch: Y[Y == pos_label] = 0 else: Y.data = astype(Y.data, int, copy=False) # preserve label ordering if np.any(classes != sorted_class): indices = np.searchsorted(sorted_class, classes) Y = Y[:, indices] if y_type == "binary": if sparse_output: Y = Y.getcol(-1) else: Y = Y[:, -1].reshape((-1, 1)) return Y def _inverse_binarize_multiclass(y, classes): """Inverse label binarization transformation for multiclass. Multiclass uses the maximal score instead of a threshold. 
""" classes = np.asarray(classes) if sp.issparse(y): # Find the argmax for each row in y where y is a CSR matrix y = y.tocsr() n_samples, n_outputs = y.shape outputs = np.arange(n_outputs) row_max = sparse_min_max(y, 1)[1] row_nnz = np.diff(y.indptr) y_data_repeated_max = np.repeat(row_max, row_nnz) # picks out all indices obtaining the maximum per row y_i_all_argmax = np.flatnonzero(y_data_repeated_max == y.data) # For corner case where last row has a max of 0 if row_max[-1] == 0: y_i_all_argmax = np.append(y_i_all_argmax, [len(y.data)]) # Gets the index of the first argmax in each row from y_i_all_argmax index_first_argmax = np.searchsorted(y_i_all_argmax, y.indptr[:-1]) # first argmax of each row y_ind_ext = np.append(y.indices, [0]) y_i_argmax = y_ind_ext[y_i_all_argmax[index_first_argmax]] # Handle rows of all 0 y_i_argmax[np.where(row_nnz == 0)[0]] = 0 # Handles rows with max of 0 that contain negative numbers samples = np.arange(n_samples)[(row_nnz > 0) & (row_max.ravel() == 0)] for i in samples: ind = y.indices[y.indptr[i]:y.indptr[i + 1]] y_i_argmax[i] = classes[np.setdiff1d(outputs, ind)][0] return classes[y_i_argmax] else: return classes.take(y.argmax(axis=1), mode="clip") def _inverse_binarize_thresholding(y, output_type, classes, threshold): """Inverse label binarization transformation using thresholding.""" if output_type == "binary" and y.ndim == 2 and y.shape[1] > 2: raise ValueError("output_type='binary', but y.shape = {0}". 
format(y.shape)) if output_type != "binary" and y.shape[1] != len(classes): raise ValueError("The number of class is not equal to the number of " "dimension of y.") classes = np.asarray(classes) # Perform thresholding if sp.issparse(y): if threshold > 0: if y.format not in ('csr', 'csc'): y = y.tocsr() y.data = np.array(y.data > threshold, dtype=np.int) y.eliminate_zeros() else: y = np.array(y.toarray() > threshold, dtype=np.int) else: y = np.array(y > threshold, dtype=np.int) # Inverse transform data if output_type == "binary": if sp.issparse(y): y = y.toarray() if y.ndim == 2 and y.shape[1] == 2: return classes[y[:, 1]] else: if len(classes) == 1: y = np.empty(len(y), dtype=classes.dtype) y.fill(classes[0]) return y else: return classes[y.ravel()] elif output_type == "multilabel-indicator": return y else: raise ValueError("{0} format is not supported".format(output_type)) class MultiLabelBinarizer(BaseEstimator, TransformerMixin): """Transform between iterable of iterables and a multilabel format Although a list of sets or tuples is a very intuitive format for multilabel data, it is unwieldy to process. This transformer converts between this intuitive format and the supported multilabel format: a (samples x classes) binary matrix indicating the presence of a class label. Parameters ---------- classes : array-like of shape [n_classes] (optional) Indicates an ordering for the class labels sparse_output : boolean (default: False), Set to true if output binary array is desired in CSR sparse format Attributes ---------- classes_ : array of labels A copy of the `classes` parameter where provided, or otherwise, the sorted set of classes found when fitting. 
Examples -------- >>> mlb = MultiLabelBinarizer() >>> mlb.fit_transform([(1, 2), (3,)]) array([[1, 1, 0], [0, 0, 1]]) >>> mlb.classes_ array([1, 2, 3]) >>> mlb.fit_transform([set(['sci-fi', 'thriller']), set(['comedy'])]) array([[0, 1, 1], [1, 0, 0]]) >>> list(mlb.classes_) ['comedy', 'sci-fi', 'thriller'] """ def __init__(self, classes=None, sparse_output=False): self.classes = classes self.sparse_output = sparse_output def fit(self, y): """Fit the label sets binarizer, storing `classes_` Parameters ---------- y : iterable of iterables A set of labels (any orderable and hashable object) for each sample. If the `classes` parameter is set, `y` will not be iterated. Returns ------- self : returns this MultiLabelBinarizer instance """ if self.classes is None: classes = sorted(set(itertools.chain.from_iterable(y))) else: classes = self.classes dtype = np.int if all(isinstance(c, int) for c in classes) else object self.classes_ = np.empty(len(classes), dtype=dtype) self.classes_[:] = classes return self def fit_transform(self, y): """Fit the label sets binarizer and transform the given label sets Parameters ---------- y : iterable of iterables A set of labels (any orderable and hashable object) for each sample. If the `classes` parameter is set, `y` will not be iterated. Returns ------- y_indicator : array or CSR matrix, shape (n_samples, n_classes) A matrix such that `y_indicator[i, j] = 1` iff `classes_[j]` is in `y[i]`, and 0 otherwise. 
""" if self.classes is not None: return self.fit(y).transform(y) # Automatically increment on new class class_mapping = defaultdict(int) class_mapping.default_factory = class_mapping.__len__ yt = self._transform(y, class_mapping) # sort classes and reorder columns tmp = sorted(class_mapping, key=class_mapping.get) # (make safe for tuples) dtype = np.int if all(isinstance(c, int) for c in tmp) else object class_mapping = np.empty(len(tmp), dtype=dtype) class_mapping[:] = tmp self.classes_, inverse = np.unique(class_mapping, return_inverse=True) yt.indices = np.take(inverse, yt.indices) if not self.sparse_output: yt = yt.toarray() return yt def transform(self, y): """Transform the given label sets Parameters ---------- y : iterable of iterables A set of labels (any orderable and hashable object) for each sample. If the `classes` parameter is set, `y` will not be iterated. Returns ------- y_indicator : array or CSR matrix, shape (n_samples, n_classes) A matrix such that `y_indicator[i, j] = 1` iff `classes_[j]` is in `y[i]`, and 0 otherwise. 
""" class_to_index = dict(zip(self.classes_, range(len(self.classes_)))) yt = self._transform(y, class_to_index) if not self.sparse_output: yt = yt.toarray() return yt def _transform(self, y, class_mapping): """Transforms the label sets with a given mapping Parameters ---------- y : iterable of iterables class_mapping : Mapping Maps from label to column index in label indicator matrix Returns ------- y_indicator : sparse CSR matrix, shape (n_samples, n_classes) Label indicator matrix """ indices = array.array('i') indptr = array.array('i', [0]) for labels in y: indices.extend(set(class_mapping[label] for label in labels)) indptr.append(len(indices)) data = np.ones(len(indices), dtype=int) return sp.csr_matrix((data, indices, indptr), shape=(len(indptr) - 1, len(class_mapping))) def inverse_transform(self, yt): """Transform the given indicator matrix into label sets Parameters ---------- yt : array or sparse matrix of shape (n_samples, n_classes) A matrix containing only 1s ands 0s. Returns ------- y : list of tuples The set of labels for each sample such that `y[i]` consists of `classes_[j]` for each `yt[i, j] == 1`. """ if yt.shape[1] != len(self.classes_): raise ValueError('Expected indicator for {0} classes, but got {1}' .format(len(self.classes_), yt.shape[1])) if sp.issparse(yt): yt = yt.tocsr() if len(yt.data) != 0 and len(np.setdiff1d(yt.data, [0, 1])) > 0: raise ValueError('Expected only 0s and 1s in label indicator.') return [tuple(self.classes_.take(yt.indices[start:end])) for start, end in zip(yt.indptr[:-1], yt.indptr[1:])] else: unexpected = np.setdiff1d(yt, [0, 1]) if len(unexpected) > 0: raise ValueError('Expected only 0s and 1s in label indicator. ' 'Also got {0}'.format(unexpected)) return [tuple(self.classes_.compress(indicators)) for indicators in yt]
bsd-3-clause
VisTrails/VisTrails
vistrails/db/versions/v0_8_1/translate/v0_8_0.py
2
4302
############################################################################### ## ## Copyright (C) 2014-2016, New York University. ## Copyright (C) 2011-2014, NYU-Poly. ## Copyright (C) 2006-2011, University of Utah. ## All rights reserved. ## Contact: contact@vistrails.org ## ## This file is part of VisTrails. ## ## "Redistribution and use in source and binary forms, with or without ## modification, are permitted provided that the following conditions are met: ## ## - Redistributions of source code must retain the above copyright notice, ## this list of conditions and the following disclaimer. ## - Redistributions in binary form must reproduce the above copyright ## notice, this list of conditions and the following disclaimer in the ## documentation and/or other materials provided with the distribution. ## - Neither the name of the New York University nor the names of its ## contributors may be used to endorse or promote products derived from ## this software without specific prior written permission. ## ## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" ## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, ## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR ## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR ## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, ## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, ## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; ## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, ## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR ## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." 
## ############################################################################### from __future__ import division from vistrails.db import VistrailsDBException from vistrails.db.versions.v0_8_0.domain import DBAdd, DBAnnotation, DBChange, DBDelete # two step process # 1. remap all the old "notes" so that they exist in the id scope # 2. remap all the annotations that were numbered correctly # note that for 2, we don't need to worry about uniqueness -- they are unique # but step 1 may have taken some of their ids... def translateVistrail(vistrail): id_remap = {} for action in vistrail.db_get_actions(): # don't need to change key idx since none of that changes new_action_idx = {} for annotation in action.db_get_annotations(): annotation.db_id = vistrail.idScope.getNewId(DBAnnotation.vtType) new_action_idx[annotation.db_id] = annotation action.db_annotations_id_index = new_action_idx for operation in action.db_get_operations(): # never have annotations as parent objs so # don't have to worry about those ids if operation.db_what == DBAnnotation.vtType: if operation.vtType == 'add': new_id = vistrail.idScope.getNewId(DBAnnotation.vtType) old_id = operation.db_objectId operation.db_objectId = new_id operation.db_data.db_id = new_id id_remap[old_id] = new_id elif operation.vtType == 'change': changed_id = operation.db_oldObjId if id_remap.has_key(changed_id): operation.db_oldObjId = id_remap[changed_id] else: raise VistrailsDBException('cannot translate') new_id = vistrail.idScope.getNewId(DBAnnotation.vtType) old_id = operation.db_newObjId operation.db_newObjId = new_id operation.db_data.db_id = new_id id_remap[old_id] = new_id elif operation.vtType == 'delete': old_id = operation.db_objectId if id_remap.has_key(old_id): operation.db_objectId = id_remap[old_id] else: raise VistrailsDBException('cannot translate') vistrail.db_version = '0.8.1' return vistrail
bsd-3-clause
anselal/antminer-monitor
antminermonitor/app.py
1
3074
from flask import Flask from antminermonitor.blueprints.asicminer import antminer, antminer_json from antminermonitor.blueprints.user import user from antminermonitor.extensions import login_manager, migrate from antminermonitor.blueprints.asicminer.models.miner import Miner from antminermonitor.blueprints.asicminer.models.settings import Settings from antminermonitor.blueprints.user.models import User from antminermonitor.database import db_session, init_db import logging import os basedir = os.path.abspath(os.path.dirname(__file__)) def create_app(script_info=None, settings_override=None): """ Create a Flask application using the app factory pattern. :return: Flask app """ app = Flask(__name__, instance_relative_config=True) app.config.from_object('config.settings') app.config.from_pyfile('settings.py', silent=True) if settings_override: app.config.update(settings_override) app.register_blueprint(antminer) app.register_blueprint(antminer_json) app.register_blueprint(user, url_prefix='/user') authentication(app, User) extensions(app) @app.shell_context_processor def make_shell_context(): return dict(app=app, db=db, Miner=Miner, Settings=Settings, User=User) @app.teardown_appcontext def shutdown_session(exception=None): db_session.remove() return app def create_logger(app=None): """ """ app = app or create_app() gunicorn_error_logger = logging.getLogger('gunicorn.error') app.logger.handlers.extend(gunicorn_error_logger.handlers) app.logger.setLevel(app.config['LOG_LEVEL']) # logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) logger.setLevel(logging.WARNING) # create a file handler handler = logging.FileHandler(os.path.join( basedir, 'logs/antminer_monitor.log'), mode='a') # mode 'a' is default handler.setLevel(logging.WARNING) # create a logging format formatter = logging.Formatter( '%(asctime)s | %(name)s | %(levelname)s | %(message)s') handler.setFormatter(formatter) # add handlers to the logger logger.addHandler(handler) return logger 
def extensions(app): """ Register 0 or more extensions (mutates the app passed in). :param app: Flask application instance :return: None """ login_manager.init_app(app) migrate.init_app(app, db_session) return def authentication(app, user_model): """ Initialize the Flask-Login extension (mutates the app passed in). :param app: Flask application instance :param user_model: Model that contains the authentication information :type user_model: SQLAlchemy model :return: None """ login_manager.login_view = 'user.login' # login_manager.login_message = '' login_manager.refresh_view = 'user.login' login_manager.needs_refresh_message = 'You need to login again to access' ' this page!!!' @login_manager.user_loader def load_user(uid): return user_model.query.get(uid)
gpl-3.0
peterm-itr/edx-platform
common/djangoapps/geoinfo/tests/test_middleware.py
12
5360
""" Tests for CountryMiddleware. """ from mock import patch import pygeoip from django.contrib.sessions.middleware import SessionMiddleware from django.test import TestCase from django.test.utils import override_settings from django.test.client import RequestFactory from geoinfo.middleware import CountryMiddleware from xmodule.modulestore.tests.django_utils import TEST_DATA_MOCK_MODULESTORE from student.tests.factories import UserFactory, AnonymousUserFactory @override_settings(MODULESTORE=TEST_DATA_MOCK_MODULESTORE) class CountryMiddlewareTests(TestCase): """ Tests of CountryMiddleware. """ def setUp(self): self.country_middleware = CountryMiddleware() self.session_middleware = SessionMiddleware() self.authenticated_user = UserFactory.create() self.anonymous_user = AnonymousUserFactory.create() self.request_factory = RequestFactory() self.patcher = patch.object(pygeoip.GeoIP, 'country_code_by_addr', self.mock_country_code_by_addr) self.patcher.start() def tearDown(self): self.patcher.stop() def mock_country_code_by_addr(self, ip_addr): """ Gives us a fake set of IPs """ ip_dict = { '117.79.83.1': 'CN', '117.79.83.100': 'CN', '4.0.0.0': 'SD', '2001:da8:20f:1502:edcf:550b:4a9c:207d': 'CN', } return ip_dict.get(ip_addr, 'US') def test_country_code_added(self): request = self.request_factory.get( '/somewhere', HTTP_X_FORWARDED_FOR='117.79.83.1', ) request.user = self.authenticated_user self.session_middleware.process_request(request) # No country code exists before request. self.assertNotIn('country_code', request.session) self.assertNotIn('ip_address', request.session) self.country_middleware.process_request(request) # Country code added to session. 
self.assertEqual('CN', request.session.get('country_code')) self.assertEqual('117.79.83.1', request.session.get('ip_address')) def test_ip_address_changed(self): request = self.request_factory.get( '/somewhere', HTTP_X_FORWARDED_FOR='4.0.0.0', ) request.user = self.anonymous_user self.session_middleware.process_request(request) request.session['country_code'] = 'CN' request.session['ip_address'] = '117.79.83.1' self.country_middleware.process_request(request) # Country code is changed. self.assertEqual('SD', request.session.get('country_code')) self.assertEqual('4.0.0.0', request.session.get('ip_address')) def test_ip_address_is_not_changed(self): request = self.request_factory.get( '/somewhere', HTTP_X_FORWARDED_FOR='117.79.83.1', ) request.user = self.anonymous_user self.session_middleware.process_request(request) request.session['country_code'] = 'CN' request.session['ip_address'] = '117.79.83.1' self.country_middleware.process_request(request) # Country code is not changed. self.assertEqual('CN', request.session.get('country_code')) self.assertEqual('117.79.83.1', request.session.get('ip_address')) def test_same_country_different_ip(self): request = self.request_factory.get( '/somewhere', HTTP_X_FORWARDED_FOR='117.79.83.100', ) request.user = self.anonymous_user self.session_middleware.process_request(request) request.session['country_code'] = 'CN' request.session['ip_address'] = '117.79.83.1' self.country_middleware.process_request(request) # Country code is not changed. self.assertEqual('CN', request.session.get('country_code')) self.assertEqual('117.79.83.100', request.session.get('ip_address')) def test_ip_address_is_none(self): # IP address is not defined in request. request = self.request_factory.get('/somewhere') request.user = self.anonymous_user # Run process_request to set up the session in the request # to be able to override it. 
self.session_middleware.process_request(request) request.session['country_code'] = 'CN' request.session['ip_address'] = '117.79.83.1' self.country_middleware.process_request(request) # No country code exists after request processing. self.assertNotIn('country_code', request.session) self.assertNotIn('ip_address', request.session) def test_ip_address_is_ipv6(self): request = self.request_factory.get( '/somewhere', HTTP_X_FORWARDED_FOR='2001:da8:20f:1502:edcf:550b:4a9c:207d' ) request.user = self.authenticated_user self.session_middleware.process_request(request) # No country code exists before request. self.assertNotIn('country_code', request.session) self.assertNotIn('ip_address', request.session) self.country_middleware.process_request(request) # Country code added to session. self.assertEqual('CN', request.session.get('country_code')) self.assertEqual( '2001:da8:20f:1502:edcf:550b:4a9c:207d', request.session.get('ip_address'))
agpl-3.0
dschien/simple-MC
simplemc/__init__.py
1
3544
''' simple-MC: Main module Copyright 2014, Dan Schien Licensed under MIT. ''' import importlib import xlrd __author__ = 'schien' NAME = 'name' TYPE = 'type' PARAM_A = 'param_a' PARAM_B = 'param_b' PARAM_C = 'param_c' MODULE = 'module' LABEL = 'label' UNIT = 'unit' TABLE_STRUCT = { NAME: 0, MODULE: 1, TYPE: 2, PARAM_A: 3, PARAM_B: 4, PARAM_C: 5, UNIT: 6, LABEL: 7, 'comment': 8, 'description': 9 } class ModelLoader(object): def __init__(self, file, size=1): self.wb = load_workbook(file) self.size = size def get_row(self, name): i = [row[TABLE_STRUCT[NAME]] for row in self.wb].index(name) return self.wb[i] def get_val(self, name, args=None): """ Apply function to arguments from excel table args: optional additonal args If no args are given, applies default size from constructor """ row = self.get_row(name) f, p = build_distribution(row) if args is not None: ret = f(*p, **args) assert ret.shape == (self.size,) return ret else: ret = f(*p, size=self.size) assert ret.shape == (self.size,) return ret def get_label(self, name): try: row = self.get_row(name) except: return name return row[TABLE_STRUCT[LABEL]] def get_property(self, name, prop): try: row = self.get_row(name) except: return name return row[TABLE_STRUCT[prop]] def __getitem__(self, name): """ Get the distribution for a item name from the table Then execute and return the result array """ return self.get_val(name) def build_distribution(row): module = importlib.import_module(row[TABLE_STRUCT[MODULE]]) func = getattr(module, row[TABLE_STRUCT[TYPE]]) if row[TABLE_STRUCT[TYPE]] == 'choice': cell = row[TABLE_STRUCT[PARAM_A]] if type(cell) in [float, int]: params = ([cell],) else: tokens = cell.split(',') params = [float(token.strip()) for token in tokens] params = (params, ) elif row[TABLE_STRUCT[TYPE]] == 'Distribution': func = func() params = tuple(row[TABLE_STRUCT[i]] for i in [PARAM_A, PARAM_B, PARAM_C] if row[TABLE_STRUCT[i]]) else: params = tuple(row[TABLE_STRUCT[i]] for i in [PARAM_A, PARAM_B, PARAM_C] if 
row[TABLE_STRUCT[i]]) return func, params def load_workbook(file): wb = xlrd.open_workbook(file) sh = wb.sheet_by_index(0) var_column = sh.col_values(TABLE_STRUCT[NAME]) module_column = sh.col_values(TABLE_STRUCT[MODULE]) distribution_type_column = sh.col_values(TABLE_STRUCT[TYPE]) param_a_colum = sh.col_values(TABLE_STRUCT[PARAM_A]) param_b_colum = sh.col_values(TABLE_STRUCT[PARAM_B]) param_c_colum = sh.col_values(TABLE_STRUCT[PARAM_C]) unit_colum = sh.col_values(TABLE_STRUCT[UNIT]) label_colum = sh.col_values(TABLE_STRUCT[LABEL]) rows_es = zip(var_column, module_column, distribution_type_column, param_a_colum, param_b_colum, param_c_colum, unit_colum, label_colum) return rows_es def main(): ''' Main function of the boilerplate code is the entry point of the 'simplemc' executable script (defined in setup.py). Use doctests, those are very helpful. >>> main() Hello >>> 2 + 2 4 ''' print("Hello")
mit
jiangwei1221/django-virtualenv-demo
env/lib/python2.7/site-packages/django/core/paginator.py
141
5021
import collections from math import ceil from django.utils import six class InvalidPage(Exception): pass class PageNotAnInteger(InvalidPage): pass class EmptyPage(InvalidPage): pass class Paginator(object): def __init__(self, object_list, per_page, orphans=0, allow_empty_first_page=True): self.object_list = object_list self.per_page = int(per_page) self.orphans = int(orphans) self.allow_empty_first_page = allow_empty_first_page self._num_pages = self._count = None def validate_number(self, number): """ Validates the given 1-based page number. """ try: number = int(number) except (TypeError, ValueError): raise PageNotAnInteger('That page number is not an integer') if number < 1: raise EmptyPage('That page number is less than 1') if number > self.num_pages: if number == 1 and self.allow_empty_first_page: pass else: raise EmptyPage('That page contains no results') return number def page(self, number): """ Returns a Page object for the given 1-based page number. """ number = self.validate_number(number) bottom = (number - 1) * self.per_page top = bottom + self.per_page if top + self.orphans >= self.count: top = self.count return self._get_page(self.object_list[bottom:top], number, self) def _get_page(self, *args, **kwargs): """ Returns an instance of a single page. This hook can be used by subclasses to use an alternative to the standard :cls:`Page` object. """ return Page(*args, **kwargs) def _get_count(self): """ Returns the total number of objects, across all pages. """ if self._count is None: try: self._count = self.object_list.count() except (AttributeError, TypeError): # AttributeError if object_list has no count() method. # TypeError if object_list.count() requires arguments # (i.e. is of type list). self._count = len(self.object_list) return self._count count = property(_get_count) def _get_num_pages(self): """ Returns the total number of pages. 
""" if self._num_pages is None: if self.count == 0 and not self.allow_empty_first_page: self._num_pages = 0 else: hits = max(1, self.count - self.orphans) self._num_pages = int(ceil(hits / float(self.per_page))) return self._num_pages num_pages = property(_get_num_pages) def _get_page_range(self): """ Returns a 1-based range of pages for iterating through within a template for loop. """ return range(1, self.num_pages + 1) page_range = property(_get_page_range) QuerySetPaginator = Paginator # For backwards-compatibility. class Page(collections.Sequence): def __init__(self, object_list, number, paginator): self.object_list = object_list self.number = number self.paginator = paginator def __repr__(self): return '<Page %s of %s>' % (self.number, self.paginator.num_pages) def __len__(self): return len(self.object_list) def __getitem__(self, index): if not isinstance(index, (slice,) + six.integer_types): raise TypeError # The object_list is converted to a list so that if it was a QuerySet # it won't be a database hit per __getitem__. if not isinstance(self.object_list, list): self.object_list = list(self.object_list) return self.object_list[index] def has_next(self): return self.number < self.paginator.num_pages def has_previous(self): return self.number > 1 def has_other_pages(self): return self.has_previous() or self.has_next() def next_page_number(self): return self.paginator.validate_number(self.number + 1) def previous_page_number(self): return self.paginator.validate_number(self.number - 1) def start_index(self): """ Returns the 1-based index of the first object on this page, relative to total objects in the paginator. """ # Special case, return zero if no items. if self.paginator.count == 0: return 0 return (self.paginator.per_page * (self.number - 1)) + 1 def end_index(self): """ Returns the 1-based index of the last object on this page, relative to total objects found (hits). """ # Special case for the last page because there can be orphans. 
if self.number == self.paginator.num_pages: return self.paginator.count return self.number * self.paginator.per_page
unlicense
hogarthj/ansible
lib/ansible/modules/cloud/ovirt/ovirt_external_provider.py
66
10078
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (c) 2016 Red Hat, Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: ovirt_external_provider short_description: Module to manage external providers in oVirt/RHV version_added: "2.3" author: "Ondra Machacek (@machacekondra)" description: - "Module to manage external providers in oVirt/RHV" options: name: description: - "Name of the external provider to manage." state: description: - "Should the external be present or absent" choices: ['present', 'absent'] default: present description: description: - "Description of the external provider." type: description: - "Type of the external provider." choices: ['os_image', 'network', 'os_volume', 'foreman'] url: description: - "URL where external provider is hosted." - "Applicable for those types: I(os_image), I(os_volume), I(network) and I(foreman)." username: description: - "Username to be used for login to external provider." - "Applicable for all types." password: description: - "Password of the user specified in C(username) parameter." - "Applicable for all types." tenant_name: description: - "Name of the tenant." - "Applicable for those types: I(os_image), I(os_volume) and I(network)." 
aliases: ['tenant'] authentication_url: description: - "Keystone authentication URL of the openstack provider." - "Applicable for those types: I(os_image), I(os_volume) and I(network)." aliases: ['auth_url'] data_center: description: - "Name of the data center where provider should be attached." - "Applicable for those type: I(os_volume)." read_only: description: - "Specify if the network should be read only." - "Applicable if C(type) is I(network)." network_type: description: - "Type of the external network provider either external (for example OVN) or neutron." - "Applicable if C(type) is I(network)." choices: ['external', 'neutron'] default: ['external'] extends_documentation_fragment: ovirt ''' EXAMPLES = ''' # Examples don't contain auth parameter for simplicity, # look at ovirt_auth module to see how to reuse authentication: # Add image external provider: - ovirt_external_provider: name: image_provider type: os_image url: http://10.34.63.71:9292 username: admin password: 123456 tenant: admin auth_url: http://10.34.63.71:35357/v2.0/ # Add foreman provider: - ovirt_external_provider: name: foreman_provider type: foreman url: https://foreman.example.com username: admin password: 123456 # Add external network provider for OVN: - ovirt_external_provider: name: ovn_provider type: network network_type: external url: http://1.2.3.4:9696 # Remove image external provider: - ovirt_external_provider: state: absent name: image_provider type: os_image ''' RETURN = ''' id: description: ID of the external provider which is managed returned: On success if external provider is found. type: str sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c external_host_provider: description: "Dictionary of all the external_host_provider attributes. External provider attributes can be found on your oVirt/RHV instance at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/external_host_provider." returned: "On success and if parameter 'type: foreman' is used." 
type: dictionary openstack_image_provider: description: "Dictionary of all the openstack_image_provider attributes. External provider attributes can be found on your oVirt/RHV instance at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/openstack_image_provider." returned: "On success and if parameter 'type: os_image' is used." type: dictionary openstack_volume_provider: description: "Dictionary of all the openstack_volume_provider attributes. External provider attributes can be found on your oVirt/RHV instance at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/openstack_volume_provider." returned: "On success and if parameter 'type: os_volume' is used." type: dictionary openstack_network_provider: description: "Dictionary of all the openstack_network_provider attributes. External provider attributes can be found on your oVirt/RHV instance at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/openstack_network_provider." returned: "On success and if parameter 'type: network' is used." 
type: dictionary ''' import traceback try: import ovirtsdk4.types as otypes except ImportError: pass from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ovirt import ( BaseModule, check_params, check_sdk, create_connection, equal, ovirt_full_argument_spec, ) class ExternalProviderModule(BaseModule): def provider_type(self, provider_type): self._provider_type = provider_type def build_entity(self): provider_type = self._provider_type( requires_authentication=self._module.params.get('username') is not None, ) if self._module.params.pop('type') == 'network': setattr( provider_type, 'type', otypes.OpenStackNetworkProviderType(self._module.params.pop('network_type')) ) for key, value in self._module.params.items(): if hasattr(provider_type, key): setattr(provider_type, key, value) return provider_type def update_check(self, entity): return ( equal(self._module.params.get('description'), entity.description) and equal(self._module.params.get('url'), entity.url) and equal(self._module.params.get('authentication_url'), entity.authentication_url) and equal(self._module.params.get('tenant_name'), getattr(entity, 'tenant_name', None)) and equal(self._module.params.get('username'), entity.username) ) def _external_provider_service(provider_type, system_service): if provider_type == 'os_image': return otypes.OpenStackImageProvider, system_service.openstack_image_providers_service() elif provider_type == 'network': return otypes.OpenStackNetworkProvider, system_service.openstack_network_providers_service() elif provider_type == 'os_volume': return otypes.OpenStackVolumeProvider, system_service.openstack_volume_providers_service() elif provider_type == 'foreman': return otypes.ExternalHostProvider, system_service.external_host_providers_service() def main(): argument_spec = ovirt_full_argument_spec( state=dict( choices=['present', 'absent'], default='present', ), name=dict(default=None), description=dict(default=None), type=dict( default=None, 
required=True, choices=[ 'os_image', 'network', 'os_volume', 'foreman', ], aliases=['provider'], ), url=dict(default=None), username=dict(default=None), password=dict(default=None, no_log=True), tenant_name=dict(default=None, aliases=['tenant']), authentication_url=dict(default=None, aliases=['auth_url']), data_center=dict(default=None), read_only=dict(default=None, type='bool'), network_type=dict( default='external', choices=['external', 'neutron'], ), ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, ) if module._name == 'ovirt_external_providers': module.deprecate("The 'ovirt_external_providers' module is being renamed 'ovirt_external_provider'", version=2.8) check_sdk(module) check_params(module) try: auth = module.params.pop('auth') connection = create_connection(auth) provider_type, external_providers_service = _external_provider_service( provider_type=module.params.get('type'), system_service=connection.system_service(), ) external_providers_module = ExternalProviderModule( connection=connection, module=module, service=external_providers_service, ) external_providers_module.provider_type(provider_type) state = module.params.pop('state') if state == 'absent': ret = external_providers_module.remove() elif state == 'present': ret = external_providers_module.create() module.exit_json(**ret) except Exception as e: module.fail_json(msg=str(e), exception=traceback.format_exc()) finally: connection.close(logout=auth.get('token') is None) if __name__ == "__main__": main()
gpl-3.0
SwagColoredKitteh/servo
tests/wpt/web-platform-tests/tools/pytest/testing/test_pastebin.py
173
3845
# encoding: utf-8 import sys import pytest class TestPasteCapture: @pytest.fixture def pastebinlist(self, monkeypatch, request): pastebinlist = [] plugin = request.config.pluginmanager.getplugin('pastebin') monkeypatch.setattr(plugin, 'create_new_paste', pastebinlist.append) return pastebinlist def test_failed(self, testdir, pastebinlist): testpath = testdir.makepyfile(""" import pytest def test_pass(): pass def test_fail(): assert 0 def test_skip(): pytest.skip("") """) reprec = testdir.inline_run(testpath, "--paste=failed") assert len(pastebinlist) == 1 s = pastebinlist[0] assert s.find("def test_fail") != -1 assert reprec.countoutcomes() == [1,1,1] def test_all(self, testdir, pastebinlist): from _pytest.pytester import LineMatcher testpath = testdir.makepyfile(""" import pytest def test_pass(): pass def test_fail(): assert 0 def test_skip(): pytest.skip("") """) reprec = testdir.inline_run(testpath, "--pastebin=all", '-v') assert reprec.countoutcomes() == [1,1,1] assert len(pastebinlist) == 1 contents = pastebinlist[0].decode('utf-8') matcher = LineMatcher(contents.splitlines()) matcher.fnmatch_lines([ '*test_pass PASSED*', '*test_fail FAILED*', '*test_skip SKIPPED*', '*== 1 failed, 1 passed, 1 skipped in *' ]) def test_non_ascii_paste_text(self, testdir): """Make sure that text which contains non-ascii characters is pasted correctly. See #1219. 
""" testdir.makepyfile(test_unicode=""" # encoding: utf-8 def test(): assert '☺' == 1 """) result = testdir.runpytest('--pastebin=all') if sys.version_info[0] == 3: expected_msg = "*assert '☺' == 1*" else: expected_msg = "*assert '\\xe2\\x98\\xba' == 1*" result.stdout.fnmatch_lines([ expected_msg, "*== 1 failed in *", '*Sending information to Paste Service*', ]) class TestPaste: @pytest.fixture def pastebin(self, request): return request.config.pluginmanager.getplugin('pastebin') @pytest.fixture def mocked_urlopen(self, monkeypatch): """ monkeypatch the actual urlopen calls done by the internal plugin function that connects to bpaste service. """ calls = [] def mocked(url, data): calls.append((url, data)) class DummyFile: def read(self): # part of html of a normal response return b'View <a href="/raw/3c0c6750bd">raw</a>.' return DummyFile() if sys.version_info < (3, 0): import urllib monkeypatch.setattr(urllib, 'urlopen', mocked) else: import urllib.request monkeypatch.setattr(urllib.request, 'urlopen', mocked) return calls def test_create_new_paste(self, pastebin, mocked_urlopen): result = pastebin.create_new_paste(b'full-paste-contents') assert result == 'https://bpaste.net/show/3c0c6750bd' assert len(mocked_urlopen) == 1 url, data = mocked_urlopen[0] assert type(data) is bytes lexer = 'python3' if sys.version_info[0] == 3 else 'python' assert url == 'https://bpaste.net' assert 'lexer=%s' % lexer in data.decode() assert 'code=full-paste-contents' in data.decode() assert 'expiry=1week' in data.decode()
mpl-2.0
sordonia/hed-qs
baselines/VMM/vmm_builder.py
3
1080
""" __author__ Alessandro Sordoni """ import logging import cPickle import os import sys import argparse from Common.psteff import * logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) def build(session_file, epsilon): D = 5 pstree = PST(D) f = open(session_file, 'r') for num, session in enumerate(f): pstree.add_session(session.strip().split("\t")) if num % 100000 == 0: logger.info('{} sessions / {} nodes in the PST'.format(num, pstree.num_nodes)) f.close() if epsilon != 0.: logger.info("Pruning with epsilon = {}".format(epsilon)) pstree.prune(epsilon=epsilon) logger.info("End Pruning.") logger.info('-- Closing') logger.info('{} sessions / {} nodes in the PST'.format(num, pstree.num_nodes)) pstree.save(session_file + ('_e{}'.format(epsilon) if epsilon != 0 else '') + '_VMM.mdl') #print 'Testing loading' #pstree.load(args.output_prefix + '_pst_e{}_d{}.pkl'.format(epsilon, D)) #print '{} nodes'.format(pstree.num_nodes)
bsd-3-clause
ahamilton55/ansible
lib/ansible/modules/net_tools/omapi_host.py
30
12355
#!/usr/bin/python # -*- coding: utf-8 -*- """ Ansible module to configure DHCPd hosts using OMAPI protocol (c) 2016, Loic Blot <loic.blot@unix-experience.fr> Sponsored by Infopro Digital. http://www.infopro-digital.com/ Sponsored by E.T.A.I. http://www.etai.fr/ This file is part of Ansible Ansible is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Ansible is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Ansible. If not, see <http://www.gnu.org/licenses/>. """ ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: omapi_host short_description: Setup OMAPI hosts. description: - Create, update and remove OMAPI hosts into compatible DHCPd servers. version_added: "2.3" requirements: - pypureomapi author: "Loic Blot (@nerzhul)" options: state: description: - Create or remove OMAPI host. required: true choices: ['present', 'absent'] name: description: - Sets the host lease hostname (mandatory if state=present). default: None host: description: - Sets OMAPI server host to interact with. default: localhost port: description: - Sets the OMAPI server port to interact with. default: 7911 key_name: description: - Sets the TSIG key name for authenticating against OMAPI server. required: true key: description: - Sets the TSIG key content for authenticating against OMAPI server. required: true macaddr: description: - Sets the lease host MAC address. required: true ip: description: - Sets the lease host IP address. 
required: false default: None statements: description: - Attach a list of OMAPI DHCP statements with host lease (without ending semicolon). required: false default: [] ddns: description: - Enable dynamic DNS updates for this host. required: false default: false ''' EXAMPLES = ''' - name: Remove a host using OMAPI omapi_host: key_name: "defomapi" key: "+bFQtBCta6j2vWkjPkNFtgA==" host: "10.1.1.1" macaddr: "00:66:ab:dd:11:44" state: absent - name: Add a host using OMAPI omapi_host: key_name: "defomapi" key: "+bFQtBCta6j2vWkjPkNFtgA==" host: "10.98.4.55" macaddr: "44:dd:ab:dd:11:44" name: "server01" ip: "192.168.88.99" ddns: yes statements: - 'filename "pxelinux.0"' - 'next-server 1.1.1.1' state: present ''' RETURN = ''' changed: description: If module has modified a host returned: success type: string lease: description: dictionnary containing host informations returned: success type: complex contains: ip-address: description: IP address, if there is. returned: success type: string sample: '192.168.1.5' hardware-address: description: MAC address returned: success type: string sample: '00:11:22:33:44:55' hardware-type: description: hardware type, generally '1' returned: success type: int sample: 1 name: description: hostname returned: success type: string sample: 'mydesktop' ''' # import module snippets from ansible.module_utils.basic import AnsibleModule, get_exception, to_bytes from ansible.module_utils.six import iteritems import socket import struct import binascii try: from pypureomapi import Omapi, OmapiMessage, OmapiError, OmapiErrorNotFound from pypureomapi import pack_ip, unpack_ip, pack_mac, unpack_mac from pypureomapi import OMAPI_OP_STATUS, OMAPI_OP_UPDATE pureomapi_found = True except ImportError: pureomapi_found = False class OmapiHostManager: def __init__(self, module): self.module = module self.omapi = None self.connect() def connect(self): try: self.omapi = Omapi(self.module.params['host'], self.module.params['port'], self.module.params['key_name'], 
self.module.params['key']) except binascii.Error: self.module.fail_json(msg="Unable to open OMAPI connection. 'key' is not a valid base64 key.") except OmapiError: e = get_exception() self.module.fail_json(msg="Unable to open OMAPI connection. Ensure 'host', 'port', 'key' and 'key_name' " "are valid. Exception was: %s" % e) except socket.error: e = get_exception() self.module.fail_json(msg="Unable to connect to OMAPI server: %s" % e) def get_host(self, macaddr): msg = OmapiMessage.open(to_bytes("host", errors='surrogate_or_strict')) msg.obj.append((to_bytes("hardware-address", errors='surrogate_or_strict'), pack_mac(macaddr))) msg.obj.append((to_bytes("hardware-type", errors='surrogate_or_strict'), struct.pack("!I", 1))) response = self.omapi.query_server(msg) if response.opcode != OMAPI_OP_UPDATE: return None return response @staticmethod def unpack_facts(obj): result = dict(obj) if 'hardware-address' in result: result['hardware-address'] = unpack_mac(result['hardware-address']) if 'ip-address' in result: result['ip-address'] = unpack_ip(result['ip-address']) if 'hardware-type' in result: result['hardware-type'] = struct.unpack("!I", result['hardware-type']) return result def setup_host(self): if self.module.params['hostname'] is None or len(self.module.params['hostname']) == 0: self.module.fail_json(msg="name attribute could not be empty when adding or modifying host.") msg = None host_response = self.get_host(self.module.params['macaddr']) # If host was not found using macaddr, add create message if host_response is None: msg = OmapiMessage.open(to_bytes('host', errors='surrogate_or_strict')) msg.message.append(('create', struct.pack('!I', 1))) msg.message.append(('exclusive', struct.pack('!I', 1))) msg.obj.append(('hardware-address', pack_mac(self.module.params['macaddr']))) msg.obj.append(('hardware-type', struct.pack('!I', 1))) msg.obj.append(('name', self.module.params['hostname'])) if self.module.params['ip'] is not None: 
msg.obj.append((to_bytes("ip-address", errors='surrogate_or_strict'), pack_ip(self.module.params['ip']))) stmt_join = "" if self.module.params['ddns']: stmt_join += 'ddns-hostname "{0}"; '.format(self.module.params['hostname']) try: if len(self.module.params['statements']) > 0: stmt_join += "; ".join(self.module.params['statements']) stmt_join += "; " except TypeError: e = get_exception() self.module.fail_json(msg="Invalid statements found: %s" % e) if len(stmt_join) > 0: msg.obj.append(('statements', stmt_join)) try: response = self.omapi.query_server(msg) if response.opcode != OMAPI_OP_UPDATE: self.module.fail_json(msg="Failed to add host, ensure authentication and host parameters " "are valid.") self.module.exit_json(changed=True, lease=self.unpack_facts(response.obj)) except OmapiError: e = get_exception() self.module.fail_json(msg="OMAPI error: %s" % e) # Forge update message else: response_obj = self.unpack_facts(host_response.obj) fields_to_update = {} if to_bytes('ip-address', errors='surrogate_or_strict') not in response_obj or \ unpack_ip(response_obj[to_bytes('ip-address', errors='surrogate_or_strict')]) != self.module.params['ip']: fields_to_update['ip-address'] = pack_ip(self.module.params['ip']) # Name cannot be changed if 'name' not in response_obj or response_obj['name'] != self.module.params['hostname']: self.module.fail_json(msg="Changing hostname is not supported. Old was %s, new is %s. " "Please delete host and add new." % (response_obj['name'], self.module.params['hostname'])) """ # It seems statements are not returned by OMAPI, then we cannot modify them at this moment. 
if 'statements' not in response_obj and len(self.module.params['statements']) > 0 or \ response_obj['statements'] != self.module.params['statements']: with open('/tmp/omapi', 'w') as fb: for (k,v) in iteritems(response_obj): fb.writelines('statements: %s %s\n' % (k, v)) """ if len(fields_to_update) == 0: self.module.exit_json(changed=False, lease=response_obj) else: msg = OmapiMessage.update(host_response.handle) msg.update_object(fields_to_update) try: response = self.omapi.query_server(msg) if response.opcode != OMAPI_OP_STATUS: self.module.fail_json(msg="Failed to modify host, ensure authentication and host parameters " "are valid.") self.module.exit_json(changed=True) except OmapiError: e = get_exception() self.module.fail_json(msg="OMAPI error: %s" % e) def remove_host(self): try: self.omapi.del_host(self.module.params['macaddr']) self.module.exit_json(changed=True) except OmapiErrorNotFound: self.module.exit_json() except OmapiError: e = get_exception() self.module.fail_json(msg="OMAPI error: %s" % e) def main(): module = AnsibleModule( argument_spec=dict( state=dict(required=True, type='str', choices=['present', 'absent']), host=dict(type='str', default="localhost"), port=dict(type='int', default=7911), key_name=dict(required=True, type='str', default=None), key=dict(required=True, type='str', default=None, no_log=True), macaddr=dict(required=True, type='str', default=None), hostname=dict(type='str', default=None, aliases=['name']), ip=dict(type='str', default=None), ddns=dict(type='bool', default=False), statements=dict(type='list', default=[]) ), supports_check_mode=False ) if not pureomapi_found: module.fail_json(msg="pypureomapi library is required by this module.") if module.params['key'] is None or len(module.params["key"]) == 0: module.fail_json(msg="'key' parameter cannot be empty.") if module.params['key_name'] is None or len(module.params["key_name"]) == 0: module.fail_json(msg="'key_name' parameter cannot be empty.") host_manager = 
OmapiHostManager(module) try: if module.params['state'] == 'present': host_manager.setup_host() elif module.params['state'] == 'absent': host_manager.remove_host() except ValueError: e = get_exception() module.fail_json(msg="OMAPI input value error: %s" % e) if __name__ == '__main__': main()
gpl-3.0
wakatime/wakatime
tests/test_dependencies.py
1
16387
# -*- coding: utf-8 -*- from wakatime.main import execute from wakatime.packages import requests import logging import os import time import shutil from testfixtures import log_capture from wakatime.compat import is_py26, u from wakatime.constants import SUCCESS from wakatime.exceptions import NotYetImplemented from wakatime.dependencies import DependencyParser, TokenParser from wakatime.stats import get_lexer_by_name from .utils import mock, ANY, CustomResponse, TemporaryDirectory, TestCase if is_py26: from wakatime.packages.py26.pygments.lexers import ClassNotFound, PythonLexer else: from wakatime.packages.py27.pygments.lexers import ClassNotFound, PythonLexer class DependenciesTestCase(TestCase): patch_these = [ 'wakatime.packages.requests.adapters.HTTPAdapter.send', 'wakatime.offlinequeue.Queue.push', ['wakatime.offlinequeue.Queue.pop', None], ['wakatime.offlinequeue.Queue.connect', None], 'wakatime.session_cache.SessionCache.save', 'wakatime.session_cache.SessionCache.delete', ['wakatime.session_cache.SessionCache.get', requests.session], ['wakatime.session_cache.SessionCache.connect', None], ] def shared(self, expected_dependencies=[], expected_language=ANY, expected_lines=ANY, entity='', config='good_config.cfg', extra_args=[]): self.patched['wakatime.packages.requests.adapters.HTTPAdapter.send'].return_value = CustomResponse() config = os.path.join('tests/samples/configs', config) with TemporaryDirectory() as tempdir: shutil.copy(os.path.join('tests/samples/codefiles', entity), os.path.join(tempdir, os.path.basename(entity))) entity = os.path.realpath(os.path.join(tempdir, os.path.basename(entity))) now = u(int(time.time())) args = ['--file', entity, '--config', config, '--time', now] + extra_args retval = execute(args) self.assertEquals(retval, SUCCESS) self.assertNothingPrinted() heartbeat = { 'language': expected_language, 'lines': expected_lines, 'entity': os.path.realpath(entity), 'project': ANY, 'branch': ANY, 'dependencies': expected_dependencies, 
'time': float(now), 'type': 'file', 'is_write': False, 'user_agent': ANY, } self.assertHeartbeatSent(heartbeat) self.assertHeartbeatNotSavedOffline() self.assertOfflineHeartbeatsSynced() self.assertSessionCacheSaved() def test_token_parser(self): with self.assertRaises(NotYetImplemented): source_file = 'tests/samples/codefiles/c_only/non_empty.h' parser = TokenParser(source_file) parser.parse() with mock.patch('wakatime.dependencies.TokenParser._extract_tokens') as mock_extract_tokens: source_file = 'tests/samples/codefiles/see.h' parser = TokenParser(source_file) parser.tokens mock_extract_tokens.assert_called_once_with() parser = TokenParser(None) parser.append('one.two.three', truncate=True, truncate_to=1) parser.append('one.two.three', truncate=True, truncate_to=2) parser.append('one.two.three', truncate=True, truncate_to=3) parser.append('one.two.three', truncate=True, truncate_to=4) expected = [ 'one', 'one.two', 'one.two.three', 'one.two.three', ] self.assertEquals(parser.dependencies, expected) @log_capture() def test_dependency_parser(self, logs): logging.disable(logging.NOTSET) lexer = PythonLexer lexer.__class__.__name__ = 'FooClass' parser = DependencyParser(None, lexer) dependencies = parser.parse() log_output = u("\n").join([u(' ').join(x) for x in logs.actual()]) self.assertEquals(log_output, '') self.assertNothingPrinted() expected = [] self.assertEquals(dependencies, expected) @log_capture() def test_missing_dependency_parser_in_debug_mode(self, logs): logging.disable(logging.NOTSET) # turn on debug mode log = logging.getLogger('WakaTime') log.setLevel(logging.DEBUG) lexer = PythonLexer lexer.__class__.__name__ = 'FooClass' parser = DependencyParser(None, lexer) # parse dependencies dependencies = parser.parse() log_output = u("\n").join([u(' ').join(x) for x in logs.actual()]) expected = 'WakaTime DEBUG Parsing dependencies not supported for python.FooClass' self.assertEquals(log_output, expected) self.assertNothingPrinted() expected = [] 
self.assertEquals(dependencies, expected) @log_capture() def test_missing_dependency_parser_importerror_in_debug_mode(self, logs): logging.disable(logging.NOTSET) # turn on debug mode log = logging.getLogger('WakaTime') log.setLevel(logging.DEBUG) with mock.patch('wakatime.dependencies.import_module') as mock_import: mock_import.side_effect = ImportError('foo') lexer = PythonLexer lexer.__class__.__name__ = 'FooClass' parser = DependencyParser(None, lexer) # parse dependencies dependencies = parser.parse() log_output = u("\n").join([u(' ').join(x) for x in logs.actual()]) expected = 'WakaTime DEBUG Parsing dependencies not supported for python.FooClass' self.assertEquals(log_output, expected) self.assertNothingPrinted() expected = [] self.assertEquals(dependencies, expected) def test_io_error_suppressed_when_parsing_dependencies(self): with mock.patch('wakatime.dependencies.open') as mock_open: mock_open.side_effect = IOError('') self.shared( expected_dependencies=[], expected_language='Python', expected_lines=38, entity='python.py', ) def test_classnotfound_error_raised_when_passing_none_to_pygments(self): with self.assertRaises(ClassNotFound): get_lexer_by_name(None) def test_classnotfound_error_suppressed_when_parsing_dependencies(self): with mock.patch('wakatime.stats.guess_lexer_using_filename') as mock_guess: mock_guess.return_value = (None, None) with mock.patch('wakatime.stats.get_filetype_from_buffer') as mock_filetype: mock_filetype.return_value = 'foo' self.shared( expected_dependencies=[], expected_language=None, expected_lines=38, entity='python.py', ) def test_dependencies_still_detected_when_alternate_language_used(self): with mock.patch('wakatime.stats.guess_lexer') as mock_guess_lexer: mock_guess_lexer.return_value = None self.shared( expected_dependencies=[ 'app', 'django', 'first', 'flask', 'jinja', 'mock', 'pygments', 'second', 'simplejson', 'sqlalchemy', 'unittest', ], expected_language='Python', expected_lines=38, entity='python.py', 
extra_args=['--alternate-language', 'PYTHON'], ) def test_long_dependencies_removed(self): self.shared( expected_dependencies=[ 'django', 'flask', 'notlongenoughnotlongenoughnotlongenoughnotlongenoughnotlongenoughnotlongenoughnotlongenoughnotlongenoughnotlongenoughnotlongenoughnotlongenoughnotlongenoughnotlongenoughnotlongenoughnotlongenoughnotlo', ], expected_language='Python', expected_lines=20, entity='python_with_long_import.py', ) def test_python_dependencies_detected(self): self.shared( expected_dependencies=[ 'app', 'django', 'first', 'flask', 'jinja', 'mock', 'pygments', 'second', 'simplejson', 'sqlalchemy', 'unittest', ], expected_language='Python', expected_lines=38, entity='python.py', ) def test_bower_dependencies_detected(self): self.shared( expected_dependencies=[ 'bootstrap', 'bootstrap-daterangepicker', 'moment', 'moment-timezone', 'bower', 'animate.css', ], expected_language='JSON', expected_lines=11, entity='bower.json', ) def test_grunt_dependencies_detected(self): self.shared( expected_dependencies=[ 'grunt', ], expected_language=None, expected_lines=23, entity='Gruntfile', ) def test_java_dependencies_detected(self): self.shared( expected_dependencies=[ 'colorfulwolf.webcamapplet', 'foobar', 'googlecode.javacv', 'apackage.something', 'anamespace.other', ], expected_language='Java', expected_lines=22, entity='java.java', ) def test_c_dependencies_detected(self): self.shared( expected_dependencies=[ 'openssl', ], expected_language='C', expected_lines=8, entity='c_only/non_empty.c', ) def test_cpp_dependencies_detected(self): self.shared( expected_dependencies=[ 'openssl', ], expected_language='C++', expected_lines=8, entity='c_and_cpp/non_empty.cpp', ) def test_csharp_dependencies_detected(self): self.shared( expected_dependencies=[ 'Proper', 'Fart', 'Math', 'WakaTime', ], expected_language='C#', expected_lines=18, entity='csharp/seesharp.cs', ) def test_php_dependencies_detected(self): self.shared( expected_dependencies=[ 'Interop', 'FooBarOne', 
'FooBarTwo', 'FooBarThree', 'FooBarFour', 'FooBarSeven', 'FooBarEight', 'ArrayObject', "'ServiceLocator.php'", "'ServiceLocatorTwo.php'", ], expected_language='PHP', expected_lines=116, entity='php.php', ) def test_php_in_html_dependencies_detected(self): self.shared( expected_dependencies=[ '"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/js/bootstrap.min.js"', ], expected_language='HTML', expected_lines=22, entity='html-with-php.html', ) def test_html_django_dependencies_detected(self): self.shared( expected_dependencies=[ '"libs/json2.js"', ], expected_language='HTML', expected_lines=40, entity='html-django.html', ) def test_go_dependencies_detected(self): self.shared( expected_dependencies=[ '"compress/gzip"', '"direct"', '"foobar"', '"github.com/golang/example/stringutil"', '"image/gif"', '"log"', '"math"', '"oldname"', '"os"', '"supress"', ], expected_language='Go', expected_lines=24, entity='go.go', ) def test_es6_dependencies_detected(self): self.shared( expected_dependencies=[ 'bravo', 'foxtrot', 'india', 'kilo', 'november', 'oscar', 'quebec', 'tango', 'uniform', 'victor', 'whiskey', ], expected_language='JavaScript', expected_lines=37, entity='es6.js', ) def test_typescript_dependencies_detected(self): self.shared( expected_dependencies=[ 'bravo', 'foxtrot', 'india', 'kilo', 'november', 'oscar', 'quebec', 'tango', 'uniform', 'victor', 'whiskey', ], expected_language='TypeScript', expected_lines=37, entity='typescript.ts', ) def test_swift_dependencies_detected(self): self.shared( expected_dependencies=[ 'UIKit', 'PromiseKit', ], expected_language='Swift', expected_lines=16, entity='swift.swift', ) def test_objective_c_dependencies_detected(self): self.shared( expected_dependencies=[ 'SomeViewController', 'UIKit', 'PromiseKit', ], expected_language='Objective-C', expected_lines=18, entity='objective-c.m', ) def test_scala_dependencies_detected(self): self.shared( expected_dependencies=[ 'com.alpha.SomeClass', 'com.bravo.something', 'com.charlie', 'golf', 
'com.hotel.india', 'juliett.kilo', ], expected_language='Scala', expected_lines=14, entity='scala.scala', ) def test_rust_dependencies_detected(self): self.shared( expected_dependencies=[ 'proc_macro', 'phrases', 'syn', 'quote', ], expected_language='Rust', expected_lines=21, entity='rust.rs', ) def test_kotlin_dependencies_detected(self): self.shared( expected_dependencies=[ 'alpha.time', 'bravo.charlie', 'delta.io', 'echo.Foxtrot', 'h', ], expected_language='Kotlin', expected_lines=24, entity='kotlin.kt', ) def test_haxe_dependencies_detected(self): self.shared( expected_dependencies=[ 'alpha', 'bravo', 'Math', 'charlie', 'delta', ], expected_language='Haxe', expected_lines=18, entity='haxe.hx', ) def test_haskell_dependencies_detected(self): self.shared( expected_dependencies=[ 'Control', 'Data', 'Network', 'System', ], expected_language='Haskell', expected_lines=20, entity='haskell.hs', ) def test_elm_dependencies_detected(self): self.shared( expected_dependencies=[ 'Color', 'Dict', 'TempFontAwesome', 'Html', 'Markdown', 'String', ], expected_language='Elm', expected_lines=21, entity='elm.elm', )
bsd-3-clause
code-sauce/tensorflow
tensorflow/contrib/framework/python/ops/__init__.py
86
1252
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """A module containing TensorFlow ops whose API may change in the future.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function # TODO(ptucker): Add these to tf.contrib.variables? # pylint: disable=wildcard-import from tensorflow.contrib.framework.python.ops.arg_scope import * from tensorflow.contrib.framework.python.ops.ops import * from tensorflow.contrib.framework.python.ops.prettyprint_ops import * from tensorflow.contrib.framework.python.ops.variables import * # pylint: enable=wildcard-import
apache-2.0
381426068/MissionPlanner
Lib/site-packages/scipy/special/tests/test_orthogonal_eval.py
61
4547
import numpy as np from numpy.testing import assert_ import scipy.special.orthogonal as orth from scipy.special._testutils import FuncData def test_eval_chebyt(): n = np.arange(0, 10000, 7) x = 2*np.random.rand() - 1 v1 = np.cos(n*np.arccos(x)) v2 = orth.eval_chebyt(n, x) assert_(np.allclose(v1, v2, rtol=1e-15)) def test_warnings(): # ticket 1334 olderr = np.seterr(all='raise') try: # these should raise no fp warnings orth.eval_legendre(1, 0) orth.eval_laguerre(1, 1) orth.eval_gegenbauer(1, 1, 0) finally: np.seterr(**olderr) class TestPolys(object): """ Check that the eval_* functions agree with the constructed polynomials """ def check_poly(self, func, cls, param_ranges=[], x_range=[], nn=10, nparam=10, nx=10, rtol=1e-8): np.random.seed(1234) dataset = [] for n in np.arange(nn): params = [a + (b-a)*np.random.rand(nparam) for a,b in param_ranges] params = np.asarray(params).T if not param_ranges: params = [0] for p in params: if param_ranges: p = (n,) + tuple(p) else: p = (n,) x = x_range[0] + (x_range[1] - x_range[0])*np.random.rand(nx) x[0] = x_range[0] # always include domain start point x[1] = x_range[1] # always include domain end point poly = np.poly1d(cls(*p)) z = np.c_[np.tile(p, (nx,1)), x, poly(x)] dataset.append(z) dataset = np.concatenate(dataset, axis=0) def polyfunc(*p): p = (p[0].astype(int),) + p[1:] return func(*p) olderr = np.seterr(all='raise') try: ds = FuncData(polyfunc, dataset, range(len(param_ranges)+2), -1, rtol=rtol) ds.check() finally: np.seterr(**olderr) def test_jacobi(self): self.check_poly(orth.eval_jacobi, orth.jacobi, param_ranges=[(-0.99, 10), (-0.99, 10)], x_range=[-1, 1], rtol=1e-5) def test_sh_jacobi(self): self.check_poly(orth.eval_sh_jacobi, orth.sh_jacobi, param_ranges=[(1, 10), (0, 1)], x_range=[0, 1], rtol=1e-5) def test_gegenbauer(self): self.check_poly(orth.eval_gegenbauer, orth.gegenbauer, param_ranges=[(-0.499, 10)], x_range=[-1, 1], rtol=1e-7) def test_chebyt(self): self.check_poly(orth.eval_chebyt, orth.chebyt, 
param_ranges=[], x_range=[-1, 1]) def test_chebyu(self): self.check_poly(orth.eval_chebyu, orth.chebyu, param_ranges=[], x_range=[-1, 1]) def test_chebys(self): self.check_poly(orth.eval_chebys, orth.chebys, param_ranges=[], x_range=[-2, 2]) def test_chebyc(self): self.check_poly(orth.eval_chebyc, orth.chebyc, param_ranges=[], x_range=[-2, 2]) def test_sh_chebyt(self): olderr = np.seterr(all='ignore') try: self.check_poly(orth.eval_sh_chebyt, orth.sh_chebyt, param_ranges=[], x_range=[0, 1]) finally: np.seterr(**olderr) def test_sh_chebyu(self): self.check_poly(orth.eval_sh_chebyu, orth.sh_chebyu, param_ranges=[], x_range=[0, 1]) def test_legendre(self): self.check_poly(orth.eval_legendre, orth.legendre, param_ranges=[], x_range=[-1, 1]) def test_sh_legendre(self): olderr = np.seterr(all='ignore') try: self.check_poly(orth.eval_sh_legendre, orth.sh_legendre, param_ranges=[], x_range=[0, 1]) finally: np.seterr(**olderr) def test_genlaguerre(self): self.check_poly(orth.eval_genlaguerre, orth.genlaguerre, param_ranges=[(-0.99, 10)], x_range=[0, 100]) def test_laguerre(self): self.check_poly(orth.eval_laguerre, orth.laguerre, param_ranges=[], x_range=[0, 100]) def test_hermite(self): self.check_poly(orth.eval_hermite, orth.hermite, param_ranges=[], x_range=[-100, 100]) def test_hermitenorm(self): self.check_poly(orth.eval_hermitenorm, orth.hermitenorm, param_ranges=[], x_range=[-100, 100])
gpl-3.0
cr1901/HDMI2USB-litex-firmware
targets/nexys_video/video.py
1
4745
from litevideo.input import HDMIIn from litevideo.output import VideoOut from litex.soc.cores.frequency_meter import FrequencyMeter from litescope import LiteScopeAnalyzer from targets.utils import csr_map_update, period_ns from targets.nexys_video.net import NetSoC as BaseSoC class VideoSoC(BaseSoC): csr_peripherals = ( "hdmi_out0", "hdmi_in0", "hdmi_in0_freq", "hdmi_in0_edid_mem", ) csr_map_update(BaseSoC.csr_map, csr_peripherals) interrupt_map = { "hdmi_in0": 4, } interrupt_map.update(BaseSoC.interrupt_map) def __init__(self, platform, *args, **kwargs): BaseSoC.__init__(self, platform, *args, **kwargs) mode = "ycbcr422" if mode == "ycbcr422": dw = 16 elif mode == "rgb": dw = 32 else: raise SystemError("Unknown pixel mode.") pix_freq = 148.50e6 # hdmi in 0 hdmi_in0_pads = platform.request("hdmi_in") self.submodules.hdmi_in0 = HDMIIn( hdmi_in0_pads, self.sdram.crossbar.get_port(mode="write"), fifo_depth=512, device="xc7") self.submodules.hdmi_in0_freq = FrequencyMeter(period=self.clk_freq) self.comb += [ self.hdmi_in0_freq.clk.eq(self.hdmi_in0.clocking.cd_pix.clk), hdmi_in0_pads.txen.eq(1) ] self.platform.add_period_constraint(self.hdmi_in0.clocking.cd_pix.clk, period_ns(1*pix_freq)) self.platform.add_period_constraint(self.hdmi_in0.clocking.cd_pix1p25x.clk, period_ns(1.25*pix_freq)) self.platform.add_period_constraint(self.hdmi_in0.clocking.cd_pix5x.clk, period_ns(5*pix_freq)) self.platform.add_false_path_constraints( self.crg.cd_sys.clk, self.hdmi_in0.clocking.cd_pix.clk, self.hdmi_in0.clocking.cd_pix1p25x.clk, self.hdmi_in0.clocking.cd_pix5x.clk) # hdmi out 0 hdmi_out0_pads = platform.request("hdmi_out") hdmi_out0_dram_port = self.sdram.crossbar.get_port( mode="read", dw=dw, cd="hdmi_out0_pix", reverse=True) self.submodules.hdmi_out0 = VideoOut( platform.device, hdmi_out0_pads, hdmi_out0_dram_port, mode=mode, fifo_depth=4096) self.platform.add_false_path_constraints( self.crg.cd_sys.clk, self.hdmi_out0.driver.clocking.cd_pix.clk) 
self.platform.add_period_constraint(self.hdmi_out0.driver.clocking.cd_pix.clk, period_ns(1*pix_freq)) self.platform.add_period_constraint(self.hdmi_out0.driver.clocking.cd_pix5x.clk, period_ns(5*pix_freq)) self.platform.add_false_path_constraints( self.crg.cd_sys.clk, self.hdmi_out0.driver.clocking.cd_pix.clk, self.hdmi_out0.driver.clocking.cd_pix5x.clk) for name, value in sorted(self.platform.hdmi_infos.items()): self.add_constant(name, value) class VideoSoCDebug(VideoSoC): csr_peripherals = ( "analyzer", ) csr_map_update(VideoSoC.csr_map, csr_peripherals) def __init__(self, platform, *args, **kwargs): VideoSoC.__init__(self, platform, *args, **kwargs) # # # # analyzer analyzer_signals = [ self.hdmi_in0.data0_charsync.raw_data, self.hdmi_in0.data1_charsync.raw_data, self.hdmi_in0.data2_charsync.raw_data, self.hdmi_in0.data0_charsync.synced, self.hdmi_in0.data1_charsync.synced, self.hdmi_in0.data2_charsync.synced, self.hdmi_in0.data0_charsync.data, self.hdmi_in0.data1_charsync.data, self.hdmi_in0.data2_charsync.data, self.hdmi_in0.syncpol.valid_o, self.hdmi_in0.syncpol.de, self.hdmi_in0.syncpol.hsync, self.hdmi_in0.syncpol.vsync, ] self.submodules.analyzer = LiteScopeAnalyzer(analyzer_signals, 1024, cd="hdmi_in0_pix", cd_ratio=2) # leds pix_counter = Signal(32) self.sync.hdmi_in0_pix += pix_counter.eq(pix_counter + 1) self.comb += platform.request("user_led", 0).eq(pix_counter[26]) pix1p25x_counter = Signal(32) self.sync.pix1p25x += pix1p25x_counter.eq(pix1p25x_counter + 1) self.comb += platform.request("user_led", 1).eq(pix1p25x_counter[26]) pix5x_counter = Signal(32) self.sync.hdmi_in0_pix5x += pix5x_counter.eq(pix5x_counter + 1) self.comb += platform.request("user_led", 2).eq(pix5x_counter[26]) def do_exit(self, vns): self.analyzer.export_csv(vns, "test/analyzer.csv") SoC = VideoSoC
bsd-2-clause
danfairs/django-lazysignup
custom_user_tests/migrations/0001_initial.py
2
2861
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django.core.validators import django.utils.timezone import django.contrib.auth.models class Migration(migrations.Migration): dependencies = [ ('auth', '0001_initial'), ] operations = [ migrations.CreateModel( name='CustomUser', fields=[ ('id', models.AutoField(serialize=False, verbose_name='ID', auto_created=True, primary_key=True)), ('password', models.CharField(verbose_name='password', max_length=128)), ('last_login', models.DateTimeField(verbose_name='last login', null=True, blank=True)), ('is_superuser', models.BooleanField(verbose_name='superuser status', default=False, help_text='Designates that this user has all permissions without explicitly assigning them.')), ('my_custom_field', models.CharField(null=True, max_length=50, blank=True)), ('username', models.CharField(unique=True, verbose_name='username', help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username.', 'invalid')], max_length=30)), ('first_name', models.CharField(verbose_name='first name', max_length=30, blank=True)), ('last_name', models.CharField(verbose_name='last name', max_length=30, blank=True)), ('email', models.EmailField(verbose_name='email address', max_length=254, blank=True)), ('is_staff', models.BooleanField(verbose_name='staff status', default=False, help_text='Designates whether the user can log into this admin site.')), ('is_active', models.BooleanField(verbose_name='active', default=True, help_text='Designates whether this user should be treated as active. 
Unselect this instead of deleting accounts.')), ('date_joined', models.DateTimeField(verbose_name='date joined', default=django.utils.timezone.now)), ('groups', models.ManyToManyField(to='auth.Group', verbose_name='groups', blank=True, related_query_name='user', related_name='user_set', help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.')), ('user_permissions', models.ManyToManyField(to='auth.Permission', verbose_name='user permissions', blank=True, related_query_name='user', related_name='user_set', help_text='Specific permissions for this user.')), ], options={ 'verbose_name': 'user', 'verbose_name_plural': 'users', }, # managers=[ # ('objects', django.contrib.auth.models.UserManager()), # ], ), ]
bsd-3-clause
googleads/google-ads-python
google/ads/googleads/v6/errors/types/function_parsing_error.py
1
1514
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import proto # type: ignore __protobuf__ = proto.module( package="google.ads.googleads.v6.errors", marshal="google.ads.googleads.v6", manifest={"FunctionParsingErrorEnum",}, ) class FunctionParsingErrorEnum(proto.Message): r"""Container for enum describing possible function parsing errors. """ class FunctionParsingError(proto.Enum): r"""Enum describing possible function parsing errors.""" UNSPECIFIED = 0 UNKNOWN = 1 NO_MORE_INPUT = 2 EXPECTED_CHARACTER = 3 UNEXPECTED_SEPARATOR = 4 UNMATCHED_LEFT_BRACKET = 5 UNMATCHED_RIGHT_BRACKET = 6 TOO_MANY_NESTED_FUNCTIONS = 7 MISSING_RIGHT_HAND_OPERAND = 8 INVALID_OPERATOR_NAME = 9 FEED_ATTRIBUTE_OPERAND_ARGUMENT_NOT_INTEGER = 10 NO_OPERANDS = 11 TOO_MANY_OPERANDS = 12 __all__ = tuple(sorted(__protobuf__.manifest))
apache-2.0
bmander/dancecontraption
django/db/utils.py
78
6129
import inspect import os from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.utils.importlib import import_module DEFAULT_DB_ALIAS = 'default' # Define some exceptions that mirror the PEP249 interface. # We will rethrow any backend-specific errors using these # common wrappers class DatabaseError(Exception): pass class IntegrityError(DatabaseError): pass def load_backend(backend_name): try: module = import_module('.base', 'django.db.backends.%s' % backend_name) import warnings warnings.warn( "Short names for DATABASE_ENGINE are deprecated; prepend with 'django.db.backends.'", DeprecationWarning ) return module except ImportError, e: # Look for a fully qualified database backend name try: return import_module('.base', backend_name) except ImportError, e_user: # The database backend wasn't found. Display a helpful error message # listing all possible (built-in) database backends. backend_dir = os.path.join(os.path.dirname(__file__), 'backends') try: available_backends = [f for f in os.listdir(backend_dir) if os.path.isdir(os.path.join(backend_dir, f)) and not f.startswith('.')] except EnvironmentError: available_backends = [] available_backends.sort() if backend_name not in available_backends: error_msg = ("%r isn't an available database backend. \n" + "Try using django.db.backends.XXX, where XXX is one of:\n %s\n" + "Error was: %s") % \ (backend_name, ", ".join(map(repr, available_backends)), e_user) raise ImproperlyConfigured(error_msg) else: raise # If there's some other error, this must be an error in Django itself. class ConnectionDoesNotExist(Exception): pass class ConnectionHandler(object): def __init__(self, databases): self.databases = databases self._connections = {} def ensure_defaults(self, alias): """ Puts the defaults into the settings dictionary for a given connection where no settings is provided. 
""" try: conn = self.databases[alias] except KeyError: raise ConnectionDoesNotExist("The connection %s doesn't exist" % alias) conn.setdefault('ENGINE', 'django.db.backends.dummy') if conn['ENGINE'] == 'django.db.backends.' or not conn['ENGINE']: conn['ENGINE'] = 'django.db.backends.dummy' conn.setdefault('OPTIONS', {}) conn.setdefault('TEST_CHARSET', None) conn.setdefault('TEST_COLLATION', None) conn.setdefault('TEST_NAME', None) conn.setdefault('TEST_MIRROR', None) conn.setdefault('TIME_ZONE', settings.TIME_ZONE) for setting in ('NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'): conn.setdefault(setting, '') def __getitem__(self, alias): if alias in self._connections: return self._connections[alias] self.ensure_defaults(alias) db = self.databases[alias] backend = load_backend(db['ENGINE']) conn = backend.DatabaseWrapper(db, alias) self._connections[alias] = conn return conn def __iter__(self): return iter(self.databases) def all(self): return [self[alias] for alias in self] class ConnectionRouter(object): def __init__(self, routers): self.routers = [] for r in routers: if isinstance(r, basestring): try: module_name, klass_name = r.rsplit('.', 1) module = import_module(module_name) except ImportError, e: raise ImproperlyConfigured('Error importing database router %s: "%s"' % (klass_name, e)) try: router_class = getattr(module, klass_name) except AttributeError: raise ImproperlyConfigured('Module "%s" does not define a database router name "%s"' % (module, klass_name)) else: router = router_class() else: router = r self.routers.append(router) def _router_func(action): def _route_db(self, model, **hints): chosen_db = None for router in self.routers: try: method = getattr(router, action) except AttributeError: # If the router doesn't have a method, skip to the next one. 
pass else: chosen_db = method(model, **hints) if chosen_db: return chosen_db try: return hints['instance']._state.db or DEFAULT_DB_ALIAS except KeyError: return DEFAULT_DB_ALIAS return _route_db db_for_read = _router_func('db_for_read') db_for_write = _router_func('db_for_write') def allow_relation(self, obj1, obj2, **hints): for router in self.routers: try: method = router.allow_relation except AttributeError: # If the router doesn't have a method, skip to the next one. pass else: allow = method(obj1, obj2, **hints) if allow is not None: return allow return obj1._state.db == obj2._state.db def allow_syncdb(self, db, model): for router in self.routers: try: method = router.allow_syncdb except AttributeError: # If the router doesn't have a method, skip to the next one. pass else: allow = method(db, model) if allow is not None: return allow return True
bsd-3-clause
BakanovKirill/Medicine
src/medicine/settings.py
1
5625
# Django settings for medicine project. import os from django.conf import global_settings DEBUG = True TEMPLATE_DEBUG = DEBUG PROJECT_ROOT = os.path.join(os.path.abspath(os.path.dirname(__file__)), '../../') ADMINS = ( # ('Your Name', 'your_email@example.com'), ) MANAGERS = ADMINS DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'. 'NAME': 'medicine.db', # Or path to database file if using sqlite3. 'USER': '', # Not used with sqlite3. 'PASSWORD': '', # Not used with sqlite3. 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. # On Unix systems, a value of None will cause Django to use the same # timezone as the operating system. # If running in a Windows environment this must be set to the same as your # system time zone. TIME_ZONE = 'America/Chicago' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale. USE_L10N = True # If you set this to False, Django will not use timezone-aware datetimes. USE_TZ = True # Absolute filesystem path to the directory that will hold user-uploaded files. # Example: "/home/media/media.lawrence.com/media/" MEDIA_ROOT = '' # URL that handles the media served from MEDIA_ROOT. Make sure to use a # trailing slash. 
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/" MEDIA_URL = '' # Absolute path to the directory static files should be collected to. # Don't put anything in this directory yourself; store your static files # in apps' "static/" subdirectories and in STATICFILES_DIRS. # Example: "/home/media/media.lawrence.com/static/" STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static') # URL prefix for static files. # Example: "http://media.lawrence.com/static/" STATIC_URL = '/static/' # Additional locations of static files STATICFILES_DIRS = ( # Put strings here, like "/home/html/static" or "C:/www/django/static". # Always use forward slashes, even on Windows. # Don't forget to use absolute paths, not relative paths. ) # List of finder classes that know how to find static files in # various locations. STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'django.contrib.staticfiles.finders.DefaultStorageFinder', ) # Make this unique, and don't share it with anybody. SECRET_KEY = 'ocyom8ze-5%rlr5^tysfor2!xy%q6-#(+f9wnmp#aq@k0*q)h^' # List of callables that know how to import templates from various sources. TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', # 'django.template.loaders.eggs.Loader', ) MIDDLEWARE_CLASSES = ( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', # Uncomment the next line for simple clickjacking protection: # 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) ROOT_URLCONF = 'medicine.urls' # Python dotted path to the WSGI application used by Django's runserver. 
WSGI_APPLICATION = 'medicine.wsgi.application' import os TEMPLATE_DIRS = (os.path.join(os.path.dirname(__file__), '..', 'templates').replace('\\','/'),) INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', 'django.contrib.admin', 'south', 'medicine', ) TEMPLATE_CONTEXT_PROCESSORS = global_settings.TEMPLATE_CONTEXT_PROCESSORS + ( 'django.core.context_processors.request', 'django.core.context_processors.csrf', 'medicine.context_processors.settings', ) LOGIN_URL = '/login/' # A sample logging configuration. The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. # See http://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } } MIDDLEWARE_CLASSES = ('medicine.middleware.QueryCountDebugMiddleware',) + MIDDLEWARE_CLASSES
mit
hale36/SRTV
sickbeard/notifiers/nma.py
18
2488
import sickbeard from sickbeard import logger, common from pynma import pynma class NMA_Notifier: def test_notify(self, nma_api, nma_priority): return self._sendNMA(nma_api, nma_priority, event="Test", message="Testing NMA settings from SickRage", force=True) def notify_snatch(self, ep_name): if sickbeard.NMA_NOTIFY_ONSNATCH: self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SNATCH], message=ep_name) def notify_download(self, ep_name): if sickbeard.NMA_NOTIFY_ONDOWNLOAD: self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_DOWNLOAD], message=ep_name) def notify_subtitle_download(self, ep_name, lang): if sickbeard.NMA_NOTIFY_ONSUBTITLEDOWNLOAD: self._sendNMA(nma_api=None, nma_priority=None, event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD], message=ep_name + ": " + lang) def notify_git_update(self, new_version = "??"): if sickbeard.USE_NMA: update_text=common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT] title=common.notifyStrings[common.NOTIFY_GIT_UPDATE] self._sendNMA(nma_api=None, nma_priority=None, event=title, message=update_text + new_version) def _sendNMA(self, nma_api=None, nma_priority=None, event=None, message=None, force=False): title = 'SickRage' if not sickbeard.USE_NMA and not force: return False if nma_api == None: nma_api = sickbeard.NMA_API if nma_priority == None: nma_priority = sickbeard.NMA_PRIORITY batch = False p = pynma.PyNMA() keys = nma_api.split(',') p.addkey(keys) if len(keys) > 1: batch = True logger.log("NMA: Sending notice with details: event=\"%s\", message=\"%s\", priority=%s, batch=%s" % (event, message, nma_priority, batch), logger.DEBUG) response = p.push(application=title, event=event, description=message, priority=nma_priority, batch_mode=batch) if not response[nma_api][u'code'] == u'200': logger.log(u'Could not send notification to NotifyMyAndroid', logger.ERROR) return False else: logger.log(u"NMA: Notification sent to NotifyMyAndroid", 
logger.INFO) return True notifier = NMA_Notifier
gpl-3.0
tracierenea/gnuradio
gr-filter/python/filter/gui/pyqt_filter_stacked.py
58
111310
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'pyqt_filter_stacked.ui' # # Created: Wed Aug 8 11:42:47 2012 # by: PyQt4 UI code generator 4.9.1 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: _fromUtf8 = lambda s: s class Ui_MainWindow(object): def setupUi(self, MainWindow): MainWindow.setObjectName(_fromUtf8("MainWindow")) MainWindow.resize(1128, 649) self.centralwidget = QtGui.QWidget(MainWindow) self.centralwidget.setObjectName(_fromUtf8("centralwidget")) self.gridLayout = QtGui.QGridLayout(self.centralwidget) self.gridLayout.setObjectName(_fromUtf8("gridLayout")) self.stackedWindows = QtGui.QStackedWidget(self.centralwidget) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.stackedWindows.sizePolicy().hasHeightForWidth()) self.stackedWindows.setSizePolicy(sizePolicy) self.stackedWindows.setObjectName(_fromUtf8("stackedWindows")) self.classic = QtGui.QWidget() sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.classic.sizePolicy().hasHeightForWidth()) self.classic.setSizePolicy(sizePolicy) self.classic.setObjectName(_fromUtf8("classic")) self.horizontalLayout = QtGui.QHBoxLayout(self.classic) self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout")) self.splitter = QtGui.QSplitter(self.classic) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(1) sizePolicy.setHeightForWidth(self.splitter.sizePolicy().hasHeightForWidth()) self.splitter.setSizePolicy(sizePolicy) self.splitter.setMinimumSize(QtCore.QSize(600, 0)) 
self.splitter.setOrientation(QtCore.Qt.Vertical) self.splitter.setObjectName(_fromUtf8("splitter")) self.tabGroup = QtGui.QTabWidget(self.splitter) self.tabGroup.setEnabled(True) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(1) sizePolicy.setHeightForWidth(self.tabGroup.sizePolicy().hasHeightForWidth()) self.tabGroup.setSizePolicy(sizePolicy) self.tabGroup.setTabsClosable(False) self.tabGroup.setMovable(False) self.tabGroup.setObjectName(_fromUtf8("tabGroup")) self.freqTab = QtGui.QWidget() self.freqTab.setObjectName(_fromUtf8("freqTab")) self.horizontalLayout_9 = QtGui.QHBoxLayout(self.freqTab) self.horizontalLayout_9.setObjectName(_fromUtf8("horizontalLayout_9")) self.freqPlot = Qwt5.QwtPlot(self.freqTab) self.freqPlot.setObjectName(_fromUtf8("freqPlot")) self.horizontalLayout_9.addWidget(self.freqPlot) self.tabGroup.addTab(self.freqTab, _fromUtf8("")) self.timeTab = QtGui.QWidget() self.timeTab.setObjectName(_fromUtf8("timeTab")) self.horizontalLayout_10 = QtGui.QHBoxLayout(self.timeTab) self.horizontalLayout_10.setObjectName(_fromUtf8("horizontalLayout_10")) self.timePlot = Qwt5.QwtPlot(self.timeTab) self.timePlot.setObjectName(_fromUtf8("timePlot")) self.horizontalLayout_10.addWidget(self.timePlot) self.tabGroup.addTab(self.timeTab, _fromUtf8("")) self.phaseTab = QtGui.QWidget() self.phaseTab.setObjectName(_fromUtf8("phaseTab")) self.horizontalLayout_11 = QtGui.QHBoxLayout(self.phaseTab) self.horizontalLayout_11.setObjectName(_fromUtf8("horizontalLayout_11")) self.phasePlot = Qwt5.QwtPlot(self.phaseTab) self.phasePlot.setObjectName(_fromUtf8("phasePlot")) self.horizontalLayout_11.addWidget(self.phasePlot) self.tabGroup.addTab(self.phaseTab, _fromUtf8("")) self.groupTab = QtGui.QWidget() self.groupTab.setObjectName(_fromUtf8("groupTab")) self.horizontalLayout_12 = QtGui.QHBoxLayout(self.groupTab) 
self.horizontalLayout_12.setObjectName(_fromUtf8("horizontalLayout_12")) self.groupPlot = Qwt5.QwtPlot(self.groupTab) self.groupPlot.setObjectName(_fromUtf8("groupPlot")) self.horizontalLayout_12.addWidget(self.groupPlot) self.tabGroup.addTab(self.groupTab, _fromUtf8("")) self.fcTab = QtGui.QWidget() self.fcTab.setObjectName(_fromUtf8("fcTab")) self.horizontalLayout_7 = QtGui.QHBoxLayout(self.fcTab) self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7")) self.filterCoeff = QtGui.QTextBrowser(self.fcTab) self.filterCoeff.setObjectName(_fromUtf8("filterCoeff")) self.horizontalLayout_7.addWidget(self.filterCoeff) self.tabGroup.addTab(self.fcTab, _fromUtf8("")) self.impresTab = QtGui.QWidget() self.impresTab.setObjectName(_fromUtf8("impresTab")) self.horizontalLayout_6 = QtGui.QHBoxLayout(self.impresTab) self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6")) self.impresPlot = Qwt5.QwtPlot(self.impresTab) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.impresPlot.sizePolicy().hasHeightForWidth()) self.impresPlot.setSizePolicy(sizePolicy) self.impresPlot.setObjectName(_fromUtf8("impresPlot")) self.horizontalLayout_6.addWidget(self.impresPlot) self.tabGroup.addTab(self.impresTab, _fromUtf8("")) self.stepresTab = QtGui.QWidget() self.stepresTab.setObjectName(_fromUtf8("stepresTab")) self.horizontalLayout_8 = QtGui.QHBoxLayout(self.stepresTab) self.horizontalLayout_8.setObjectName(_fromUtf8("horizontalLayout_8")) self.stepresPlot = Qwt5.QwtPlot(self.stepresTab) self.stepresPlot.setObjectName(_fromUtf8("stepresPlot")) self.horizontalLayout_8.addWidget(self.stepresPlot) self.tabGroup.addTab(self.stepresTab, _fromUtf8("")) self.pdelayTab = QtGui.QWidget() self.pdelayTab.setObjectName(_fromUtf8("pdelayTab")) self.horizontalLayout_17 = QtGui.QHBoxLayout(self.pdelayTab) 
self.horizontalLayout_17.setObjectName(_fromUtf8("horizontalLayout_17")) self.pdelayPlot = Qwt5.QwtPlot(self.pdelayTab) self.pdelayPlot.setObjectName(_fromUtf8("pdelayPlot")) self.horizontalLayout_17.addWidget(self.pdelayPlot) self.tabGroup.addTab(self.pdelayTab, _fromUtf8("")) self.filterspecView = QtGui.QTabWidget(self.splitter) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.filterspecView.sizePolicy().hasHeightForWidth()) self.filterspecView.setSizePolicy(sizePolicy) self.filterspecView.setMinimumSize(QtCore.QSize(0, 100)) self.filterspecView.setBaseSize(QtCore.QSize(0, 100)) self.filterspecView.setDocumentMode(False) self.filterspecView.setTabsClosable(False) self.filterspecView.setObjectName(_fromUtf8("filterspecView")) self.bandDiagram = QtGui.QWidget() sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.bandDiagram.sizePolicy().hasHeightForWidth()) self.bandDiagram.setSizePolicy(sizePolicy) self.bandDiagram.setObjectName(_fromUtf8("bandDiagram")) self.horizontalLayout_13 = QtGui.QHBoxLayout(self.bandDiagram) self.horizontalLayout_13.setObjectName(_fromUtf8("horizontalLayout_13")) self.bandView = BandGraphicsView(self.bandDiagram) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(1) sizePolicy.setHeightForWidth(self.bandView.sizePolicy().hasHeightForWidth()) self.bandView.setSizePolicy(sizePolicy) self.bandView.setMinimumSize(QtCore.QSize(525, 249)) self.bandView.setObjectName(_fromUtf8("bandView")) self.horizontalLayout_13.addWidget(self.bandView) self.filterspecView.addTab(self.bandDiagram, _fromUtf8("")) self.poleZero = QtGui.QWidget() 
self.poleZero.setAutoFillBackground(False) self.poleZero.setObjectName(_fromUtf8("poleZero")) self.gridLayout_2 = QtGui.QGridLayout(self.poleZero) self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2")) self.pzPlot = PzPlot(self.poleZero) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.pzPlot.sizePolicy().hasHeightForWidth()) self.pzPlot.setSizePolicy(sizePolicy) self.pzPlot.setObjectName(_fromUtf8("pzPlot")) self.gridLayout_2.addWidget(self.pzPlot, 0, 0, 1, 1) self.pzgroupBox = QtGui.QGroupBox(self.poleZero) self.pzgroupBox.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.pzgroupBox.setTitle(_fromUtf8("")) self.pzgroupBox.setFlat(False) self.pzgroupBox.setCheckable(False) self.pzgroupBox.setObjectName(_fromUtf8("pzgroupBox")) self.verticalLayout_3 = QtGui.QVBoxLayout(self.pzgroupBox) self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3")) self.addzeroPush = QtGui.QToolButton(self.pzgroupBox) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/add_zero.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.addzeroPush.setIcon(icon) self.addzeroPush.setIconSize(QtCore.QSize(16, 16)) self.addzeroPush.setCheckable(True) self.addzeroPush.setObjectName(_fromUtf8("addzeroPush")) self.verticalLayout_3.addWidget(self.addzeroPush) self.addpolePush = QtGui.QToolButton(self.pzgroupBox) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/add_pole.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.addpolePush.setIcon(icon1) self.addpolePush.setCheckable(True) self.addpolePush.setObjectName(_fromUtf8("addpolePush")) self.verticalLayout_3.addWidget(self.addpolePush) self.delPush = QtGui.QToolButton(self.pzgroupBox) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/remove_red.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.delPush.setIcon(icon2) 
self.delPush.setCheckable(True) self.delPush.setObjectName(_fromUtf8("delPush")) self.verticalLayout_3.addWidget(self.delPush) self.conjPush = QtGui.QToolButton(self.pzgroupBox) icon3 = QtGui.QIcon() icon3.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/conjugate.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.conjPush.setIcon(icon3) self.conjPush.setIconSize(QtCore.QSize(16, 16)) self.conjPush.setCheckable(True) self.conjPush.setObjectName(_fromUtf8("conjPush")) self.verticalLayout_3.addWidget(self.conjPush) self.gridLayout_2.addWidget(self.pzgroupBox, 0, 1, 1, 1) self.pzstatusBar = QtGui.QStatusBar(self.poleZero) self.pzstatusBar.setObjectName(_fromUtf8("pzstatusBar")) self.gridLayout_2.addWidget(self.pzstatusBar, 1, 0, 1, 2) self.filterspecView.addTab(self.poleZero, _fromUtf8("")) self.horizontalLayout.addWidget(self.splitter) self.quickFrame = QtGui.QFrame(self.classic) self.quickFrame.setMinimumSize(QtCore.QSize(180, 200)) self.quickFrame.setFrameShape(QtGui.QFrame.StyledPanel) self.quickFrame.setFrameShadow(QtGui.QFrame.Raised) self.quickFrame.setObjectName(_fromUtf8("quickFrame")) self.responseBox = QtGui.QGroupBox(self.quickFrame) self.responseBox.setGeometry(QtCore.QRect(10, 10, 161, 251)) self.responseBox.setObjectName(_fromUtf8("responseBox")) self.checkMagres = QtGui.QCheckBox(self.responseBox) self.checkMagres.setGeometry(QtCore.QRect(10, 40, 151, 19)) self.checkMagres.setChecked(True) self.checkMagres.setObjectName(_fromUtf8("checkMagres")) self.checkPhase = QtGui.QCheckBox(self.responseBox) self.checkPhase.setGeometry(QtCore.QRect(10, 60, 151, 19)) self.checkPhase.setChecked(True) self.checkPhase.setObjectName(_fromUtf8("checkPhase")) self.checkGdelay = QtGui.QCheckBox(self.responseBox) self.checkGdelay.setGeometry(QtCore.QRect(10, 80, 111, 19)) self.checkGdelay.setChecked(True) self.checkGdelay.setObjectName(_fromUtf8("checkGdelay")) self.checkPdelay = QtGui.QCheckBox(self.responseBox) self.checkPdelay.setGeometry(QtCore.QRect(10, 100, 111, 19)) 
self.checkPdelay.setChecked(True) self.checkPdelay.setObjectName(_fromUtf8("checkPdelay")) self.checkImpulse = QtGui.QCheckBox(self.responseBox) self.checkImpulse.setGeometry(QtCore.QRect(10, 120, 141, 19)) self.checkImpulse.setChecked(True) self.checkImpulse.setObjectName(_fromUtf8("checkImpulse")) self.checkStep = QtGui.QCheckBox(self.responseBox) self.checkStep.setGeometry(QtCore.QRect(10, 140, 131, 19)) self.checkStep.setChecked(True) self.checkStep.setObjectName(_fromUtf8("checkStep")) self.checkGrid = QtGui.QCheckBox(self.responseBox) self.checkGrid.setGeometry(QtCore.QRect(10, 160, 85, 19)) self.checkGrid.setObjectName(_fromUtf8("checkGrid")) self.checkFcoeff = QtGui.QCheckBox(self.responseBox) self.checkFcoeff.setGeometry(QtCore.QRect(10, 180, 131, 19)) self.checkFcoeff.setChecked(True) self.checkFcoeff.setObjectName(_fromUtf8("checkFcoeff")) self.checkKeepcur = QtGui.QCheckBox(self.responseBox) self.checkKeepcur.setGeometry(QtCore.QRect(10, 200, 141, 19)) self.checkKeepcur.setObjectName(_fromUtf8("checkKeepcur")) self.groupSpecs = QtGui.QGroupBox(self.quickFrame) self.groupSpecs.setGeometry(QtCore.QRect(10, 280, 161, 91)) self.groupSpecs.setObjectName(_fromUtf8("groupSpecs")) self.checkBand = QtGui.QCheckBox(self.groupSpecs) self.checkBand.setGeometry(QtCore.QRect(10, 20, 111, 19)) self.checkBand.setChecked(True) self.checkBand.setObjectName(_fromUtf8("checkBand")) self.checkPzplot = QtGui.QCheckBox(self.groupSpecs) self.checkPzplot.setGeometry(QtCore.QRect(10, 40, 131, 19)) self.checkPzplot.setChecked(True) self.checkPzplot.setObjectName(_fromUtf8("checkPzplot")) self.sysParamsBox = QtGui.QGroupBox(self.quickFrame) self.sysParamsBox.setGeometry(QtCore.QRect(10, 390, 161, 91)) self.sysParamsBox.setObjectName(_fromUtf8("sysParamsBox")) self.formLayout_4 = QtGui.QFormLayout(self.sysParamsBox) self.formLayout_4.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow) self.formLayout_4.setObjectName(_fromUtf8("formLayout_4")) self.nfftLabel = 
QtGui.QLabel(self.sysParamsBox) self.nfftLabel.setMinimumSize(QtCore.QSize(150, 0)) self.nfftLabel.setObjectName(_fromUtf8("nfftLabel")) self.formLayout_4.setWidget(1, QtGui.QFormLayout.LabelRole, self.nfftLabel) self.nfftEdit = QtGui.QLineEdit(self.sysParamsBox) self.nfftEdit.setObjectName(_fromUtf8("nfftEdit")) self.formLayout_4.setWidget(2, QtGui.QFormLayout.LabelRole, self.nfftEdit) self.horizontalLayout.addWidget(self.quickFrame) self.stackedWindows.addWidget(self.classic) self.modern = QtGui.QWidget() sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.modern.sizePolicy().hasHeightForWidth()) self.modern.setSizePolicy(sizePolicy) self.modern.setObjectName(_fromUtf8("modern")) self.horizontalLayout_5 = QtGui.QHBoxLayout(self.modern) self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5")) self.splitter_3 = QtGui.QSplitter(self.modern) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.splitter_3.sizePolicy().hasHeightForWidth()) self.splitter_3.setSizePolicy(sizePolicy) self.splitter_3.setOrientation(QtCore.Qt.Vertical) self.splitter_3.setObjectName(_fromUtf8("splitter_3")) self.splitter_2 = QtGui.QSplitter(self.splitter_3) self.splitter_2.setOrientation(QtCore.Qt.Horizontal) self.splitter_2.setObjectName(_fromUtf8("splitter_2")) self.mfreqTabgroup = QtGui.QTabWidget(self.splitter_2) self.mfreqTabgroup.setTabsClosable(False) self.mfreqTabgroup.setObjectName(_fromUtf8("mfreqTabgroup")) self.mfreqTab = QtGui.QWidget() sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.mfreqTab.sizePolicy().hasHeightForWidth()) 
self.mfreqTab.setSizePolicy(sizePolicy) self.mfreqTab.setObjectName(_fromUtf8("mfreqTab")) self.horizontalLayout_2 = QtGui.QHBoxLayout(self.mfreqTab) self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2")) self.mfreqPlot = Qwt5.QwtPlot(self.mfreqTab) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.mfreqPlot.sizePolicy().hasHeightForWidth()) self.mfreqPlot.setSizePolicy(sizePolicy) self.mfreqPlot.setObjectName(_fromUtf8("mfreqPlot")) self.horizontalLayout_2.addWidget(self.mfreqPlot) self.mfgroupBox = QtGui.QGroupBox(self.mfreqTab) self.mfgroupBox.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.mfgroupBox.setTitle(_fromUtf8("")) self.mfgroupBox.setFlat(False) self.mfgroupBox.setCheckable(False) self.mfgroupBox.setObjectName(_fromUtf8("mfgroupBox")) self.verticalLayout_2 = QtGui.QVBoxLayout(self.mfgroupBox) self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2")) self.mfmagPush = QtGui.QToolButton(self.mfgroupBox) icon4 = QtGui.QIcon() icon4.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/mag_response.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.mfmagPush.setIcon(icon4) self.mfmagPush.setIconSize(QtCore.QSize(16, 16)) self.mfmagPush.setCheckable(False) self.mfmagPush.setObjectName(_fromUtf8("mfmagPush")) self.verticalLayout_2.addWidget(self.mfmagPush) self.mfphasePush = QtGui.QToolButton(self.mfgroupBox) icon5 = QtGui.QIcon() icon5.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/phase_response.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.mfphasePush.setIcon(icon5) self.mfphasePush.setIconSize(QtCore.QSize(16, 16)) self.mfphasePush.setCheckable(False) self.mfphasePush.setObjectName(_fromUtf8("mfphasePush")) self.verticalLayout_2.addWidget(self.mfphasePush) self.mfgpdlyPush = QtGui.QToolButton(self.mfgroupBox) icon6 = QtGui.QIcon() icon6.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/group_delay.svg")), 
QtGui.QIcon.Normal, QtGui.QIcon.Off) self.mfgpdlyPush.setIcon(icon6) self.mfgpdlyPush.setIconSize(QtCore.QSize(16, 16)) self.mfgpdlyPush.setCheckable(False) self.mfgpdlyPush.setObjectName(_fromUtf8("mfgpdlyPush")) self.verticalLayout_2.addWidget(self.mfgpdlyPush) self.mfphdlyPush = QtGui.QToolButton(self.mfgroupBox) icon7 = QtGui.QIcon() icon7.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/phase_delay.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.mfphdlyPush.setIcon(icon7) self.mfphdlyPush.setIconSize(QtCore.QSize(16, 16)) self.mfphdlyPush.setCheckable(False) self.mfphdlyPush.setObjectName(_fromUtf8("mfphdlyPush")) self.verticalLayout_2.addWidget(self.mfphdlyPush) self.mfoverlayPush = QtGui.QToolButton(self.mfgroupBox) icon8 = QtGui.QIcon() icon8.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/overlay.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.mfoverlayPush.setIcon(icon8) self.mfoverlayPush.setIconSize(QtCore.QSize(16, 16)) self.mfoverlayPush.setCheckable(True) self.mfoverlayPush.setObjectName(_fromUtf8("mfoverlayPush")) self.verticalLayout_2.addWidget(self.mfoverlayPush) self.horizontalLayout_2.addWidget(self.mfgroupBox) self.mfreqTabgroup.addTab(self.mfreqTab, _fromUtf8("")) self.mtimeTabgroup = QtGui.QTabWidget(self.splitter_2) self.mtimeTabgroup.setObjectName(_fromUtf8("mtimeTabgroup")) self.mtimeTab = QtGui.QWidget() sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.mtimeTab.sizePolicy().hasHeightForWidth()) self.mtimeTab.setSizePolicy(sizePolicy) self.mtimeTab.setObjectName(_fromUtf8("mtimeTab")) self.horizontalLayout_3 = QtGui.QHBoxLayout(self.mtimeTab) self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3")) self.mtimePlot = Qwt5.QwtPlot(self.mtimeTab) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(0) 
sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.mtimePlot.sizePolicy().hasHeightForWidth()) self.mtimePlot.setSizePolicy(sizePolicy) self.mtimePlot.setObjectName(_fromUtf8("mtimePlot")) self.horizontalLayout_3.addWidget(self.mtimePlot) self.mtgroupBox = QtGui.QGroupBox(self.mtimeTab) self.mtgroupBox.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.mtgroupBox.setTitle(_fromUtf8("")) self.mtgroupBox.setFlat(False) self.mtgroupBox.setCheckable(False) self.mtgroupBox.setObjectName(_fromUtf8("mtgroupBox")) self.verticalLayout_5 = QtGui.QVBoxLayout(self.mtgroupBox) self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5")) self.mttapsPush = QtGui.QToolButton(self.mtgroupBox) icon9 = QtGui.QIcon() icon9.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/filtr_taps.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.mttapsPush.setIcon(icon9) self.mttapsPush.setIconSize(QtCore.QSize(16, 16)) self.mttapsPush.setCheckable(False) self.mttapsPush.setObjectName(_fromUtf8("mttapsPush")) self.verticalLayout_5.addWidget(self.mttapsPush) self.mtstepPush = QtGui.QToolButton(self.mtgroupBox) icon10 = QtGui.QIcon() icon10.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/step_response.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.mtstepPush.setIcon(icon10) self.mtstepPush.setIconSize(QtCore.QSize(16, 16)) self.mtstepPush.setCheckable(False) self.mtstepPush.setObjectName(_fromUtf8("mtstepPush")) self.verticalLayout_5.addWidget(self.mtstepPush) self.mtimpPush = QtGui.QToolButton(self.mtgroupBox) icon11 = QtGui.QIcon() icon11.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/impulse.svg")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.mtimpPush.setIcon(icon11) self.mtimpPush.setIconSize(QtCore.QSize(16, 16)) self.mtimpPush.setCheckable(False) self.mtimpPush.setObjectName(_fromUtf8("mtimpPush")) self.verticalLayout_5.addWidget(self.mtimpPush) self.horizontalLayout_3.addWidget(self.mtgroupBox) self.mtimeTabgroup.addTab(self.mtimeTab, _fromUtf8("")) self.mfilterspecView = 
QtGui.QTabWidget(self.splitter_3) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.mfilterspecView.sizePolicy().hasHeightForWidth()) self.mfilterspecView.setSizePolicy(sizePolicy) self.mfilterspecView.setMinimumSize(QtCore.QSize(0, 100)) self.mfilterspecView.setBaseSize(QtCore.QSize(0, 100)) self.mfilterspecView.setDocumentMode(False) self.mfilterspecView.setTabsClosable(False) self.mfilterspecView.setObjectName(_fromUtf8("mfilterspecView")) self.mbandDiagram = QtGui.QWidget() sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.mbandDiagram.sizePolicy().hasHeightForWidth()) self.mbandDiagram.setSizePolicy(sizePolicy) self.mbandDiagram.setObjectName(_fromUtf8("mbandDiagram")) self.horizontalLayout_15 = QtGui.QHBoxLayout(self.mbandDiagram) self.horizontalLayout_15.setObjectName(_fromUtf8("horizontalLayout_15")) self.mbandView = BandGraphicsView(self.mbandDiagram) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(1) sizePolicy.setHeightForWidth(self.mbandView.sizePolicy().hasHeightForWidth()) self.mbandView.setSizePolicy(sizePolicy) self.mbandView.setMinimumSize(QtCore.QSize(525, 249)) self.mbandView.setObjectName(_fromUtf8("mbandView")) self.horizontalLayout_15.addWidget(self.mbandView) self.mfilterspecView.addTab(self.mbandDiagram, _fromUtf8("")) self.mpoleZero = QtGui.QWidget() self.mpoleZero.setAutoFillBackground(False) self.mpoleZero.setObjectName(_fromUtf8("mpoleZero")) self.gridLayout_3 = QtGui.QGridLayout(self.mpoleZero) self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3")) self.mpzPlot = PzPlot(self.mpoleZero) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, 
QtGui.QSizePolicy.Expanding) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.mpzPlot.sizePolicy().hasHeightForWidth()) self.mpzPlot.setSizePolicy(sizePolicy) self.mpzPlot.setObjectName(_fromUtf8("mpzPlot")) self.gridLayout_3.addWidget(self.mpzPlot, 0, 0, 1, 1) self.mpzgroupBox = QtGui.QGroupBox(self.mpoleZero) self.mpzgroupBox.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.mpzgroupBox.setTitle(_fromUtf8("")) self.mpzgroupBox.setFlat(False) self.mpzgroupBox.setCheckable(False) self.mpzgroupBox.setObjectName(_fromUtf8("mpzgroupBox")) self.verticalLayout_4 = QtGui.QVBoxLayout(self.mpzgroupBox) self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4")) self.maddzeroPush = QtGui.QToolButton(self.mpzgroupBox) self.maddzeroPush.setIcon(icon) self.maddzeroPush.setCheckable(True) self.maddzeroPush.setObjectName(_fromUtf8("maddzeroPush")) self.verticalLayout_4.addWidget(self.maddzeroPush) self.maddpolePush = QtGui.QToolButton(self.mpzgroupBox) self.maddpolePush.setIcon(icon1) self.maddpolePush.setCheckable(True) self.maddpolePush.setObjectName(_fromUtf8("maddpolePush")) self.verticalLayout_4.addWidget(self.maddpolePush) self.mdelPush = QtGui.QToolButton(self.mpzgroupBox) self.mdelPush.setIcon(icon2) self.mdelPush.setCheckable(True) self.mdelPush.setObjectName(_fromUtf8("mdelPush")) self.verticalLayout_4.addWidget(self.mdelPush) self.mconjPush = QtGui.QToolButton(self.mpzgroupBox) self.mconjPush.setIcon(icon3) self.mconjPush.setIconSize(QtCore.QSize(16, 16)) self.mconjPush.setCheckable(True) self.mconjPush.setObjectName(_fromUtf8("mconjPush")) self.verticalLayout_4.addWidget(self.mconjPush) self.gridLayout_3.addWidget(self.mpzgroupBox, 0, 1, 1, 1) self.mpzstatusBar = QtGui.QStatusBar(self.mpoleZero) self.mpzstatusBar.setObjectName(_fromUtf8("mpzstatusBar")) self.gridLayout_3.addWidget(self.mpzstatusBar, 1, 0, 1, 2) self.mfilterspecView.addTab(self.mpoleZero, _fromUtf8("")) self.mfcTab = QtGui.QWidget() 
self.mfcTab.setObjectName(_fromUtf8("mfcTab")) self.horizontalLayout_4 = QtGui.QHBoxLayout(self.mfcTab) self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4")) self.mfilterCoeff = QtGui.QTextBrowser(self.mfcTab) self.mfilterCoeff.setObjectName(_fromUtf8("mfilterCoeff")) self.horizontalLayout_4.addWidget(self.mfilterCoeff) self.mfilterspecView.addTab(self.mfcTab, _fromUtf8("")) self.horizontalLayout_5.addWidget(self.splitter_3) self.stackedWindows.addWidget(self.modern) self.gridLayout.addWidget(self.stackedWindows, 0, 1, 1, 1) self.filterFrame = QtGui.QFrame(self.centralwidget) self.filterFrame.setMinimumSize(QtCore.QSize(300, 0)) self.filterFrame.setMaximumSize(QtCore.QSize(300, 16777215)) self.filterFrame.setFrameShape(QtGui.QFrame.StyledPanel) self.filterFrame.setFrameShadow(QtGui.QFrame.Raised) self.filterFrame.setObjectName(_fromUtf8("filterFrame")) self.verticalLayout = QtGui.QVBoxLayout(self.filterFrame) self.verticalLayout.setObjectName(_fromUtf8("verticalLayout")) self.fselectComboBox = QtGui.QComboBox(self.filterFrame) self.fselectComboBox.setEnabled(True) self.fselectComboBox.setObjectName(_fromUtf8("fselectComboBox")) self.fselectComboBox.addItem(_fromUtf8("")) self.fselectComboBox.addItem(_fromUtf8("")) self.verticalLayout.addWidget(self.fselectComboBox) self.filterTypeComboBox = QtGui.QComboBox(self.filterFrame) self.filterTypeComboBox.setObjectName(_fromUtf8("filterTypeComboBox")) self.filterTypeComboBox.addItem(_fromUtf8("")) self.filterTypeComboBox.addItem(_fromUtf8("")) self.filterTypeComboBox.addItem(_fromUtf8("")) self.filterTypeComboBox.addItem(_fromUtf8("")) self.filterTypeComboBox.addItem(_fromUtf8("")) self.filterTypeComboBox.addItem(_fromUtf8("")) self.filterTypeComboBox.addItem(_fromUtf8("")) self.filterTypeComboBox.addItem(_fromUtf8("")) self.verticalLayout.addWidget(self.filterTypeComboBox) self.iirfilterBandComboBox = QtGui.QComboBox(self.filterFrame) 
self.iirfilterBandComboBox.setObjectName(_fromUtf8("iirfilterBandComboBox")) self.iirfilterBandComboBox.addItem(_fromUtf8("")) self.iirfilterBandComboBox.addItem(_fromUtf8("")) self.iirfilterBandComboBox.addItem(_fromUtf8("")) self.iirfilterBandComboBox.addItem(_fromUtf8("")) self.verticalLayout.addWidget(self.iirfilterBandComboBox) self.adComboBox = QtGui.QComboBox(self.filterFrame) self.adComboBox.setObjectName(_fromUtf8("adComboBox")) self.adComboBox.addItem(_fromUtf8("")) self.adComboBox.addItem(_fromUtf8("")) self.verticalLayout.addWidget(self.adComboBox) self.filterDesignTypeComboBox = QtGui.QComboBox(self.filterFrame) self.filterDesignTypeComboBox.setObjectName(_fromUtf8("filterDesignTypeComboBox")) self.filterDesignTypeComboBox.addItem(_fromUtf8("")) self.filterDesignTypeComboBox.addItem(_fromUtf8("")) self.filterDesignTypeComboBox.addItem(_fromUtf8("")) self.filterDesignTypeComboBox.addItem(_fromUtf8("")) self.filterDesignTypeComboBox.addItem(_fromUtf8("")) self.filterDesignTypeComboBox.addItem(_fromUtf8("")) self.filterDesignTypeComboBox.addItem(_fromUtf8("")) self.verticalLayout.addWidget(self.filterDesignTypeComboBox) self.iirfilterTypeComboBox = QtGui.QComboBox(self.filterFrame) self.iirfilterTypeComboBox.setObjectName(_fromUtf8("iirfilterTypeComboBox")) self.iirfilterTypeComboBox.addItem(_fromUtf8("")) self.iirfilterTypeComboBox.addItem(_fromUtf8("")) self.iirfilterTypeComboBox.addItem(_fromUtf8("")) self.iirfilterTypeComboBox.addItem(_fromUtf8("")) self.iirfilterTypeComboBox.addItem(_fromUtf8("")) self.verticalLayout.addWidget(self.iirfilterTypeComboBox) self.globalParamsBox = QtGui.QGroupBox(self.filterFrame) self.globalParamsBox.setTitle(_fromUtf8("")) self.globalParamsBox.setObjectName(_fromUtf8("globalParamsBox")) self.formLayout_12 = QtGui.QFormLayout(self.globalParamsBox) self.formLayout_12.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow) self.formLayout_12.setObjectName(_fromUtf8("formLayout_12")) self.sampleRateLabel = 
QtGui.QLabel(self.globalParamsBox) self.sampleRateLabel.setMaximumSize(QtCore.QSize(16777215, 30)) self.sampleRateLabel.setObjectName(_fromUtf8("sampleRateLabel")) self.formLayout_12.setWidget(0, QtGui.QFormLayout.LabelRole, self.sampleRateLabel) self.sampleRateEdit = QtGui.QLineEdit(self.globalParamsBox) self.sampleRateEdit.setMaximumSize(QtCore.QSize(16777215, 30)) self.sampleRateEdit.setObjectName(_fromUtf8("sampleRateEdit")) self.formLayout_12.setWidget(0, QtGui.QFormLayout.FieldRole, self.sampleRateEdit) self.filterGainLabel = QtGui.QLabel(self.globalParamsBox) self.filterGainLabel.setObjectName(_fromUtf8("filterGainLabel")) self.formLayout_12.setWidget(1, QtGui.QFormLayout.LabelRole, self.filterGainLabel) self.filterGainEdit = QtGui.QLineEdit(self.globalParamsBox) self.filterGainEdit.setObjectName(_fromUtf8("filterGainEdit")) self.formLayout_12.setWidget(1, QtGui.QFormLayout.FieldRole, self.filterGainEdit) self.verticalLayout.addWidget(self.globalParamsBox) self.filterTypeWidget = QtGui.QStackedWidget(self.filterFrame) self.filterTypeWidget.setObjectName(_fromUtf8("filterTypeWidget")) self.firlpfPage = QtGui.QWidget() self.firlpfPage.setObjectName(_fromUtf8("firlpfPage")) self.formLayout = QtGui.QFormLayout(self.firlpfPage) self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow) self.formLayout.setObjectName(_fromUtf8("formLayout")) self.endofLpfPassBandLabel = QtGui.QLabel(self.firlpfPage) self.endofLpfPassBandLabel.setObjectName(_fromUtf8("endofLpfPassBandLabel")) self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.endofLpfPassBandLabel) self.endofLpfPassBandEdit = QtGui.QLineEdit(self.firlpfPage) self.endofLpfPassBandEdit.setObjectName(_fromUtf8("endofLpfPassBandEdit")) self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.endofLpfPassBandEdit) self.startofLpfStopBandLabel = QtGui.QLabel(self.firlpfPage) self.startofLpfStopBandLabel.setObjectName(_fromUtf8("startofLpfStopBandLabel")) self.formLayout.setWidget(1, 
QtGui.QFormLayout.LabelRole, self.startofLpfStopBandLabel) self.startofLpfStopBandEdit = QtGui.QLineEdit(self.firlpfPage) self.startofLpfStopBandEdit.setObjectName(_fromUtf8("startofLpfStopBandEdit")) self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.startofLpfStopBandEdit) self.lpfStopBandAttenLabel = QtGui.QLabel(self.firlpfPage) self.lpfStopBandAttenLabel.setObjectName(_fromUtf8("lpfStopBandAttenLabel")) self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.lpfStopBandAttenLabel) self.lpfStopBandAttenEdit = QtGui.QLineEdit(self.firlpfPage) self.lpfStopBandAttenEdit.setObjectName(_fromUtf8("lpfStopBandAttenEdit")) self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.lpfStopBandAttenEdit) self.lpfPassBandRippleLabel = QtGui.QLabel(self.firlpfPage) self.lpfPassBandRippleLabel.setObjectName(_fromUtf8("lpfPassBandRippleLabel")) self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.lpfPassBandRippleLabel) self.lpfPassBandRippleEdit = QtGui.QLineEdit(self.firlpfPage) self.lpfPassBandRippleEdit.setObjectName(_fromUtf8("lpfPassBandRippleEdit")) self.formLayout.setWidget(3, QtGui.QFormLayout.FieldRole, self.lpfPassBandRippleEdit) self.filterTypeWidget.addWidget(self.firlpfPage) self.firbpfPage = QtGui.QWidget() self.firbpfPage.setObjectName(_fromUtf8("firbpfPage")) self.formLayout_2 = QtGui.QFormLayout(self.firbpfPage) self.formLayout_2.setObjectName(_fromUtf8("formLayout_2")) self.startofBpfPassBandLabel = QtGui.QLabel(self.firbpfPage) self.startofBpfPassBandLabel.setObjectName(_fromUtf8("startofBpfPassBandLabel")) self.formLayout_2.setWidget(0, QtGui.QFormLayout.LabelRole, self.startofBpfPassBandLabel) self.startofBpfPassBandEdit = QtGui.QLineEdit(self.firbpfPage) self.startofBpfPassBandEdit.setObjectName(_fromUtf8("startofBpfPassBandEdit")) self.formLayout_2.setWidget(0, QtGui.QFormLayout.FieldRole, self.startofBpfPassBandEdit) self.endofBpfPassBandLabel = QtGui.QLabel(self.firbpfPage) 
self.endofBpfPassBandLabel.setObjectName(_fromUtf8("endofBpfPassBandLabel")) self.formLayout_2.setWidget(1, QtGui.QFormLayout.LabelRole, self.endofBpfPassBandLabel) self.endofBpfPassBandEdit = QtGui.QLineEdit(self.firbpfPage) self.endofBpfPassBandEdit.setObjectName(_fromUtf8("endofBpfPassBandEdit")) self.formLayout_2.setWidget(1, QtGui.QFormLayout.FieldRole, self.endofBpfPassBandEdit) self.bpfStopBandAttenEdit = QtGui.QLineEdit(self.firbpfPage) self.bpfStopBandAttenEdit.setObjectName(_fromUtf8("bpfStopBandAttenEdit")) self.formLayout_2.setWidget(3, QtGui.QFormLayout.FieldRole, self.bpfStopBandAttenEdit) self.bpfStopBandAttenLabel = QtGui.QLabel(self.firbpfPage) self.bpfStopBandAttenLabel.setObjectName(_fromUtf8("bpfStopBandAttenLabel")) self.formLayout_2.setWidget(3, QtGui.QFormLayout.LabelRole, self.bpfStopBandAttenLabel) self.bpfTransitionLabel = QtGui.QLabel(self.firbpfPage) self.bpfTransitionLabel.setObjectName(_fromUtf8("bpfTransitionLabel")) self.formLayout_2.setWidget(2, QtGui.QFormLayout.LabelRole, self.bpfTransitionLabel) self.bpfTransitionEdit = QtGui.QLineEdit(self.firbpfPage) self.bpfTransitionEdit.setObjectName(_fromUtf8("bpfTransitionEdit")) self.formLayout_2.setWidget(2, QtGui.QFormLayout.FieldRole, self.bpfTransitionEdit) self.bpfPassBandRippleEdit = QtGui.QLineEdit(self.firbpfPage) self.bpfPassBandRippleEdit.setObjectName(_fromUtf8("bpfPassBandRippleEdit")) self.formLayout_2.setWidget(4, QtGui.QFormLayout.FieldRole, self.bpfPassBandRippleEdit) self.bpfPassBandRippleLabel = QtGui.QLabel(self.firbpfPage) self.bpfPassBandRippleLabel.setObjectName(_fromUtf8("bpfPassBandRippleLabel")) self.formLayout_2.setWidget(4, QtGui.QFormLayout.LabelRole, self.bpfPassBandRippleLabel) self.filterTypeWidget.addWidget(self.firbpfPage) self.firbnfPage = QtGui.QWidget() self.firbnfPage.setObjectName(_fromUtf8("firbnfPage")) self.formLayout_5 = QtGui.QFormLayout(self.firbnfPage) self.formLayout_5.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow) 
self.formLayout_5.setObjectName(_fromUtf8("formLayout_5")) self.startofBnfStopBandLabel = QtGui.QLabel(self.firbnfPage) self.startofBnfStopBandLabel.setObjectName(_fromUtf8("startofBnfStopBandLabel")) self.formLayout_5.setWidget(0, QtGui.QFormLayout.LabelRole, self.startofBnfStopBandLabel) self.startofBnfStopBandEdit = QtGui.QLineEdit(self.firbnfPage) self.startofBnfStopBandEdit.setObjectName(_fromUtf8("startofBnfStopBandEdit")) self.formLayout_5.setWidget(0, QtGui.QFormLayout.FieldRole, self.startofBnfStopBandEdit) self.endofBnfStopBandLabel = QtGui.QLabel(self.firbnfPage) self.endofBnfStopBandLabel.setObjectName(_fromUtf8("endofBnfStopBandLabel")) self.formLayout_5.setWidget(1, QtGui.QFormLayout.LabelRole, self.endofBnfStopBandLabel) self.endofBnfStopBandEdit = QtGui.QLineEdit(self.firbnfPage) self.endofBnfStopBandEdit.setObjectName(_fromUtf8("endofBnfStopBandEdit")) self.formLayout_5.setWidget(1, QtGui.QFormLayout.FieldRole, self.endofBnfStopBandEdit) self.bnfTransitionLabel = QtGui.QLabel(self.firbnfPage) self.bnfTransitionLabel.setObjectName(_fromUtf8("bnfTransitionLabel")) self.formLayout_5.setWidget(2, QtGui.QFormLayout.LabelRole, self.bnfTransitionLabel) self.bnfTransitionEdit = QtGui.QLineEdit(self.firbnfPage) self.bnfTransitionEdit.setObjectName(_fromUtf8("bnfTransitionEdit")) self.formLayout_5.setWidget(2, QtGui.QFormLayout.FieldRole, self.bnfTransitionEdit) self.bnfStopBandAttenLabel = QtGui.QLabel(self.firbnfPage) self.bnfStopBandAttenLabel.setObjectName(_fromUtf8("bnfStopBandAttenLabel")) self.formLayout_5.setWidget(3, QtGui.QFormLayout.LabelRole, self.bnfStopBandAttenLabel) self.bnfStopBandAttenEdit = QtGui.QLineEdit(self.firbnfPage) self.bnfStopBandAttenEdit.setObjectName(_fromUtf8("bnfStopBandAttenEdit")) self.formLayout_5.setWidget(3, QtGui.QFormLayout.FieldRole, self.bnfStopBandAttenEdit) self.bnfPassBandRippleLabel = QtGui.QLabel(self.firbnfPage) self.bnfPassBandRippleLabel.setObjectName(_fromUtf8("bnfPassBandRippleLabel")) 
self.formLayout_5.setWidget(4, QtGui.QFormLayout.LabelRole, self.bnfPassBandRippleLabel) self.bnfPassBandRippleEdit = QtGui.QLineEdit(self.firbnfPage) self.bnfPassBandRippleEdit.setObjectName(_fromUtf8("bnfPassBandRippleEdit")) self.formLayout_5.setWidget(4, QtGui.QFormLayout.FieldRole, self.bnfPassBandRippleEdit) self.filterTypeWidget.addWidget(self.firbnfPage) self.firhpfPage = QtGui.QWidget() self.firhpfPage.setObjectName(_fromUtf8("firhpfPage")) self.formLayout_3 = QtGui.QFormLayout(self.firhpfPage) self.formLayout_3.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow) self.formLayout_3.setObjectName(_fromUtf8("formLayout_3")) self.endofHpfStopBandLabel = QtGui.QLabel(self.firhpfPage) self.endofHpfStopBandLabel.setObjectName(_fromUtf8("endofHpfStopBandLabel")) self.formLayout_3.setWidget(0, QtGui.QFormLayout.LabelRole, self.endofHpfStopBandLabel) self.endofHpfStopBandEdit = QtGui.QLineEdit(self.firhpfPage) self.endofHpfStopBandEdit.setObjectName(_fromUtf8("endofHpfStopBandEdit")) self.formLayout_3.setWidget(0, QtGui.QFormLayout.FieldRole, self.endofHpfStopBandEdit) self.startofHpfPassBandLabel = QtGui.QLabel(self.firhpfPage) self.startofHpfPassBandLabel.setObjectName(_fromUtf8("startofHpfPassBandLabel")) self.formLayout_3.setWidget(1, QtGui.QFormLayout.LabelRole, self.startofHpfPassBandLabel) self.startofHpfPassBandEdit = QtGui.QLineEdit(self.firhpfPage) self.startofHpfPassBandEdit.setObjectName(_fromUtf8("startofHpfPassBandEdit")) self.formLayout_3.setWidget(1, QtGui.QFormLayout.FieldRole, self.startofHpfPassBandEdit) self.hpfStopBandAttenLabel = QtGui.QLabel(self.firhpfPage) self.hpfStopBandAttenLabel.setObjectName(_fromUtf8("hpfStopBandAttenLabel")) self.formLayout_3.setWidget(2, QtGui.QFormLayout.LabelRole, self.hpfStopBandAttenLabel) self.hpfStopBandAttenEdit = QtGui.QLineEdit(self.firhpfPage) self.hpfStopBandAttenEdit.setObjectName(_fromUtf8("hpfStopBandAttenEdit")) self.formLayout_3.setWidget(2, QtGui.QFormLayout.FieldRole, 
self.hpfStopBandAttenEdit) self.hpfPassBandRippleLabel = QtGui.QLabel(self.firhpfPage) self.hpfPassBandRippleLabel.setObjectName(_fromUtf8("hpfPassBandRippleLabel")) self.formLayout_3.setWidget(3, QtGui.QFormLayout.LabelRole, self.hpfPassBandRippleLabel) self.hpfPassBandRippleEdit = QtGui.QLineEdit(self.firhpfPage) self.hpfPassBandRippleEdit.setObjectName(_fromUtf8("hpfPassBandRippleEdit")) self.formLayout_3.setWidget(3, QtGui.QFormLayout.FieldRole, self.hpfPassBandRippleEdit) self.filterTypeWidget.addWidget(self.firhpfPage) self.rrcPage = QtGui.QWidget() self.rrcPage.setObjectName(_fromUtf8("rrcPage")) self.formLayout_6 = QtGui.QFormLayout(self.rrcPage) self.formLayout_6.setObjectName(_fromUtf8("formLayout_6")) self.rrcSymbolRateLabel = QtGui.QLabel(self.rrcPage) self.rrcSymbolRateLabel.setObjectName(_fromUtf8("rrcSymbolRateLabel")) self.formLayout_6.setWidget(0, QtGui.QFormLayout.LabelRole, self.rrcSymbolRateLabel) self.rrcAlphaLabel = QtGui.QLabel(self.rrcPage) self.rrcAlphaLabel.setObjectName(_fromUtf8("rrcAlphaLabel")) self.formLayout_6.setWidget(1, QtGui.QFormLayout.LabelRole, self.rrcAlphaLabel) self.rrcNumTapsLabel = QtGui.QLabel(self.rrcPage) self.rrcNumTapsLabel.setObjectName(_fromUtf8("rrcNumTapsLabel")) self.formLayout_6.setWidget(2, QtGui.QFormLayout.LabelRole, self.rrcNumTapsLabel) self.rrcSymbolRateEdit = QtGui.QLineEdit(self.rrcPage) self.rrcSymbolRateEdit.setObjectName(_fromUtf8("rrcSymbolRateEdit")) self.formLayout_6.setWidget(0, QtGui.QFormLayout.FieldRole, self.rrcSymbolRateEdit) self.rrcAlphaEdit = QtGui.QLineEdit(self.rrcPage) self.rrcAlphaEdit.setObjectName(_fromUtf8("rrcAlphaEdit")) self.formLayout_6.setWidget(1, QtGui.QFormLayout.FieldRole, self.rrcAlphaEdit) self.rrcNumTapsEdit = QtGui.QLineEdit(self.rrcPage) self.rrcNumTapsEdit.setObjectName(_fromUtf8("rrcNumTapsEdit")) self.formLayout_6.setWidget(2, QtGui.QFormLayout.FieldRole, self.rrcNumTapsEdit) self.filterTypeWidget.addWidget(self.rrcPage) self.gausPage = QtGui.QWidget() 
self.gausPage.setObjectName(_fromUtf8("gausPage")) self.formLayout_7 = QtGui.QFormLayout(self.gausPage) self.formLayout_7.setObjectName(_fromUtf8("formLayout_7")) self.gausSymbolRateLabel = QtGui.QLabel(self.gausPage) self.gausSymbolRateLabel.setObjectName(_fromUtf8("gausSymbolRateLabel")) self.formLayout_7.setWidget(0, QtGui.QFormLayout.LabelRole, self.gausSymbolRateLabel) self.gausSymbolRateEdit = QtGui.QLineEdit(self.gausPage) self.gausSymbolRateEdit.setObjectName(_fromUtf8("gausSymbolRateEdit")) self.formLayout_7.setWidget(0, QtGui.QFormLayout.FieldRole, self.gausSymbolRateEdit) self.gausBTLabel = QtGui.QLabel(self.gausPage) self.gausBTLabel.setObjectName(_fromUtf8("gausBTLabel")) self.formLayout_7.setWidget(1, QtGui.QFormLayout.LabelRole, self.gausBTLabel) self.gausBTEdit = QtGui.QLineEdit(self.gausPage) self.gausBTEdit.setObjectName(_fromUtf8("gausBTEdit")) self.formLayout_7.setWidget(1, QtGui.QFormLayout.FieldRole, self.gausBTEdit) self.gausNumTapsLabel = QtGui.QLabel(self.gausPage) self.gausNumTapsLabel.setObjectName(_fromUtf8("gausNumTapsLabel")) self.formLayout_7.setWidget(2, QtGui.QFormLayout.LabelRole, self.gausNumTapsLabel) self.gausNumTapsEdit = QtGui.QLineEdit(self.gausPage) self.gausNumTapsEdit.setObjectName(_fromUtf8("gausNumTapsEdit")) self.formLayout_7.setWidget(2, QtGui.QFormLayout.FieldRole, self.gausNumTapsEdit) self.filterTypeWidget.addWidget(self.gausPage) self.iirlpfPage = QtGui.QWidget() self.iirlpfPage.setObjectName(_fromUtf8("iirlpfPage")) self.formLayout_15 = QtGui.QFormLayout(self.iirlpfPage) self.formLayout_15.setObjectName(_fromUtf8("formLayout_15")) self.iirendofLpfPassBandLabel = QtGui.QLabel(self.iirlpfPage) self.iirendofLpfPassBandLabel.setObjectName(_fromUtf8("iirendofLpfPassBandLabel")) self.formLayout_15.setWidget(0, QtGui.QFormLayout.LabelRole, self.iirendofLpfPassBandLabel) self.iirendofLpfPassBandEdit = QtGui.QLineEdit(self.iirlpfPage) self.iirendofLpfPassBandEdit.setObjectName(_fromUtf8("iirendofLpfPassBandEdit")) 
self.formLayout_15.setWidget(0, QtGui.QFormLayout.FieldRole, self.iirendofLpfPassBandEdit) self.iirstartofLpfStopBandLabel = QtGui.QLabel(self.iirlpfPage) self.iirstartofLpfStopBandLabel.setObjectName(_fromUtf8("iirstartofLpfStopBandLabel")) self.formLayout_15.setWidget(1, QtGui.QFormLayout.LabelRole, self.iirstartofLpfStopBandLabel) self.iirstartofLpfStopBandEdit = QtGui.QLineEdit(self.iirlpfPage) self.iirstartofLpfStopBandEdit.setObjectName(_fromUtf8("iirstartofLpfStopBandEdit")) self.formLayout_15.setWidget(1, QtGui.QFormLayout.FieldRole, self.iirstartofLpfStopBandEdit) self.iirLpfPassBandAttenLabel = QtGui.QLabel(self.iirlpfPage) self.iirLpfPassBandAttenLabel.setObjectName(_fromUtf8("iirLpfPassBandAttenLabel")) self.formLayout_15.setWidget(2, QtGui.QFormLayout.LabelRole, self.iirLpfPassBandAttenLabel) self.iirLpfPassBandAttenEdit = QtGui.QLineEdit(self.iirlpfPage) self.iirLpfPassBandAttenEdit.setObjectName(_fromUtf8("iirLpfPassBandAttenEdit")) self.formLayout_15.setWidget(2, QtGui.QFormLayout.FieldRole, self.iirLpfPassBandAttenEdit) self.iirLpfStopBandRippleLabel = QtGui.QLabel(self.iirlpfPage) self.iirLpfStopBandRippleLabel.setObjectName(_fromUtf8("iirLpfStopBandRippleLabel")) self.formLayout_15.setWidget(3, QtGui.QFormLayout.LabelRole, self.iirLpfStopBandRippleLabel) self.iirLpfStopBandRippleEdit = QtGui.QLineEdit(self.iirlpfPage) self.iirLpfStopBandRippleEdit.setObjectName(_fromUtf8("iirLpfStopBandRippleEdit")) self.formLayout_15.setWidget(3, QtGui.QFormLayout.FieldRole, self.iirLpfStopBandRippleEdit) self.filterTypeWidget.addWidget(self.iirlpfPage) self.iirhpfPage = QtGui.QWidget() self.iirhpfPage.setObjectName(_fromUtf8("iirhpfPage")) self.formLayout_9 = QtGui.QFormLayout(self.iirhpfPage) self.formLayout_9.setObjectName(_fromUtf8("formLayout_9")) self.iirendofHpfStopBandLabel = QtGui.QLabel(self.iirhpfPage) self.iirendofHpfStopBandLabel.setObjectName(_fromUtf8("iirendofHpfStopBandLabel")) self.formLayout_9.setWidget(0, QtGui.QFormLayout.LabelRole, 
self.iirendofHpfStopBandLabel) self.iirendofHpfStopBandEdit = QtGui.QLineEdit(self.iirhpfPage) self.iirendofHpfStopBandEdit.setObjectName(_fromUtf8("iirendofHpfStopBandEdit")) self.formLayout_9.setWidget(0, QtGui.QFormLayout.FieldRole, self.iirendofHpfStopBandEdit) self.iirstartofHpfPassBandLabel = QtGui.QLabel(self.iirhpfPage) self.iirstartofHpfPassBandLabel.setObjectName(_fromUtf8("iirstartofHpfPassBandLabel")) self.formLayout_9.setWidget(1, QtGui.QFormLayout.LabelRole, self.iirstartofHpfPassBandLabel) self.iirstartofHpfPassBandEdit = QtGui.QLineEdit(self.iirhpfPage) self.iirstartofHpfPassBandEdit.setObjectName(_fromUtf8("iirstartofHpfPassBandEdit")) self.formLayout_9.setWidget(1, QtGui.QFormLayout.FieldRole, self.iirstartofHpfPassBandEdit) self.iirHpfPassBandAttenLabel = QtGui.QLabel(self.iirhpfPage) self.iirHpfPassBandAttenLabel.setObjectName(_fromUtf8("iirHpfPassBandAttenLabel")) self.formLayout_9.setWidget(2, QtGui.QFormLayout.LabelRole, self.iirHpfPassBandAttenLabel) self.iirHpfPassBandAttenEdit = QtGui.QLineEdit(self.iirhpfPage) self.iirHpfPassBandAttenEdit.setObjectName(_fromUtf8("iirHpfPassBandAttenEdit")) self.formLayout_9.setWidget(2, QtGui.QFormLayout.FieldRole, self.iirHpfPassBandAttenEdit) self.iirHpfStopBandRippleLabel = QtGui.QLabel(self.iirhpfPage) self.iirHpfStopBandRippleLabel.setObjectName(_fromUtf8("iirHpfStopBandRippleLabel")) self.formLayout_9.setWidget(3, QtGui.QFormLayout.LabelRole, self.iirHpfStopBandRippleLabel) self.iirHpfStopBandRippleEdit = QtGui.QLineEdit(self.iirhpfPage) self.iirHpfStopBandRippleEdit.setObjectName(_fromUtf8("iirHpfStopBandRippleEdit")) self.formLayout_9.setWidget(3, QtGui.QFormLayout.FieldRole, self.iirHpfStopBandRippleEdit) self.filterTypeWidget.addWidget(self.iirhpfPage) self.iirbpfPage = QtGui.QWidget() self.iirbpfPage.setObjectName(_fromUtf8("iirbpfPage")) self.formLayout_10 = QtGui.QFormLayout(self.iirbpfPage) self.formLayout_10.setObjectName(_fromUtf8("formLayout_10")) self.iirendofBpfStopBandLabel1 = 
QtGui.QLabel(self.iirbpfPage) self.iirendofBpfStopBandLabel1.setObjectName(_fromUtf8("iirendofBpfStopBandLabel1")) self.formLayout_10.setWidget(0, QtGui.QFormLayout.LabelRole, self.iirendofBpfStopBandLabel1) self.iirendofBpfStopBandEdit1 = QtGui.QLineEdit(self.iirbpfPage) self.iirendofBpfStopBandEdit1.setObjectName(_fromUtf8("iirendofBpfStopBandEdit1")) self.formLayout_10.setWidget(0, QtGui.QFormLayout.FieldRole, self.iirendofBpfStopBandEdit1) self.iirstartofBpfPassBandLabel = QtGui.QLabel(self.iirbpfPage) self.iirstartofBpfPassBandLabel.setObjectName(_fromUtf8("iirstartofBpfPassBandLabel")) self.formLayout_10.setWidget(1, QtGui.QFormLayout.LabelRole, self.iirstartofBpfPassBandLabel) self.iirstartofBpfPassBandEdit = QtGui.QLineEdit(self.iirbpfPage) self.iirstartofBpfPassBandEdit.setObjectName(_fromUtf8("iirstartofBpfPassBandEdit")) self.formLayout_10.setWidget(1, QtGui.QFormLayout.FieldRole, self.iirstartofBpfPassBandEdit) self.iirendofBpfPassBandLabel = QtGui.QLabel(self.iirbpfPage) self.iirendofBpfPassBandLabel.setObjectName(_fromUtf8("iirendofBpfPassBandLabel")) self.formLayout_10.setWidget(2, QtGui.QFormLayout.LabelRole, self.iirendofBpfPassBandLabel) self.iirendofBpfPassBandEdit = QtGui.QLineEdit(self.iirbpfPage) self.iirendofBpfPassBandEdit.setObjectName(_fromUtf8("iirendofBpfPassBandEdit")) self.formLayout_10.setWidget(2, QtGui.QFormLayout.FieldRole, self.iirendofBpfPassBandEdit) self.iirstartofBpfStopBandLabel2 = QtGui.QLabel(self.iirbpfPage) self.iirstartofBpfStopBandLabel2.setObjectName(_fromUtf8("iirstartofBpfStopBandLabel2")) self.formLayout_10.setWidget(3, QtGui.QFormLayout.LabelRole, self.iirstartofBpfStopBandLabel2) self.iirstartofBpfStopBandEdit2 = QtGui.QLineEdit(self.iirbpfPage) self.iirstartofBpfStopBandEdit2.setObjectName(_fromUtf8("iirstartofBpfStopBandEdit2")) self.formLayout_10.setWidget(3, QtGui.QFormLayout.FieldRole, self.iirstartofBpfStopBandEdit2) self.iirBpfPassBandAttenLabel = QtGui.QLabel(self.iirbpfPage) 
self.iirBpfPassBandAttenLabel.setObjectName(_fromUtf8("iirBpfPassBandAttenLabel")) self.formLayout_10.setWidget(4, QtGui.QFormLayout.LabelRole, self.iirBpfPassBandAttenLabel) self.iirBpfPassBandAttenEdit = QtGui.QLineEdit(self.iirbpfPage) self.iirBpfPassBandAttenEdit.setObjectName(_fromUtf8("iirBpfPassBandAttenEdit")) self.formLayout_10.setWidget(4, QtGui.QFormLayout.FieldRole, self.iirBpfPassBandAttenEdit) self.iirBpfStopBandRippleLabel = QtGui.QLabel(self.iirbpfPage) self.iirBpfStopBandRippleLabel.setObjectName(_fromUtf8("iirBpfStopBandRippleLabel")) self.formLayout_10.setWidget(5, QtGui.QFormLayout.LabelRole, self.iirBpfStopBandRippleLabel) self.iirBpfStopBandRippleEdit = QtGui.QLineEdit(self.iirbpfPage) self.iirBpfStopBandRippleEdit.setObjectName(_fromUtf8("iirBpfStopBandRippleEdit")) self.formLayout_10.setWidget(5, QtGui.QFormLayout.FieldRole, self.iirBpfStopBandRippleEdit) self.filterTypeWidget.addWidget(self.iirbpfPage) self.iirbsfPage = QtGui.QWidget() self.iirbsfPage.setObjectName(_fromUtf8("iirbsfPage")) self.formLayout_11 = QtGui.QFormLayout(self.iirbsfPage) self.formLayout_11.setObjectName(_fromUtf8("formLayout_11")) self.iirendofBsfPassBandLabel1 = QtGui.QLabel(self.iirbsfPage) self.iirendofBsfPassBandLabel1.setObjectName(_fromUtf8("iirendofBsfPassBandLabel1")) self.formLayout_11.setWidget(0, QtGui.QFormLayout.LabelRole, self.iirendofBsfPassBandLabel1) self.iirendofBsfPassBandEdit1 = QtGui.QLineEdit(self.iirbsfPage) self.iirendofBsfPassBandEdit1.setObjectName(_fromUtf8("iirendofBsfPassBandEdit1")) self.formLayout_11.setWidget(0, QtGui.QFormLayout.FieldRole, self.iirendofBsfPassBandEdit1) self.iirstartofBsfStopBandLabel = QtGui.QLabel(self.iirbsfPage) self.iirstartofBsfStopBandLabel.setObjectName(_fromUtf8("iirstartofBsfStopBandLabel")) self.formLayout_11.setWidget(1, QtGui.QFormLayout.LabelRole, self.iirstartofBsfStopBandLabel) self.iirstartofBsfStopBandEdit = QtGui.QLineEdit(self.iirbsfPage) 
self.iirstartofBsfStopBandEdit.setObjectName(_fromUtf8("iirstartofBsfStopBandEdit")) self.formLayout_11.setWidget(1, QtGui.QFormLayout.FieldRole, self.iirstartofBsfStopBandEdit) self.iirendofBsfStopBandLabel = QtGui.QLabel(self.iirbsfPage) self.iirendofBsfStopBandLabel.setObjectName(_fromUtf8("iirendofBsfStopBandLabel")) self.formLayout_11.setWidget(2, QtGui.QFormLayout.LabelRole, self.iirendofBsfStopBandLabel) self.iirendofBsfStopBandEdit = QtGui.QLineEdit(self.iirbsfPage) self.iirendofBsfStopBandEdit.setObjectName(_fromUtf8("iirendofBsfStopBandEdit")) self.formLayout_11.setWidget(2, QtGui.QFormLayout.FieldRole, self.iirendofBsfStopBandEdit) self.iirstartofBsfPassBandLabel2 = QtGui.QLabel(self.iirbsfPage) self.iirstartofBsfPassBandLabel2.setObjectName(_fromUtf8("iirstartofBsfPassBandLabel2")) self.formLayout_11.setWidget(3, QtGui.QFormLayout.LabelRole, self.iirstartofBsfPassBandLabel2) self.iirstartofBsfPassBandEdit2 = QtGui.QLineEdit(self.iirbsfPage) self.iirstartofBsfPassBandEdit2.setObjectName(_fromUtf8("iirstartofBsfPassBandEdit2")) self.formLayout_11.setWidget(3, QtGui.QFormLayout.FieldRole, self.iirstartofBsfPassBandEdit2) self.iirBsfPassBandAttenLabel = QtGui.QLabel(self.iirbsfPage) self.iirBsfPassBandAttenLabel.setObjectName(_fromUtf8("iirBsfPassBandAttenLabel")) self.formLayout_11.setWidget(4, QtGui.QFormLayout.LabelRole, self.iirBsfPassBandAttenLabel) self.iirBsfPassBandAttenEdit = QtGui.QLineEdit(self.iirbsfPage) self.iirBsfPassBandAttenEdit.setObjectName(_fromUtf8("iirBsfPassBandAttenEdit")) self.formLayout_11.setWidget(4, QtGui.QFormLayout.FieldRole, self.iirBsfPassBandAttenEdit) self.iirBsfStopBandRippleLabel = QtGui.QLabel(self.iirbsfPage) self.iirBsfStopBandRippleLabel.setObjectName(_fromUtf8("iirBsfStopBandRippleLabel")) self.formLayout_11.setWidget(5, QtGui.QFormLayout.LabelRole, self.iirBsfStopBandRippleLabel) self.iirBsfStopBandRippleEdit = QtGui.QLineEdit(self.iirbsfPage) 
self.iirBsfStopBandRippleEdit.setObjectName(_fromUtf8("iirBsfStopBandRippleEdit")) self.formLayout_11.setWidget(5, QtGui.QFormLayout.FieldRole, self.iirBsfStopBandRippleEdit) self.filterTypeWidget.addWidget(self.iirbsfPage) self.iirbesselPage = QtGui.QWidget() self.iirbesselPage.setObjectName(_fromUtf8("iirbesselPage")) self.formLayout_13 = QtGui.QFormLayout(self.iirbesselPage) self.formLayout_13.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow) self.formLayout_13.setObjectName(_fromUtf8("formLayout_13")) self.besselordLabel = QtGui.QLabel(self.iirbesselPage) self.besselordLabel.setObjectName(_fromUtf8("besselordLabel")) self.formLayout_13.setWidget(0, QtGui.QFormLayout.LabelRole, self.besselordLabel) self.besselordEdit = QtGui.QLineEdit(self.iirbesselPage) self.besselordEdit.setObjectName(_fromUtf8("besselordEdit")) self.formLayout_13.setWidget(0, QtGui.QFormLayout.FieldRole, self.besselordEdit) self.iirbesselcritLabel1 = QtGui.QLabel(self.iirbesselPage) self.iirbesselcritLabel1.setObjectName(_fromUtf8("iirbesselcritLabel1")) self.formLayout_13.setWidget(1, QtGui.QFormLayout.LabelRole, self.iirbesselcritLabel1) self.iirbesselcritEdit1 = QtGui.QLineEdit(self.iirbesselPage) self.iirbesselcritEdit1.setObjectName(_fromUtf8("iirbesselcritEdit1")) self.formLayout_13.setWidget(1, QtGui.QFormLayout.FieldRole, self.iirbesselcritEdit1) self.iirbesselcritEdit2 = QtGui.QLineEdit(self.iirbesselPage) self.iirbesselcritEdit2.setObjectName(_fromUtf8("iirbesselcritEdit2")) self.formLayout_13.setWidget(2, QtGui.QFormLayout.FieldRole, self.iirbesselcritEdit2) self.iirbesselcritLabel2 = QtGui.QLabel(self.iirbesselPage) self.iirbesselcritLabel2.setObjectName(_fromUtf8("iirbesselcritLabel2")) self.formLayout_13.setWidget(2, QtGui.QFormLayout.LabelRole, self.iirbesselcritLabel2) self.filterTypeWidget.addWidget(self.iirbesselPage) self.firhbPage = QtGui.QWidget() self.firhbPage.setObjectName(_fromUtf8("firhbPage")) self.formLayout_14 = QtGui.QFormLayout(self.firhbPage) 
self.formLayout_14.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow) self.formLayout_14.setObjectName(_fromUtf8("formLayout_14")) self.firhbordLabel = QtGui.QLabel(self.firhbPage) self.firhbordLabel.setObjectName(_fromUtf8("firhbordLabel")) self.formLayout_14.setWidget(0, QtGui.QFormLayout.LabelRole, self.firhbordLabel) self.firhbordEdit = QtGui.QLineEdit(self.firhbPage) self.firhbordEdit.setObjectName(_fromUtf8("firhbordEdit")) self.formLayout_14.setWidget(0, QtGui.QFormLayout.FieldRole, self.firhbordEdit) self.firhbtrEditLabel2 = QtGui.QLabel(self.firhbPage) self.firhbtrEditLabel2.setObjectName(_fromUtf8("firhbtrEditLabel2")) self.formLayout_14.setWidget(2, QtGui.QFormLayout.LabelRole, self.firhbtrEditLabel2) self.firhbtrEdit = QtGui.QLineEdit(self.firhbPage) self.firhbtrEdit.setObjectName(_fromUtf8("firhbtrEdit")) self.formLayout_14.setWidget(2, QtGui.QFormLayout.FieldRole, self.firhbtrEdit) self.filterTypeWidget.addWidget(self.firhbPage) self.verticalLayout.addWidget(self.filterTypeWidget) self.filterPropsBox = QtGui.QGroupBox(self.filterFrame) self.filterPropsBox.setObjectName(_fromUtf8("filterPropsBox")) self.formLayout_8 = QtGui.QFormLayout(self.filterPropsBox) self.formLayout_8.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow) self.formLayout_8.setObjectName(_fromUtf8("formLayout_8")) self.nTapsLabel = QtGui.QLabel(self.filterPropsBox) self.nTapsLabel.setMinimumSize(QtCore.QSize(150, 0)) self.nTapsLabel.setObjectName(_fromUtf8("nTapsLabel")) self.formLayout_8.setWidget(1, QtGui.QFormLayout.LabelRole, self.nTapsLabel) self.nTapsEdit = QtGui.QLabel(self.filterPropsBox) self.nTapsEdit.setMaximumSize(QtCore.QSize(100, 16777215)) self.nTapsEdit.setFrameShape(QtGui.QFrame.Box) self.nTapsEdit.setFrameShadow(QtGui.QFrame.Raised) self.nTapsEdit.setText(_fromUtf8("")) self.nTapsEdit.setObjectName(_fromUtf8("nTapsEdit")) self.formLayout_8.setWidget(1, QtGui.QFormLayout.FieldRole, self.nTapsEdit) 
self.verticalLayout.addWidget(self.filterPropsBox) self.designButton = QtGui.QPushButton(self.filterFrame) self.designButton.setMinimumSize(QtCore.QSize(0, 0)) self.designButton.setMaximumSize(QtCore.QSize(200, 16777215)) self.designButton.setAutoDefault(True) self.designButton.setDefault(True) self.designButton.setObjectName(_fromUtf8("designButton")) self.verticalLayout.addWidget(self.designButton) self.gridLayout.addWidget(self.filterFrame, 0, 0, 1, 1) MainWindow.setCentralWidget(self.centralwidget) self.menubar = QtGui.QMenuBar(MainWindow) self.menubar.setGeometry(QtCore.QRect(0, 0, 1128, 19)) self.menubar.setObjectName(_fromUtf8("menubar")) self.menu_File = QtGui.QMenu(self.menubar) self.menu_File.setObjectName(_fromUtf8("menu_File")) self.menu_Analysis = QtGui.QMenu(self.menubar) self.menu_Analysis.setObjectName(_fromUtf8("menu_Analysis")) self.menuWidgets = QtGui.QMenu(self.menubar) self.menuWidgets.setGeometry(QtCore.QRect(408, 108, 129, 86)) self.menuWidgets.setObjectName(_fromUtf8("menuWidgets")) MainWindow.setMenuBar(self.menubar) self.statusbar = QtGui.QStatusBar(MainWindow) self.statusbar.setObjectName(_fromUtf8("statusbar")) MainWindow.setStatusBar(self.statusbar) self.action_exit = QtGui.QAction(MainWindow) self.action_exit.setObjectName(_fromUtf8("action_exit")) self.action_save = QtGui.QAction(MainWindow) self.action_save.setObjectName(_fromUtf8("action_save")) self.action_open = QtGui.QAction(MainWindow) self.action_open.setObjectName(_fromUtf8("action_open")) self.actionMagnitude_Response = QtGui.QAction(MainWindow) self.actionMagnitude_Response.setCheckable(True) self.actionMagnitude_Response.setChecked(True) self.actionMagnitude_Response.setObjectName(_fromUtf8("actionMagnitude_Response")) self.actionPhase_Respone = QtGui.QAction(MainWindow) self.actionPhase_Respone.setCheckable(True) self.actionPhase_Respone.setChecked(True) self.actionPhase_Respone.setObjectName(_fromUtf8("actionPhase_Respone")) self.actionGroup_Delay = 
QtGui.QAction(MainWindow) self.actionGroup_Delay.setCheckable(True) self.actionGroup_Delay.setChecked(True) self.actionGroup_Delay.setObjectName(_fromUtf8("actionGroup_Delay")) self.actionPhase_Delay = QtGui.QAction(MainWindow) self.actionPhase_Delay.setCheckable(True) self.actionPhase_Delay.setChecked(True) self.actionPhase_Delay.setObjectName(_fromUtf8("actionPhase_Delay")) self.actionImpulse_Response = QtGui.QAction(MainWindow) self.actionImpulse_Response.setCheckable(True) self.actionImpulse_Response.setChecked(True) self.actionImpulse_Response.setObjectName(_fromUtf8("actionImpulse_Response")) self.actionStep_Response = QtGui.QAction(MainWindow) self.actionStep_Response.setCheckable(True) self.actionStep_Response.setChecked(True) self.actionStep_Response.setObjectName(_fromUtf8("actionStep_Response")) self.actionPole_Zero_Plot = QtGui.QAction(MainWindow) self.actionPole_Zero_Plot.setObjectName(_fromUtf8("actionPole_Zero_Plot")) self.actionGrid = QtGui.QAction(MainWindow) self.actionGrid.setObjectName(_fromUtf8("actionGrid")) self.actionPole_Zero_Plot_2 = QtGui.QAction(MainWindow) self.actionPole_Zero_Plot_2.setCheckable(True) self.actionPole_Zero_Plot_2.setChecked(True) self.actionPole_Zero_Plot_2.setObjectName(_fromUtf8("actionPole_Zero_Plot_2")) self.actionIdeal_Band = QtGui.QAction(MainWindow) self.actionIdeal_Band.setCheckable(True) self.actionIdeal_Band.setChecked(False) self.actionIdeal_Band.setObjectName(_fromUtf8("actionIdeal_Band")) self.actionGrid_2 = QtGui.QAction(MainWindow) self.actionGrid_2.setCheckable(True) self.actionGrid_2.setChecked(False) self.actionGrid_2.setObjectName(_fromUtf8("actionGrid_2")) self.actionGrid_3 = QtGui.QAction(MainWindow) self.actionGrid_3.setObjectName(_fromUtf8("actionGrid_3")) self.actionTabbed = QtGui.QAction(MainWindow) self.actionTabbed.setCheckable(True) self.actionTabbed.setObjectName(_fromUtf8("actionTabbed")) self.actionOverlay = QtGui.QAction(MainWindow) self.actionOverlay.setCheckable(True) 
self.actionOverlay.setObjectName(_fromUtf8("actionOverlay")) self.actionResponse_widget = QtGui.QAction(MainWindow) self.actionResponse_widget.setCheckable(True) self.actionResponse_widget.setChecked(True) self.actionResponse_widget.setObjectName(_fromUtf8("actionResponse_widget")) self.actionSpec_widget = QtGui.QAction(MainWindow) self.actionSpec_widget.setCheckable(True) self.actionSpec_widget.setChecked(True) self.actionSpec_widget.setObjectName(_fromUtf8("actionSpec_widget")) self.actionQuick_access = QtGui.QAction(MainWindow) self.actionQuick_access.setCheckable(True) self.actionQuick_access.setChecked(True) self.actionQuick_access.setObjectName(_fromUtf8("actionQuick_access")) self.actionFilter_Coefficients = QtGui.QAction(MainWindow) self.actionFilter_Coefficients.setCheckable(True) self.actionFilter_Coefficients.setChecked(True) self.actionFilter_Coefficients.setObjectName(_fromUtf8("actionFilter_Coefficients")) self.actionDesign_widget = QtGui.QAction(MainWindow) self.actionDesign_widget.setCheckable(True) self.actionDesign_widget.setChecked(True) self.actionDesign_widget.setObjectName(_fromUtf8("actionDesign_widget")) self.actionOverlay_2 = QtGui.QAction(MainWindow) self.actionOverlay_2.setObjectName(_fromUtf8("actionOverlay_2")) self.actionGridview = QtGui.QAction(MainWindow) self.actionGridview.setCheckable(True) self.actionGridview.setObjectName(_fromUtf8("actionGridview")) self.actionDesign_widget_2 = QtGui.QAction(MainWindow) self.actionDesign_widget_2.setCheckable(True) self.actionDesign_widget_2.setObjectName(_fromUtf8("actionDesign_widget_2")) self.actionQuick_access_2 = QtGui.QAction(MainWindow) self.actionQuick_access_2.setObjectName(_fromUtf8("actionQuick_access_2")) self.actionSpec_widget_2 = QtGui.QAction(MainWindow) self.actionSpec_widget_2.setObjectName(_fromUtf8("actionSpec_widget_2")) self.actionResponse_widget_2 = QtGui.QAction(MainWindow) self.actionResponse_widget_2.setObjectName(_fromUtf8("actionResponse_widget_2")) 
self.actionDesign_Widget = QtGui.QAction(MainWindow) self.actionDesign_Widget.setCheckable(True) self.actionDesign_Widget.setChecked(True) self.actionDesign_Widget.setObjectName(_fromUtf8("actionDesign_Widget")) self.actionQuick_Access = QtGui.QAction(MainWindow) self.actionQuick_Access.setCheckable(True) self.actionQuick_Access.setChecked(True) self.actionQuick_Access.setObjectName(_fromUtf8("actionQuick_Access")) self.actionSpec_Widget = QtGui.QAction(MainWindow) self.actionSpec_Widget.setCheckable(True) self.actionSpec_Widget.setChecked(True) self.actionSpec_Widget.setObjectName(_fromUtf8("actionSpec_Widget")) self.actionResponse_Widget = QtGui.QAction(MainWindow) self.actionResponse_Widget.setCheckable(True) self.actionResponse_Widget.setChecked(True) self.actionResponse_Widget.setObjectName(_fromUtf8("actionResponse_Widget")) self.actionTabview_2 = QtGui.QAction(MainWindow) self.actionTabview_2.setCheckable(True) self.actionTabview_2.setChecked(True) self.actionTabview_2.setObjectName(_fromUtf8("actionTabview_2")) self.actionPlot_select = QtGui.QAction(MainWindow) self.actionPlot_select.setCheckable(True) self.actionPlot_select.setChecked(True) self.actionPlot_select.setObjectName(_fromUtf8("actionPlot_select")) self.actionBand_Diagram = QtGui.QAction(MainWindow) self.actionBand_Diagram.setCheckable(True) self.actionBand_Diagram.setChecked(True) self.actionBand_Diagram.setObjectName(_fromUtf8("actionBand_Diagram")) self.actionCheck = QtGui.QAction(MainWindow) self.actionCheck.setObjectName(_fromUtf8("actionCheck")) self.actionPlot_FFT_points = QtGui.QAction(MainWindow) self.actionPlot_FFT_points.setObjectName(_fromUtf8("actionPlot_FFT_points")) self.menu_File.addAction(self.action_open) self.menu_File.addAction(self.action_save) self.menu_File.addAction(self.action_exit) self.menu_Analysis.addSeparator() self.menu_Analysis.addAction(self.actionMagnitude_Response) self.menu_Analysis.addAction(self.actionPhase_Respone) 
self.menu_Analysis.addAction(self.actionGroup_Delay) self.menu_Analysis.addAction(self.actionPhase_Delay) self.menu_Analysis.addAction(self.actionImpulse_Response) self.menu_Analysis.addAction(self.actionStep_Response) self.menu_Analysis.addAction(self.actionGrid_2) self.menu_Analysis.addAction(self.actionFilter_Coefficients) self.menu_Analysis.addAction(self.actionIdeal_Band) self.menu_Analysis.addSeparator() self.menu_Analysis.addAction(self.actionPole_Zero_Plot_2) self.menu_Analysis.addAction(self.actionBand_Diagram) self.menu_Analysis.addSeparator() self.menu_Analysis.addAction(self.actionDesign_Widget) self.menu_Analysis.addAction(self.actionQuick_Access) self.menu_Analysis.addAction(self.actionSpec_Widget) self.menu_Analysis.addAction(self.actionResponse_Widget) self.menuWidgets.addAction(self.actionGridview) self.menuWidgets.addAction(self.actionPlot_select) self.menubar.addAction(self.menu_File.menuAction()) self.menubar.addAction(self.menu_Analysis.menuAction()) self.menubar.addAction(self.menuWidgets.menuAction()) self.retranslateUi(MainWindow) self.stackedWindows.setCurrentIndex(0) self.tabGroup.setCurrentIndex(0) self.filterspecView.setCurrentIndex(1) self.mfreqTabgroup.setCurrentIndex(0) self.mfilterspecView.setCurrentIndex(1) self.filterTypeWidget.setCurrentIndex(11) QtCore.QObject.connect(self.action_exit, QtCore.SIGNAL(_fromUtf8("activated()")), MainWindow.close) QtCore.QMetaObject.connectSlotsByName(MainWindow) MainWindow.setTabOrder(self.filterTypeComboBox, self.filterDesignTypeComboBox) MainWindow.setTabOrder(self.filterDesignTypeComboBox, self.endofLpfPassBandEdit) MainWindow.setTabOrder(self.endofLpfPassBandEdit, self.startofLpfStopBandEdit) MainWindow.setTabOrder(self.startofLpfStopBandEdit, self.lpfStopBandAttenEdit) MainWindow.setTabOrder(self.lpfStopBandAttenEdit, self.lpfPassBandRippleEdit) MainWindow.setTabOrder(self.lpfPassBandRippleEdit, self.startofBpfPassBandEdit) MainWindow.setTabOrder(self.startofBpfPassBandEdit, 
self.endofBpfPassBandEdit) MainWindow.setTabOrder(self.endofBpfPassBandEdit, self.bpfTransitionEdit) MainWindow.setTabOrder(self.bpfTransitionEdit, self.bpfStopBandAttenEdit) MainWindow.setTabOrder(self.bpfStopBandAttenEdit, self.bpfPassBandRippleEdit) MainWindow.setTabOrder(self.bpfPassBandRippleEdit, self.startofBnfStopBandEdit) MainWindow.setTabOrder(self.startofBnfStopBandEdit, self.endofBnfStopBandEdit) MainWindow.setTabOrder(self.endofBnfStopBandEdit, self.bnfTransitionEdit) MainWindow.setTabOrder(self.bnfTransitionEdit, self.bnfStopBandAttenEdit) MainWindow.setTabOrder(self.bnfStopBandAttenEdit, self.bnfPassBandRippleEdit) MainWindow.setTabOrder(self.bnfPassBandRippleEdit, self.endofHpfStopBandEdit) MainWindow.setTabOrder(self.endofHpfStopBandEdit, self.startofHpfPassBandEdit) MainWindow.setTabOrder(self.startofHpfPassBandEdit, self.hpfStopBandAttenEdit) MainWindow.setTabOrder(self.hpfStopBandAttenEdit, self.hpfPassBandRippleEdit) MainWindow.setTabOrder(self.hpfPassBandRippleEdit, self.rrcSymbolRateEdit) MainWindow.setTabOrder(self.rrcSymbolRateEdit, self.rrcAlphaEdit) MainWindow.setTabOrder(self.rrcAlphaEdit, self.rrcNumTapsEdit) MainWindow.setTabOrder(self.rrcNumTapsEdit, self.gausSymbolRateEdit) MainWindow.setTabOrder(self.gausSymbolRateEdit, self.gausBTEdit) MainWindow.setTabOrder(self.gausBTEdit, self.gausNumTapsEdit) MainWindow.setTabOrder(self.gausNumTapsEdit, self.designButton) def retranslateUi(self, MainWindow): MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "GNU Radio Filter Design Tool", None, QtGui.QApplication.UnicodeUTF8)) self.tabGroup.setTabText(self.tabGroup.indexOf(self.freqTab), QtGui.QApplication.translate("MainWindow", "Magnitude Response", None, QtGui.QApplication.UnicodeUTF8)) self.tabGroup.setTabText(self.tabGroup.indexOf(self.timeTab), QtGui.QApplication.translate("MainWindow", "Filter Taps", None, QtGui.QApplication.UnicodeUTF8)) self.tabGroup.setTabText(self.tabGroup.indexOf(self.phaseTab), 
QtGui.QApplication.translate("MainWindow", "Phase Response", None, QtGui.QApplication.UnicodeUTF8)) self.tabGroup.setTabText(self.tabGroup.indexOf(self.groupTab), QtGui.QApplication.translate("MainWindow", "Group Delay", None, QtGui.QApplication.UnicodeUTF8)) self.tabGroup.setTabText(self.tabGroup.indexOf(self.fcTab), QtGui.QApplication.translate("MainWindow", "Filter Coefficients", None, QtGui.QApplication.UnicodeUTF8)) self.tabGroup.setTabText(self.tabGroup.indexOf(self.impresTab), QtGui.QApplication.translate("MainWindow", "Impulse Response", None, QtGui.QApplication.UnicodeUTF8)) self.tabGroup.setTabText(self.tabGroup.indexOf(self.stepresTab), QtGui.QApplication.translate("MainWindow", "Step Response", None, QtGui.QApplication.UnicodeUTF8)) self.tabGroup.setTabText(self.tabGroup.indexOf(self.pdelayTab), QtGui.QApplication.translate("MainWindow", "Phase Delay", None, QtGui.QApplication.UnicodeUTF8)) self.filterspecView.setTabText(self.filterspecView.indexOf(self.bandDiagram), QtGui.QApplication.translate("MainWindow", "Band Diagram", None, QtGui.QApplication.UnicodeUTF8)) self.addzeroPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Add zero", None, QtGui.QApplication.UnicodeUTF8)) self.addzeroPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) self.addpolePush.setToolTip(QtGui.QApplication.translate("MainWindow", "Add pole", None, QtGui.QApplication.UnicodeUTF8)) self.addpolePush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) self.delPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Delete pole/zero", None, QtGui.QApplication.UnicodeUTF8)) self.delPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) self.conjPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Conjugate", None, QtGui.QApplication.UnicodeUTF8)) self.conjPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, 
QtGui.QApplication.UnicodeUTF8)) self.filterspecView.setTabText(self.filterspecView.indexOf(self.poleZero), QtGui.QApplication.translate("MainWindow", "Pole-Zero Plot", None, QtGui.QApplication.UnicodeUTF8)) self.responseBox.setTitle(QtGui.QApplication.translate("MainWindow", "Filter Responses", None, QtGui.QApplication.UnicodeUTF8)) self.checkMagres.setText(QtGui.QApplication.translate("MainWindow", "Magnitude Response", None, QtGui.QApplication.UnicodeUTF8)) self.checkPhase.setText(QtGui.QApplication.translate("MainWindow", "Phase Response", None, QtGui.QApplication.UnicodeUTF8)) self.checkGdelay.setText(QtGui.QApplication.translate("MainWindow", "Group Delay", None, QtGui.QApplication.UnicodeUTF8)) self.checkPdelay.setText(QtGui.QApplication.translate("MainWindow", "Phase Delay", None, QtGui.QApplication.UnicodeUTF8)) self.checkImpulse.setText(QtGui.QApplication.translate("MainWindow", "Impulse Response", None, QtGui.QApplication.UnicodeUTF8)) self.checkStep.setText(QtGui.QApplication.translate("MainWindow", "Step Response", None, QtGui.QApplication.UnicodeUTF8)) self.checkGrid.setText(QtGui.QApplication.translate("MainWindow", "Grid", None, QtGui.QApplication.UnicodeUTF8)) self.checkFcoeff.setText(QtGui.QApplication.translate("MainWindow", "Filter Coefficients", None, QtGui.QApplication.UnicodeUTF8)) self.checkKeepcur.setText(QtGui.QApplication.translate("MainWindow", "Buffer current plots", None, QtGui.QApplication.UnicodeUTF8)) self.groupSpecs.setTitle(QtGui.QApplication.translate("MainWindow", "Filter Specs", None, QtGui.QApplication.UnicodeUTF8)) self.checkBand.setText(QtGui.QApplication.translate("MainWindow", "Band Diagram", None, QtGui.QApplication.UnicodeUTF8)) self.checkPzplot.setText(QtGui.QApplication.translate("MainWindow", "Pole-Zero Plot", None, QtGui.QApplication.UnicodeUTF8)) self.sysParamsBox.setTitle(QtGui.QApplication.translate("MainWindow", "Plot Parameter", None, QtGui.QApplication.UnicodeUTF8)) 
self.nfftLabel.setText(QtGui.QApplication.translate("MainWindow", "Num FFT points", None, QtGui.QApplication.UnicodeUTF8)) self.mfmagPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Magnitude Response", None, QtGui.QApplication.UnicodeUTF8)) self.mfmagPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) self.mfphasePush.setToolTip(QtGui.QApplication.translate("MainWindow", "Phase Response", None, QtGui.QApplication.UnicodeUTF8)) self.mfphasePush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) self.mfgpdlyPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Group Delay", None, QtGui.QApplication.UnicodeUTF8)) self.mfgpdlyPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) self.mfphdlyPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Phase Delay", None, QtGui.QApplication.UnicodeUTF8)) self.mfphdlyPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) self.mfoverlayPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Overlay", None, QtGui.QApplication.UnicodeUTF8)) self.mfoverlayPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) self.mfreqTabgroup.setTabText(self.mfreqTabgroup.indexOf(self.mfreqTab), QtGui.QApplication.translate("MainWindow", "Frequency Response", None, QtGui.QApplication.UnicodeUTF8)) self.mttapsPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Filter Taps", None, QtGui.QApplication.UnicodeUTF8)) self.mttapsPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) self.mtstepPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Step Response", None, QtGui.QApplication.UnicodeUTF8)) self.mtstepPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) 
self.mtimpPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Impulse Response", None, QtGui.QApplication.UnicodeUTF8)) self.mtimpPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) self.mtimeTabgroup.setTabText(self.mtimeTabgroup.indexOf(self.mtimeTab), QtGui.QApplication.translate("MainWindow", "Time responses", None, QtGui.QApplication.UnicodeUTF8)) self.mfilterspecView.setTabText(self.mfilterspecView.indexOf(self.mbandDiagram), QtGui.QApplication.translate("MainWindow", "Ideal Band", None, QtGui.QApplication.UnicodeUTF8)) self.maddzeroPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Add zero", None, QtGui.QApplication.UnicodeUTF8)) self.maddzeroPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) self.maddpolePush.setToolTip(QtGui.QApplication.translate("MainWindow", "Add pole", None, QtGui.QApplication.UnicodeUTF8)) self.maddpolePush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) self.mdelPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Delete pole/zero", None, QtGui.QApplication.UnicodeUTF8)) self.mdelPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) self.mconjPush.setToolTip(QtGui.QApplication.translate("MainWindow", "Conjugate", None, QtGui.QApplication.UnicodeUTF8)) self.mconjPush.setText(QtGui.QApplication.translate("MainWindow", "...", None, QtGui.QApplication.UnicodeUTF8)) self.mfilterspecView.setTabText(self.mfilterspecView.indexOf(self.mpoleZero), QtGui.QApplication.translate("MainWindow", "Pole-Zero Plot", None, QtGui.QApplication.UnicodeUTF8)) self.mfilterspecView.setTabText(self.mfilterspecView.indexOf(self.mfcTab), QtGui.QApplication.translate("MainWindow", "Filter Coefficients", None, QtGui.QApplication.UnicodeUTF8)) self.fselectComboBox.setItemText(0, QtGui.QApplication.translate("MainWindow", "FIR", None, 
QtGui.QApplication.UnicodeUTF8)) self.fselectComboBox.setItemText(1, QtGui.QApplication.translate("MainWindow", "IIR(scipy)", None, QtGui.QApplication.UnicodeUTF8)) self.filterTypeComboBox.setItemText(0, QtGui.QApplication.translate("MainWindow", "Low Pass", None, QtGui.QApplication.UnicodeUTF8)) self.filterTypeComboBox.setItemText(1, QtGui.QApplication.translate("MainWindow", "High Pass", None, QtGui.QApplication.UnicodeUTF8)) self.filterTypeComboBox.setItemText(2, QtGui.QApplication.translate("MainWindow", "Band Pass", None, QtGui.QApplication.UnicodeUTF8)) self.filterTypeComboBox.setItemText(3, QtGui.QApplication.translate("MainWindow", "Complex Band Pass", None, QtGui.QApplication.UnicodeUTF8)) self.filterTypeComboBox.setItemText(4, QtGui.QApplication.translate("MainWindow", "Band Notch", None, QtGui.QApplication.UnicodeUTF8)) self.filterTypeComboBox.setItemText(5, QtGui.QApplication.translate("MainWindow", "Root Raised Cosine", None, QtGui.QApplication.UnicodeUTF8)) self.filterTypeComboBox.setItemText(6, QtGui.QApplication.translate("MainWindow", "Gaussian", None, QtGui.QApplication.UnicodeUTF8)) self.filterTypeComboBox.setItemText(7, QtGui.QApplication.translate("MainWindow", "Half Band", None, QtGui.QApplication.UnicodeUTF8)) self.iirfilterBandComboBox.setItemText(0, QtGui.QApplication.translate("MainWindow", "Low Pass", None, QtGui.QApplication.UnicodeUTF8)) self.iirfilterBandComboBox.setItemText(1, QtGui.QApplication.translate("MainWindow", "Band Pass", None, QtGui.QApplication.UnicodeUTF8)) self.iirfilterBandComboBox.setItemText(2, QtGui.QApplication.translate("MainWindow", "Band Stop", None, QtGui.QApplication.UnicodeUTF8)) self.iirfilterBandComboBox.setItemText(3, QtGui.QApplication.translate("MainWindow", "High Pass", None, QtGui.QApplication.UnicodeUTF8)) self.adComboBox.setItemText(0, QtGui.QApplication.translate("MainWindow", "Digital (normalized 0-1)", None, QtGui.QApplication.UnicodeUTF8)) self.adComboBox.setItemText(1, 
QtGui.QApplication.translate("MainWindow", "Analog (rad/second)", None, QtGui.QApplication.UnicodeUTF8)) self.filterDesignTypeComboBox.setItemText(0, QtGui.QApplication.translate("MainWindow", "Hamming Window", None, QtGui.QApplication.UnicodeUTF8)) self.filterDesignTypeComboBox.setItemText(1, QtGui.QApplication.translate("MainWindow", "Hann Window", None, QtGui.QApplication.UnicodeUTF8)) self.filterDesignTypeComboBox.setItemText(2, QtGui.QApplication.translate("MainWindow", "Blackman Window", None, QtGui.QApplication.UnicodeUTF8)) self.filterDesignTypeComboBox.setItemText(3, QtGui.QApplication.translate("MainWindow", "Rectangular Window", None, QtGui.QApplication.UnicodeUTF8)) self.filterDesignTypeComboBox.setItemText(4, QtGui.QApplication.translate("MainWindow", "Kaiser Window", None, QtGui.QApplication.UnicodeUTF8)) self.filterDesignTypeComboBox.setItemText(5, QtGui.QApplication.translate("MainWindow", "Blackman-harris Window", None, QtGui.QApplication.UnicodeUTF8)) self.filterDesignTypeComboBox.setItemText(6, QtGui.QApplication.translate("MainWindow", "Equiripple", None, QtGui.QApplication.UnicodeUTF8)) self.iirfilterTypeComboBox.setItemText(0, QtGui.QApplication.translate("MainWindow", "Elliptic", None, QtGui.QApplication.UnicodeUTF8)) self.iirfilterTypeComboBox.setItemText(1, QtGui.QApplication.translate("MainWindow", "Butterworth", None, QtGui.QApplication.UnicodeUTF8)) self.iirfilterTypeComboBox.setItemText(2, QtGui.QApplication.translate("MainWindow", "Chebyshev-1", None, QtGui.QApplication.UnicodeUTF8)) self.iirfilterTypeComboBox.setItemText(3, QtGui.QApplication.translate("MainWindow", "Chebyshev-2", None, QtGui.QApplication.UnicodeUTF8)) self.iirfilterTypeComboBox.setItemText(4, QtGui.QApplication.translate("MainWindow", "Bessel", None, QtGui.QApplication.UnicodeUTF8)) self.sampleRateLabel.setText(QtGui.QApplication.translate("MainWindow", "Sample Rate (sps)", None, QtGui.QApplication.UnicodeUTF8)) 
self.sampleRateEdit.setText(QtGui.QApplication.translate("MainWindow", "320000", None, QtGui.QApplication.UnicodeUTF8)) self.filterGainLabel.setText(QtGui.QApplication.translate("MainWindow", "Filter Gain", None, QtGui.QApplication.UnicodeUTF8)) self.filterGainEdit.setText(QtGui.QApplication.translate("MainWindow", "2", None, QtGui.QApplication.UnicodeUTF8)) self.endofLpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Pass Band (Hz)", None, QtGui.QApplication.UnicodeUTF8)) self.endofLpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "50000", None, QtGui.QApplication.UnicodeUTF8)) self.startofLpfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Stop Band (Hz)", None, QtGui.QApplication.UnicodeUTF8)) self.startofLpfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "60000", None, QtGui.QApplication.UnicodeUTF8)) self.lpfStopBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Stop Band Attenuation (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.lpfStopBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "40", None, QtGui.QApplication.UnicodeUTF8)) self.lpfPassBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Pass Band Ripple (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.lpfPassBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8)) self.startofBpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Pass Band (Hz)", None, QtGui.QApplication.UnicodeUTF8)) self.startofBpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "50000", None, QtGui.QApplication.UnicodeUTF8)) self.endofBpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Pass Band (Hz)", None, QtGui.QApplication.UnicodeUTF8)) self.endofBpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "80000", None, QtGui.QApplication.UnicodeUTF8)) 
self.bpfStopBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "40", None, QtGui.QApplication.UnicodeUTF8)) self.bpfStopBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Stop Band Attenuation (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.bpfTransitionLabel.setText(QtGui.QApplication.translate("MainWindow", "Transition Width (Hz)", None, QtGui.QApplication.UnicodeUTF8)) self.bpfTransitionEdit.setText(QtGui.QApplication.translate("MainWindow", "10000", None, QtGui.QApplication.UnicodeUTF8)) self.bpfPassBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8)) self.bpfPassBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Pass Band Ripple (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.startofBnfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Stop Band (Hz)", None, QtGui.QApplication.UnicodeUTF8)) self.startofBnfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "50000", None, QtGui.QApplication.UnicodeUTF8)) self.endofBnfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Stop Band (Hz)", None, QtGui.QApplication.UnicodeUTF8)) self.endofBnfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "80000", None, QtGui.QApplication.UnicodeUTF8)) self.bnfTransitionLabel.setText(QtGui.QApplication.translate("MainWindow", "Transition Width (Hz)", None, QtGui.QApplication.UnicodeUTF8)) self.bnfTransitionEdit.setText(QtGui.QApplication.translate("MainWindow", "10000", None, QtGui.QApplication.UnicodeUTF8)) self.bnfStopBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Stop Band Attenuation (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.bnfStopBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "48", None, QtGui.QApplication.UnicodeUTF8)) self.bnfPassBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Pass Band Ripple (dB)", None, 
QtGui.QApplication.UnicodeUTF8)) self.bnfPassBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8)) self.endofHpfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Stop Band (Hz)", None, QtGui.QApplication.UnicodeUTF8)) self.endofHpfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "50000", None, QtGui.QApplication.UnicodeUTF8)) self.startofHpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Pass Band (Hz)", None, QtGui.QApplication.UnicodeUTF8)) self.startofHpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "55000", None, QtGui.QApplication.UnicodeUTF8)) self.hpfStopBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Stop Band Attenuation (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.hpfStopBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "48", None, QtGui.QApplication.UnicodeUTF8)) self.hpfPassBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Pass Band Ripple (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.hpfPassBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8)) self.rrcSymbolRateLabel.setText(QtGui.QApplication.translate("MainWindow", "Symbol Rate (sps)", None, QtGui.QApplication.UnicodeUTF8)) self.rrcAlphaLabel.setText(QtGui.QApplication.translate("MainWindow", "Roll-off Factor", None, QtGui.QApplication.UnicodeUTF8)) self.rrcNumTapsLabel.setText(QtGui.QApplication.translate("MainWindow", "Number of Taps", None, QtGui.QApplication.UnicodeUTF8)) self.rrcSymbolRateEdit.setText(QtGui.QApplication.translate("MainWindow", "3200", None, QtGui.QApplication.UnicodeUTF8)) self.rrcAlphaEdit.setText(QtGui.QApplication.translate("MainWindow", "15", None, QtGui.QApplication.UnicodeUTF8)) self.rrcNumTapsEdit.setText(QtGui.QApplication.translate("MainWindow", "50", None, QtGui.QApplication.UnicodeUTF8)) 
self.gausSymbolRateLabel.setText(QtGui.QApplication.translate("MainWindow", "Symbol Rate (sps)", None, QtGui.QApplication.UnicodeUTF8)) self.gausSymbolRateEdit.setText(QtGui.QApplication.translate("MainWindow", "5000", None, QtGui.QApplication.UnicodeUTF8)) self.gausBTLabel.setText(QtGui.QApplication.translate("MainWindow", "Roll-off Factor", None, QtGui.QApplication.UnicodeUTF8)) self.gausBTEdit.setText(QtGui.QApplication.translate("MainWindow", "0.5", None, QtGui.QApplication.UnicodeUTF8)) self.gausNumTapsLabel.setText(QtGui.QApplication.translate("MainWindow", "Number of Taps", None, QtGui.QApplication.UnicodeUTF8)) self.gausNumTapsEdit.setText(QtGui.QApplication.translate("MainWindow", "30", None, QtGui.QApplication.UnicodeUTF8)) self.iirendofLpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Pass Band", None, QtGui.QApplication.UnicodeUTF8)) self.iirendofLpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.3", None, QtGui.QApplication.UnicodeUTF8)) self.iirstartofLpfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Stop Band ", None, QtGui.QApplication.UnicodeUTF8)) self.iirstartofLpfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.5", None, QtGui.QApplication.UnicodeUTF8)) self.iirLpfPassBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Max loss in Pass Band (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.iirLpfPassBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8)) self.iirLpfStopBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Min atten in Stop Band (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.iirLpfStopBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "60", None, QtGui.QApplication.UnicodeUTF8)) self.iirendofHpfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Stop Band", None, QtGui.QApplication.UnicodeUTF8)) 
self.iirendofHpfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.3", None, QtGui.QApplication.UnicodeUTF8)) self.iirstartofHpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Pass Band ", None, QtGui.QApplication.UnicodeUTF8)) self.iirstartofHpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.5", None, QtGui.QApplication.UnicodeUTF8)) self.iirHpfPassBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Max loss in Pass Band (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.iirHpfPassBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8)) self.iirHpfStopBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Min atten in Stop Band (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.iirHpfStopBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "60", None, QtGui.QApplication.UnicodeUTF8)) self.iirendofBpfStopBandLabel1.setText(QtGui.QApplication.translate("MainWindow", "End of Stop Band-1", None, QtGui.QApplication.UnicodeUTF8)) self.iirendofBpfStopBandEdit1.setText(QtGui.QApplication.translate("MainWindow", "0.2", None, QtGui.QApplication.UnicodeUTF8)) self.iirstartofBpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Pass Band ", None, QtGui.QApplication.UnicodeUTF8)) self.iirstartofBpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.3", None, QtGui.QApplication.UnicodeUTF8)) self.iirendofBpfPassBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Pass Band ", None, QtGui.QApplication.UnicodeUTF8)) self.iirendofBpfPassBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.5", None, QtGui.QApplication.UnicodeUTF8)) self.iirstartofBpfStopBandLabel2.setText(QtGui.QApplication.translate("MainWindow", "Start of Stop Band-2", None, QtGui.QApplication.UnicodeUTF8)) self.iirstartofBpfStopBandEdit2.setText(QtGui.QApplication.translate("MainWindow", "0.6", 
None, QtGui.QApplication.UnicodeUTF8)) self.iirBpfPassBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Max loss in Pass Band (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.iirBpfPassBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8)) self.iirBpfStopBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Min atten in Stop Band (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.iirBpfStopBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "60", None, QtGui.QApplication.UnicodeUTF8)) self.iirendofBsfPassBandLabel1.setText(QtGui.QApplication.translate("MainWindow", "End of Pass Band-1", None, QtGui.QApplication.UnicodeUTF8)) self.iirendofBsfPassBandEdit1.setText(QtGui.QApplication.translate("MainWindow", "0.2", None, QtGui.QApplication.UnicodeUTF8)) self.iirstartofBsfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "Start of Stop Band ", None, QtGui.QApplication.UnicodeUTF8)) self.iirstartofBsfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.3", None, QtGui.QApplication.UnicodeUTF8)) self.iirendofBsfStopBandLabel.setText(QtGui.QApplication.translate("MainWindow", "End of Stop Band ", None, QtGui.QApplication.UnicodeUTF8)) self.iirendofBsfStopBandEdit.setText(QtGui.QApplication.translate("MainWindow", "0.6", None, QtGui.QApplication.UnicodeUTF8)) self.iirstartofBsfPassBandLabel2.setText(QtGui.QApplication.translate("MainWindow", "Start of Pass Band-2", None, QtGui.QApplication.UnicodeUTF8)) self.iirstartofBsfPassBandEdit2.setText(QtGui.QApplication.translate("MainWindow", "0.7", None, QtGui.QApplication.UnicodeUTF8)) self.iirBsfPassBandAttenLabel.setText(QtGui.QApplication.translate("MainWindow", "Max loss in Pass Band (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.iirBsfPassBandAttenEdit.setText(QtGui.QApplication.translate("MainWindow", "1", None, QtGui.QApplication.UnicodeUTF8)) 
self.iirBsfStopBandRippleLabel.setText(QtGui.QApplication.translate("MainWindow", "Min atten in Stop Band (dB)", None, QtGui.QApplication.UnicodeUTF8)) self.iirBsfStopBandRippleEdit.setText(QtGui.QApplication.translate("MainWindow", "60", None, QtGui.QApplication.UnicodeUTF8)) self.besselordLabel.setText(QtGui.QApplication.translate("MainWindow", "Filter Order", None, QtGui.QApplication.UnicodeUTF8)) self.besselordEdit.setText(QtGui.QApplication.translate("MainWindow", "10", None, QtGui.QApplication.UnicodeUTF8)) self.iirbesselcritLabel1.setText(QtGui.QApplication.translate("MainWindow", "Critical point-1", None, QtGui.QApplication.UnicodeUTF8)) self.iirbesselcritEdit1.setText(QtGui.QApplication.translate("MainWindow", "0.2", None, QtGui.QApplication.UnicodeUTF8)) self.iirbesselcritEdit2.setText(QtGui.QApplication.translate("MainWindow", "0.5", None, QtGui.QApplication.UnicodeUTF8)) self.iirbesselcritLabel2.setText(QtGui.QApplication.translate("MainWindow", "Critical point-2", None, QtGui.QApplication.UnicodeUTF8)) self.firhbordLabel.setText(QtGui.QApplication.translate("MainWindow", "Filter Order", None, QtGui.QApplication.UnicodeUTF8)) self.firhbordEdit.setText(QtGui.QApplication.translate("MainWindow", "34", None, QtGui.QApplication.UnicodeUTF8)) self.firhbtrEditLabel2.setText(QtGui.QApplication.translate("MainWindow", "Transition width \n" " (from fs/4)", None, QtGui.QApplication.UnicodeUTF8)) self.firhbtrEdit.setText(QtGui.QApplication.translate("MainWindow", "10000", None, QtGui.QApplication.UnicodeUTF8)) self.filterPropsBox.setTitle(QtGui.QApplication.translate("MainWindow", "Filter Properties", None, QtGui.QApplication.UnicodeUTF8)) self.nTapsLabel.setText(QtGui.QApplication.translate("MainWindow", "Number of Taps:", None, QtGui.QApplication.UnicodeUTF8)) self.designButton.setText(QtGui.QApplication.translate("MainWindow", "Design", None, QtGui.QApplication.UnicodeUTF8)) self.menu_File.setTitle(QtGui.QApplication.translate("MainWindow", "&File", None, 
QtGui.QApplication.UnicodeUTF8)) self.menu_Analysis.setTitle(QtGui.QApplication.translate("MainWindow", "Analysis", None, QtGui.QApplication.UnicodeUTF8)) self.menuWidgets.setTitle(QtGui.QApplication.translate("MainWindow", "View", None, QtGui.QApplication.UnicodeUTF8)) self.action_exit.setText(QtGui.QApplication.translate("MainWindow", "E&xit", None, QtGui.QApplication.UnicodeUTF8)) self.action_save.setText(QtGui.QApplication.translate("MainWindow", "&Save", None, QtGui.QApplication.UnicodeUTF8)) self.action_save.setShortcut(QtGui.QApplication.translate("MainWindow", "Ctrl+S", None, QtGui.QApplication.UnicodeUTF8)) self.action_open.setText(QtGui.QApplication.translate("MainWindow", "&Open", None, QtGui.QApplication.UnicodeUTF8)) self.action_open.setShortcut(QtGui.QApplication.translate("MainWindow", "Ctrl+O", None, QtGui.QApplication.UnicodeUTF8)) self.actionMagnitude_Response.setText(QtGui.QApplication.translate("MainWindow", "Magnitude Response", None, QtGui.QApplication.UnicodeUTF8)) self.actionPhase_Respone.setText(QtGui.QApplication.translate("MainWindow", "Phase Respone", None, QtGui.QApplication.UnicodeUTF8)) self.actionGroup_Delay.setText(QtGui.QApplication.translate("MainWindow", "Group Delay", None, QtGui.QApplication.UnicodeUTF8)) self.actionPhase_Delay.setText(QtGui.QApplication.translate("MainWindow", "Phase Delay", None, QtGui.QApplication.UnicodeUTF8)) self.actionImpulse_Response.setText(QtGui.QApplication.translate("MainWindow", "Impulse Response", None, QtGui.QApplication.UnicodeUTF8)) self.actionStep_Response.setText(QtGui.QApplication.translate("MainWindow", "Step Response", None, QtGui.QApplication.UnicodeUTF8)) self.actionPole_Zero_Plot.setText(QtGui.QApplication.translate("MainWindow", "Pole-Zero Plot", None, QtGui.QApplication.UnicodeUTF8)) self.actionGrid.setText(QtGui.QApplication.translate("MainWindow", "Grid", None, QtGui.QApplication.UnicodeUTF8)) self.actionPole_Zero_Plot_2.setText(QtGui.QApplication.translate("MainWindow", "Pole Zero 
Plot", None, QtGui.QApplication.UnicodeUTF8)) self.actionIdeal_Band.setText(QtGui.QApplication.translate("MainWindow", "Ideal Band", None, QtGui.QApplication.UnicodeUTF8)) self.actionGrid_2.setText(QtGui.QApplication.translate("MainWindow", "Grid", None, QtGui.QApplication.UnicodeUTF8)) self.actionGrid_3.setText(QtGui.QApplication.translate("MainWindow", "Grid", None, QtGui.QApplication.UnicodeUTF8)) self.actionTabbed.setText(QtGui.QApplication.translate("MainWindow", "Tabbed", None, QtGui.QApplication.UnicodeUTF8)) self.actionOverlay.setText(QtGui.QApplication.translate("MainWindow", "Overlay", None, QtGui.QApplication.UnicodeUTF8)) self.actionResponse_widget.setText(QtGui.QApplication.translate("MainWindow", "Response widget", None, QtGui.QApplication.UnicodeUTF8)) self.actionSpec_widget.setText(QtGui.QApplication.translate("MainWindow", "Spec widget", None, QtGui.QApplication.UnicodeUTF8)) self.actionQuick_access.setText(QtGui.QApplication.translate("MainWindow", "Quick access", None, QtGui.QApplication.UnicodeUTF8)) self.actionFilter_Coefficients.setText(QtGui.QApplication.translate("MainWindow", "Filter Coefficients", None, QtGui.QApplication.UnicodeUTF8)) self.actionDesign_widget.setText(QtGui.QApplication.translate("MainWindow", "Design widget", None, QtGui.QApplication.UnicodeUTF8)) self.actionOverlay_2.setText(QtGui.QApplication.translate("MainWindow", "Overlay", None, QtGui.QApplication.UnicodeUTF8)) self.actionGridview.setText(QtGui.QApplication.translate("MainWindow", "Gridview", None, QtGui.QApplication.UnicodeUTF8)) self.actionDesign_widget_2.setText(QtGui.QApplication.translate("MainWindow", "Design widget", None, QtGui.QApplication.UnicodeUTF8)) self.actionQuick_access_2.setText(QtGui.QApplication.translate("MainWindow", "Quick access", None, QtGui.QApplication.UnicodeUTF8)) self.actionSpec_widget_2.setText(QtGui.QApplication.translate("MainWindow", "Spec widget", None, QtGui.QApplication.UnicodeUTF8)) 
self.actionResponse_widget_2.setText(QtGui.QApplication.translate("MainWindow", "Response widget", None, QtGui.QApplication.UnicodeUTF8)) self.actionDesign_Widget.setText(QtGui.QApplication.translate("MainWindow", "Design Widget", None, QtGui.QApplication.UnicodeUTF8)) self.actionQuick_Access.setText(QtGui.QApplication.translate("MainWindow", "Quick Access", None, QtGui.QApplication.UnicodeUTF8)) self.actionSpec_Widget.setText(QtGui.QApplication.translate("MainWindow", "Spec Widget", None, QtGui.QApplication.UnicodeUTF8)) self.actionResponse_Widget.setText(QtGui.QApplication.translate("MainWindow", "Response Widget", None, QtGui.QApplication.UnicodeUTF8)) self.actionTabview_2.setText(QtGui.QApplication.translate("MainWindow", "Tabview", None, QtGui.QApplication.UnicodeUTF8)) self.actionPlot_select.setText(QtGui.QApplication.translate("MainWindow", "Plot select", None, QtGui.QApplication.UnicodeUTF8)) self.actionBand_Diagram.setText(QtGui.QApplication.translate("MainWindow", "Band Diagram", None, QtGui.QApplication.UnicodeUTF8)) self.actionCheck.setText(QtGui.QApplication.translate("MainWindow", "check", None, QtGui.QApplication.UnicodeUTF8)) self.actionPlot_FFT_points.setText(QtGui.QApplication.translate("MainWindow", "Plot FFT points", None, QtGui.QApplication.UnicodeUTF8)) from PyQt4 import Qwt5 from bandgraphicsview import BandGraphicsView from polezero_plot import PzPlot import icons_rc
gpl-3.0
synasius/django
tests/utils_tests/test_lazyobject.py
38
11862
from __future__ import unicode_literals

import copy
import pickle
import sys
import warnings
from unittest import TestCase

from django.utils import six
from django.utils.functional import LazyObject, SimpleLazyObject, empty

from .models import Category, CategoryInfo


class Foo(object):
    """
    A simple class with just one attribute.
    """
    foo = 'bar'

    def __eq__(self, other):
        return self.foo == other.foo


class LazyObjectTestCase(TestCase):
    def lazy_wrap(self, wrapped_object):
        """
        Wrap the given object into a LazyObject
        """
        class AdHocLazyObject(LazyObject):
            def _setup(self):
                self._wrapped = wrapped_object

        return AdHocLazyObject()

    def test_getattr(self):
        obj = self.lazy_wrap(Foo())
        self.assertEqual(obj.foo, 'bar')

    def test_setattr(self):
        obj = self.lazy_wrap(Foo())
        obj.foo = 'BAR'
        obj.bar = 'baz'
        self.assertEqual(obj.foo, 'BAR')
        self.assertEqual(obj.bar, 'baz')

    def test_setattr2(self):
        # Same as test_setattr but in reversed order
        obj = self.lazy_wrap(Foo())
        obj.bar = 'baz'
        obj.foo = 'BAR'
        self.assertEqual(obj.foo, 'BAR')
        self.assertEqual(obj.bar, 'baz')

    def test_delattr(self):
        obj = self.lazy_wrap(Foo())
        obj.bar = 'baz'
        self.assertEqual(obj.bar, 'baz')
        del obj.bar
        with self.assertRaises(AttributeError):
            obj.bar

    def test_cmp(self):
        obj1 = self.lazy_wrap('foo')
        obj2 = self.lazy_wrap('bar')
        obj3 = self.lazy_wrap('foo')
        self.assertEqual(obj1, 'foo')
        self.assertEqual(obj1, obj3)
        self.assertNotEqual(obj1, obj2)
        self.assertNotEqual(obj1, 'bar')

    def test_bytes(self):
        obj = self.lazy_wrap(b'foo')
        self.assertEqual(bytes(obj), b'foo')

    def test_text(self):
        obj = self.lazy_wrap('foo')
        self.assertEqual(six.text_type(obj), 'foo')

    def test_bool(self):
        # Refs #21840
        for f in [False, 0, (), {}, [], None, set()]:
            self.assertFalse(self.lazy_wrap(f))

        # BUG FIX: the original asserted the raw value `t`, which is
        # trivially true and never exercised the lazy wrapper at all.
        for t in [True, 1, (1,), {1: 2}, [1], object(), {1}]:
            self.assertTrue(self.lazy_wrap(t))

    def test_dir(self):
        obj = self.lazy_wrap('foo')
        self.assertEqual(dir(obj), dir('foo'))

    def test_len(self):
        for seq in ['asd', [1, 2, 3], {'a': 1, 'b': 2, 'c': 3}]:
            obj = self.lazy_wrap(seq)
            self.assertEqual(len(obj), 3)

    def test_class(self):
        self.assertIsInstance(self.lazy_wrap(42), int)

        class Bar(Foo):
            pass

        self.assertIsInstance(self.lazy_wrap(Bar()), Foo)

    def test_hash(self):
        obj = self.lazy_wrap('foo')
        d = {}
        d[obj] = 'bar'
        self.assertIn('foo', d)
        self.assertEqual(d['foo'], 'bar')

    def test_contains(self):
        test_data = [
            ('c', 'abcde'),
            (2, [1, 2, 3]),
            ('a', {'a': 1, 'b': 2, 'c': 3}),
            (2, {1, 2, 3}),
        ]
        for needle, haystack in test_data:
            self.assertIn(needle, self.lazy_wrap(haystack))

        # __contains__ doesn't work when the haystack is a string and the needle a LazyObject
        # BUG FIX: the original iterated `for needle_haystack in ...` without
        # unpacking, so the body reused the stale needle/haystack from the
        # loop above and re-tested the same (last) pair every iteration.
        for needle, haystack in test_data[1:]:
            self.assertIn(self.lazy_wrap(needle), haystack)
            self.assertIn(self.lazy_wrap(needle), self.lazy_wrap(haystack))

    def test_getitem(self):
        obj_list = self.lazy_wrap([1, 2, 3])
        obj_dict = self.lazy_wrap({'a': 1, 'b': 2, 'c': 3})

        self.assertEqual(obj_list[0], 1)
        self.assertEqual(obj_list[-1], 3)
        self.assertEqual(obj_list[1:2], [2])

        self.assertEqual(obj_dict['b'], 2)

        with self.assertRaises(IndexError):
            obj_list[3]

        with self.assertRaises(KeyError):
            obj_dict['f']

    def test_setitem(self):
        obj_list = self.lazy_wrap([1, 2, 3])
        obj_dict = self.lazy_wrap({'a': 1, 'b': 2, 'c': 3})

        obj_list[0] = 100
        self.assertEqual(obj_list, [100, 2, 3])
        obj_list[1:2] = [200, 300, 400]
        self.assertEqual(obj_list, [100, 200, 300, 400, 3])

        obj_dict['a'] = 100
        obj_dict['d'] = 400
        self.assertEqual(obj_dict, {'a': 100, 'b': 2, 'c': 3, 'd': 400})

    def test_delitem(self):
        obj_list = self.lazy_wrap([1, 2, 3])
        obj_dict = self.lazy_wrap({'a': 1, 'b': 2, 'c': 3})

        del obj_list[-1]
        del obj_dict['c']
        self.assertEqual(obj_list, [1, 2])
        self.assertEqual(obj_dict, {'a': 1, 'b': 2})

        with self.assertRaises(IndexError):
            del obj_list[3]

        with self.assertRaises(KeyError):
            del obj_dict['f']

    def test_iter(self):
        # Tests whether an object's custom `__iter__` method is being
        # used when iterating over it.

        class IterObject(object):

            def __init__(self, values):
                self.values = values

            def __iter__(self):
                return iter(self.values)

        original_list = ['test', '123']
        self.assertEqual(
            list(self.lazy_wrap(IterObject(original_list))),
            original_list
        )

    def test_pickle(self):
        # See ticket #16563
        obj = self.lazy_wrap(Foo())
        pickled = pickle.dumps(obj)
        unpickled = pickle.loads(pickled)
        self.assertIsInstance(unpickled, Foo)
        self.assertEqual(unpickled, obj)
        self.assertEqual(unpickled.foo, obj.foo)

    def test_deepcopy(self):
        # Check that we *can* do deep copy, and that it returns the right
        # objects.
        l = [1, 2, 3]

        obj = self.lazy_wrap(l)
        len(l)  # forces evaluation
        obj2 = copy.deepcopy(obj)

        self.assertIsInstance(obj2, list)
        self.assertEqual(obj2, [1, 2, 3])

    def test_deepcopy_no_evaluation(self):
        # copying doesn't force evaluation
        l = [1, 2, 3]

        obj = self.lazy_wrap(l)
        obj2 = copy.deepcopy(obj)

        # Copying shouldn't force evaluation
        self.assertIs(obj._wrapped, empty)
        self.assertIs(obj2._wrapped, empty)


class SimpleLazyObjectTestCase(LazyObjectTestCase):
    # By inheriting from LazyObjectTestCase and redefining the lazy_wrap()
    # method which all testcases use, we get to make sure all behaviors
    # tested in the parent testcase also apply to SimpleLazyObject.
    def lazy_wrap(self, wrapped_object):
        return SimpleLazyObject(lambda: wrapped_object)

    def test_repr(self):
        # First, for an unevaluated SimpleLazyObject
        obj = self.lazy_wrap(42)
        # __repr__ contains __repr__ of setup function and does not evaluate
        # the SimpleLazyObject
        six.assertRegex(self, repr(obj), '^<SimpleLazyObject:')
        self.assertIs(obj._wrapped, empty)  # make sure evaluation hasn't been triggered

        self.assertEqual(obj, 42)  # evaluate the lazy object
        self.assertIsInstance(obj._wrapped, int)
        self.assertEqual(repr(obj), '<SimpleLazyObject: 42>')

    def test_trace(self):
        # See ticket #19456
        old_trace_func = sys.gettrace()
        try:
            def trace_func(frame, event, arg):
                frame.f_locals['self'].__class__
                if old_trace_func is not None:
                    old_trace_func(frame, event, arg)
            sys.settrace(trace_func)
            self.lazy_wrap(None)
        finally:
            sys.settrace(old_trace_func)

    def test_none(self):
        i = [0]

        def f():
            i[0] += 1
            return None

        x = SimpleLazyObject(f)
        self.assertEqual(str(x), "None")
        self.assertEqual(i, [1])
        self.assertEqual(str(x), "None")
        self.assertEqual(i, [1])

    def test_dict(self):
        # See ticket #18447
        lazydict = SimpleLazyObject(lambda: {'one': 1})
        self.assertEqual(lazydict['one'], 1)
        lazydict['one'] = -1
        self.assertEqual(lazydict['one'], -1)
        self.assertIn('one', lazydict)
        self.assertNotIn('two', lazydict)
        self.assertEqual(len(lazydict), 1)
        del lazydict['one']
        with self.assertRaises(KeyError):
            lazydict['one']

    def test_list_set(self):
        lazy_list = SimpleLazyObject(lambda: [1, 2, 3, 4, 5])
        lazy_set = SimpleLazyObject(lambda: {1, 2, 3, 4})
        self.assertIn(1, lazy_list)
        self.assertIn(1, lazy_set)
        self.assertNotIn(6, lazy_list)
        self.assertNotIn(6, lazy_set)
        self.assertEqual(len(lazy_list), 5)
        self.assertEqual(len(lazy_set), 4)


class BaseBaz(object):
    """
    A base class with a funky __reduce__ method, meant to simulate the
    __reduce__ method of Model, which sets self._django_version.
    """
    def __init__(self):
        self.baz = 'wrong'

    def __reduce__(self):
        self.baz = 'right'
        return super(BaseBaz, self).__reduce__()

    def __eq__(self, other):
        if self.__class__ != other.__class__:
            return False
        for attr in ['bar', 'baz', 'quux']:
            if hasattr(self, attr) != hasattr(other, attr):
                return False
            elif getattr(self, attr, None) != getattr(other, attr, None):
                return False
        return True


class Baz(BaseBaz):
    """
    A class that inherits from BaseBaz and has its own __reduce_ex__ method.
    """
    def __init__(self, bar):
        self.bar = bar
        super(Baz, self).__init__()

    def __reduce_ex__(self, proto):
        self.quux = 'quux'
        return super(Baz, self).__reduce_ex__(proto)


class BazProxy(Baz):
    """
    A class that acts as a proxy for Baz. It does some scary mucking about with
    dicts, which simulates some crazy things that people might do with
    e.g. proxy models.
    """
    def __init__(self, baz):
        self.__dict__ = baz.__dict__
        self._baz = baz
        super(BaseBaz, self).__init__()


class SimpleLazyObjectPickleTestCase(TestCase):
    """
    Regression test for pickling a SimpleLazyObject wrapping a model (#25389).
    Also covers other classes with a custom __reduce__ method.
    """
    def test_pickle_with_reduce(self):
        """
        Test in a fairly synthetic setting.
        """
        # Test every pickle protocol available
        for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
            lazy_objs = [
                SimpleLazyObject(lambda: BaseBaz()),
                SimpleLazyObject(lambda: Baz(1)),
                SimpleLazyObject(lambda: BazProxy(Baz(2))),
            ]
            for obj in lazy_objs:
                pickled = pickle.dumps(obj, protocol)
                unpickled = pickle.loads(pickled)
                self.assertEqual(unpickled, obj)
                self.assertEqual(unpickled.baz, 'right')

    def test_pickle_model(self):
        """
        Test on an actual model, based on the report in #25426.
        """
        category = Category.objects.create(name="thing1")
        CategoryInfo.objects.create(category=category)
        # Test every pickle protocol available
        for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
            lazy_category = SimpleLazyObject(lambda: category)
            # Test both if we accessed a field on the model and if we didn't.
            lazy_category.categoryinfo
            lazy_category_2 = SimpleLazyObject(lambda: category)
            with warnings.catch_warnings(record=True) as recorded:
                self.assertEqual(pickle.loads(pickle.dumps(lazy_category, protocol)), category)
                self.assertEqual(pickle.loads(pickle.dumps(lazy_category_2, protocol)), category)
                # Assert that there were no warnings.
                self.assertEqual(len(recorded), 0)
bsd-3-clause
VitalPet/odoo
addons/portal/wizard/portal_wizard.py
22
9618
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2011 OpenERP S.A (<http://www.openerp.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import logging import random from openerp.osv import fields, osv from openerp.tools.translate import _ from openerp.tools import email_split from openerp import SUPERUSER_ID _logger = logging.getLogger(__name__) # welcome email sent to portal users # (note that calling '_' has no effect except exporting those strings for translation) WELCOME_EMAIL_SUBJECT = _("Your OpenERP account at %(company)s") WELCOME_EMAIL_BODY = _("""Dear %(name)s, You have been given access to %(portal)s. Your login account data is: Database: %(db)s Username: %(login)s In order to complete the signin process, click on the following url: %(url)s %(welcome_message)s -- OpenERP - Open Source Business Applications http://www.openerp.com """) def extract_email(email): """ extract the email address from a user-friendly email address """ addresses = email_split(email) return addresses[0] if addresses else '' class wizard(osv.osv_memory): """ A wizard to manage the creation/removal of portal users. 
""" _name = 'portal.wizard' _description = 'Portal Access Management' _columns = { 'portal_id': fields.many2one('res.groups', domain=[('is_portal', '=', True)], required=True, string='Portal', help="The portal that users can be added in or removed from."), 'user_ids': fields.one2many('portal.wizard.user', 'wizard_id', string='Users'), 'welcome_message': fields.text(string='Invitation Message', help="This text is included in the email sent to new users of the portal."), } def _default_portal(self, cr, uid, context): portal_ids = self.pool.get('res.groups').search(cr, uid, [('is_portal', '=', True)]) return portal_ids and portal_ids[0] or False _defaults = { 'portal_id': _default_portal, } def onchange_portal_id(self, cr, uid, ids, portal_id, context=None): # for each partner, determine corresponding portal.wizard.user records res_partner = self.pool.get('res.partner') partner_ids = context and context.get('active_ids') or [] contact_ids = set() user_changes = [] for partner in res_partner.browse(cr, SUPERUSER_ID, partner_ids, context): for contact in (partner.child_ids or [partner]): # make sure that each contact appears at most once in the list if contact.id not in contact_ids: contact_ids.add(contact.id) in_portal = False if contact.user_ids: in_portal = portal_id in [g.id for g in contact.user_ids[0].groups_id] user_changes.append((0, 0, { 'partner_id': contact.id, 'email': contact.email, 'in_portal': in_portal, })) return {'value': {'user_ids': user_changes}} def action_apply(self, cr, uid, ids, context=None): wizard = self.browse(cr, uid, ids[0], context) portal_user_ids = [user.id for user in wizard.user_ids] self.pool.get('portal.wizard.user').action_apply(cr, uid, portal_user_ids, context) return {'type': 'ir.actions.act_window_close'} class wizard_user(osv.osv_memory): """ A model to configure users in the portal wizard. 
""" _name = 'portal.wizard.user' _description = 'Portal User Config' _columns = { 'wizard_id': fields.many2one('portal.wizard', string='Wizard', required=True, ondelete="cascade"), 'partner_id': fields.many2one('res.partner', string='Contact', required=True, readonly=True), 'email': fields.char(size=240, string='Email'), 'in_portal': fields.boolean('In Portal'), } def create(self, cr, uid, values, context=None): """ overridden to update the partner's email (if necessary) """ id = super(wizard_user, self).create(cr, uid, values, context) wuser = self.browse(cr, uid, id, context) if wuser.partner_id.email != wuser.email: wuser.partner_id.write({'email': wuser.email}) return id def action_apply(self, cr, uid, ids, context=None): for wizard_user in self.browse(cr, SUPERUSER_ID, ids, context): portal = wizard_user.wizard_id.portal_id user = self._retrieve_user(cr, SUPERUSER_ID, wizard_user, context) if wizard_user.in_portal: # create a user if necessary, and make sure it is in the portal group if not user: user = self._create_user(cr, SUPERUSER_ID, wizard_user, context) if (not user.active) or (portal not in user.groups_id): user.write({'active': True, 'groups_id': [(4, portal.id)]}) # prepare for the signup process user.partner_id.signup_prepare() wizard_user = self.browse(cr, SUPERUSER_ID, wizard_user.id, context) self._send_email(cr, uid, wizard_user, context) else: # remove the user (if it exists) from the portal group if user and (portal in user.groups_id): # if user belongs to portal only, deactivate it if len(user.groups_id) <= 1: user.write({'groups_id': [(3, portal.id)], 'active': False}) else: user.write({'groups_id': [(3, portal.id)]}) def _retrieve_user(self, cr, uid, wizard_user, context=None): """ retrieve the (possibly inactive) user corresponding to wizard_user.partner_id @param wizard_user: browse record of model portal.wizard.user @return: browse record of model res.users """ if wizard_user.partner_id.user_ids: return wizard_user.partner_id.user_ids[0] 
# the user may be inactive, search for it res_users = self.pool.get('res.users') domain = [('partner_id', '=', wizard_user.partner_id.id), ('active', '=', False)] user_ids = res_users.search(cr, uid, domain) return user_ids and res_users.browse(cr, uid, user_ids[0], context) or False def _create_user(self, cr, uid, wizard_user, context=None): """ create a new user for wizard_user.partner_id @param wizard_user: browse record of model portal.wizard.user @return: browse record of model res.users """ res_users = self.pool.get('res.users') create_context = dict(context or {}, noshortcut=True) # to prevent shortcut creation values = { 'login': extract_email(wizard_user.email), 'partner_id': wizard_user.partner_id.id, 'groups_id': [(6, 0, [])], 'share': True, } user_id = res_users.create(cr, uid, values, context=create_context) return res_users.browse(cr, uid, user_id, context) def _send_email(self, cr, uid, wizard_user, context=None): """ send notification email to a new portal user @param wizard_user: browse record of model portal.wizard.user @return: the id of the created mail.mail record """ this_context = context this_user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context) if not this_user.email: raise osv.except_osv(_('Email Required'), _('You must have an email address in your User Preferences to send emails.')) # determine subject and body in the portal user's language user = self._retrieve_user(cr, SUPERUSER_ID, wizard_user, context) context = dict(this_context or {}, lang=user.lang) data = { 'company': this_user.company_id.name, 'portal': wizard_user.wizard_id.portal_id.name, 'welcome_message': wizard_user.wizard_id.welcome_message or "", 'db': cr.dbname, 'name': user.name, 'login': user.login, 'url': user.signup_url, } mail_mail = self.pool.get('mail.mail') mail_values = { 'email_from': this_user.email, 'email_to': user.email, 'subject': _(WELCOME_EMAIL_SUBJECT) % data, 'body_html': '<pre>%s</pre>' % (_(WELCOME_EMAIL_BODY) % data), 'state': 
'outgoing', 'type': 'email', } mail_id = mail_mail.create(cr, uid, mail_values, context=this_context) return mail_mail.send(cr, uid, [mail_id], context=this_context) # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
runarberg/servo
tests/wpt/web-platform-tests/referrer-policy/generic/tools/common_paths.py
238
1823
import os, sys, json, re

# Directory layout, resolved relative to this script's own location:
# <test root>/<spec>/generic/tools/  (this script lives in tools/).
script_directory = os.path.dirname(os.path.abspath(__file__))
generic_directory = os.path.abspath(os.path.join(script_directory, '..'))
template_directory = os.path.abspath(os.path.join(script_directory,
                                                  '..', 'template'))
spec_directory = os.path.abspath(os.path.join(script_directory, '..', '..'))
test_root_directory = os.path.abspath(os.path.join(script_directory,
                                                   '..', '..', '..'))

spec_filename = os.path.join(spec_directory, "spec.src.json")
generated_spec_json_filename = os.path.join(spec_directory, "spec_json.js")

# %-format patterns describing where a generated test file lives,
# keyed by the selection fields of the spec.
selection_pattern = '%(delivery_method)s/' + \
                    '%(origin)s/' + \
                    '%(source_protocol)s-%(target_protocol)s/' + \
                    '%(subresource)s/'

test_file_path_pattern = '%(spec_name)s/' + selection_pattern + \
                         '%(name)s.%(redirection)s.%(source_protocol)s.html'


def get_template(basename):
    """Return the contents of template file *basename* as a string."""
    with open(os.path.join(template_directory, basename)) as f:
        return f.read()


def read_nth_line(fp, line_number):
    """Return the 1-indexed *line_number*-th line of *fp*, or None when the
    file has fewer lines.  The file position is rewound before reading."""
    fp.seek(0)
    for i, line in enumerate(fp):
        if (i + 1) == line_number:
            return line


def load_spec_json():
    """Parse and return spec.src.json.

    On malformed JSON, print the parser message plus a caret-annotated
    snippet of the offending line and exit with status 1.

    BUG FIX: the original used Python-2-only syntax
    (``except ValueError, ex``, ``print`` statements, ``ex.message``),
    which is a SyntaxError on Python 3; this version runs on both.
    """
    re_error_location = re.compile('line ([0-9]+) column ([0-9]+)')
    with open(spec_filename) as f:
        try:
            spec_json = json.load(f)
        except ValueError as ex:
            print(str(ex))
            match = re_error_location.search(str(ex))
            if match:
                line_number, column = int(match.group(1)), int(match.group(2))
                print(read_nth_line(f, line_number).rstrip())
                print(" " * (column - 1) + "^")
            sys.exit(1)
    return spec_json
mpl-2.0
kromain/chromium-tools
third_party/boto/pyami/copybot.py
102
4273
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
#
import boto
from boto.pyami.scriptbase import ScriptBase
import os, StringIO


class CopyBot(ScriptBase):
    """Pyami script that copies every key from a source S3 bucket to a
    destination bucket, mirrors ACLs, uploads its own log file, and can
    terminate the instance when finished.

    Configuration is read from boto.config under this script's section:
    src_bucket, dst_bucket, replace_dst, copy_acls,
    dest_aws_access_key_id / dest_aws_secret_access_key,
    exit_on_completion.
    """

    def __init__(self):
        ScriptBase.__init__(self)
        self.wdir = boto.config.get('Pyami', 'working_dir')
        self.log_file = '%s.log' % self.instance_id
        self.log_path = os.path.join(self.wdir, self.log_file)
        boto.set_file_logger(self.name, self.log_path)
        self.src_name = boto.config.get(self.name, 'src_bucket')
        self.dst_name = boto.config.get(self.name, 'dst_bucket')
        self.replace = boto.config.getbool(self.name, 'replace_dst', True)
        s3 = boto.connect_s3()
        self.src = s3.lookup(self.src_name)
        if not self.src:
            boto.log.error('Source bucket does not exist: %s' % self.src_name)
        # If separate destination credentials are configured, open a second
        # connection with them and look up / create the destination bucket
        # through that connection instead.
        dest_access_key = boto.config.get(self.name, 'dest_aws_access_key_id', None)
        if dest_access_key:
            dest_secret_key = boto.config.get(self.name, 'dest_aws_secret_access_key', None)
            # BUG FIX: boto has no top-level connect(); the original call
            # boto.connect(...) would raise AttributeError at runtime.
            s3 = boto.connect_s3(dest_access_key, dest_secret_key)
        self.dst = s3.lookup(self.dst_name)
        if not self.dst:
            self.dst = s3.create_bucket(self.dst_name)

    def copy_bucket_acl(self):
        """Copy the bucket-level ACL from source to destination
        (only when copy_acls is enabled)."""
        if boto.config.get(self.name, 'copy_acls', True):
            acl = self.src.get_xml_acl()
            self.dst.set_xml_acl(acl)

    def copy_key_acl(self, src, dst):
        """Copy a single key's ACL from src to dst (when copy_acls is on)."""
        if boto.config.get(self.name, 'copy_acls', True):
            acl = src.get_xml_acl()
            dst.set_xml_acl(acl)

    def copy_keys(self):
        """Download each source key into the working directory, upload it to
        the destination bucket, mirror its ACL, then delete the local copy.
        Existing destination keys are skipped unless replace_dst is set."""
        boto.log.info('src=%s' % self.src.name)
        boto.log.info('dst=%s' % self.dst.name)
        key = None
        try:
            for key in self.src:
                if not self.replace:
                    exists = self.dst.lookup(key.name)
                    if exists:
                        boto.log.info('key=%s already exists in %s, skipping' % (key.name, self.dst.name))
                        continue
                boto.log.info('copying %d bytes from key=%s' % (key.size, key.name))
                prefix, base = os.path.split(key.name)
                path = os.path.join(self.wdir, base)
                key.get_contents_to_filename(path)
                new_key = self.dst.new_key(key.name)
                new_key.set_contents_from_filename(path)
                self.copy_key_acl(key, new_key)
                os.unlink(path)
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # still propagate; the copy remains best-effort and is logged.
            # `key` is guarded because the iterator itself may fail before
            # the first key is bound.
            boto.log.exception('Error copying key: %s' % (key.name if key else '<none>'))

    def copy_log(self):
        """Upload this run's log file to the destination bucket."""
        key = self.dst.new_key(self.log_file)
        key.set_contents_from_filename(self.log_path)

    def main(self):
        fp = StringIO.StringIO()
        boto.config.dump_safe(fp)
        self.notify('%s (%s) Starting' % (self.name, self.instance_id), fp.getvalue())
        if self.src and self.dst:
            self.copy_keys()
        if self.dst:
            self.copy_log()
        self.notify('%s (%s) Stopping' % (self.name, self.instance_id),
                    'Copy Operation Complete')
        if boto.config.getbool(self.name, 'exit_on_completion', True):
            ec2 = boto.connect_ec2()
            ec2.terminate_instances([self.instance_id])
bsd-3-clause
sam-falvo/kestrel
cores/S16X4B/rtl/nmigen/interfaces.py
1
3319
from nmigen import Signal # Unprefixed opcodes are 100% backward compatible with S16X4A. # New addition is the use of opcode 8 as an escape prefix. # Additionally, opcode 9 is reserved as a prefix for future # use. OPC_NOP = 0 OPC_LIT = 1 OPC_FWM = 2 OPC_SWM = 3 OPC_ADD = 4 OPC_AND = 5 OPC_XOR = 6 OPC_ZGO = 7 OPC_prefix8 = 8 OPC_prefix9 = 9 OPC_FBM = 10 OPC_SBM = 11 OPC_LCALL = 12 OPC_ICALL = 13 OPC_GO = 14 OPC_NZGO = 15 # 8-prefixed opcodes below. PFX8_FCR = 0 # Fetch Control Register PFX8_SCR = 1 # Store Control Register PFX8_INW = 2 # Read word from I/O device PFX8_OUTW = 3 # Write word to I/O device PFX8_unk4 = 4 PFX8_unk5 = 5 PFX8_unk6 = 6 PFX8_unk7 = 7 PFX8_unk8 = 8 PFX8_unk9 = 9 PFX8_unkA = 10 PFX8_unkB = 11 PFX8_unkC = 12 PFX8_unkD = 13 PFX8_unkE = 14 PFX8_unkF = 15 # Address Types # # AT_O is a 3 bit signal. 5 out of the 8 cycle types are defined. # Values are defined so that AT_O[0:2] can be tied directly to # hardware expecting VPA_O and VDA_O of a 65816 or S16X4A. # # 2 1 0 # +-------+-------+-------+ # | IOREQ | VPA | VDA | # +-------+-------+-------+ # # (I avoid the use of "Cycle Type" because this term has some # prior-defined meaning in the context of a Wishbone interconnect.) AT_IDLE = 0 # Bus is idle; address is meaningless. AT_DAT = 1 # Bus is presenting a data memory address. AT_PGM = 2 # Bus is presenting a program memory address. AT_ARG = 3 # Bus is presenting a program memory address, but for an operand. AT_unk4 = 4 # AT_IO = 5 # Bus is presenting an I/O port address. 
AT_unk6 = 6 # AT_unk7 = 7 # def create_s16x4b_interface(self, platform=''): self.adr_o = Signal(15) # Word address self.we_o = Signal(1) self.cyc_o = Signal(1) self.stb_o = Signal(1) self.sel_o = Signal(2) self.at_o = Signal(3) # New with S16X4B; replaces vda_o and vpa_o self.dat_o = Signal(16) self.ack_i = Signal(1) self.err_i = Signal(1) # New with S16X4A (then called ABORT_I) self.dat_i = Signal(16) self.irq_i = Signal(16) # New with S16X4B self.trap_o = Signal(1) # New with S16X4B (acks all exceptions) self.intack_o = Signal(1) # New with S16X4B (acks only interrupts) if platform == 'formal': self.fv_pc = Signal(15) self.fv_iw = Signal(16) self.fv_f_e = Signal(1) self.fv_u = Signal(16) self.fv_v = Signal(16) self.fv_w = Signal(16) self.fv_x = Signal(16) self.fv_y = Signal(16) self.fv_z = Signal(16) self.fv_opc = Signal(4) self.fv_cycle_done = Signal(1) self.fv_current_slot = Signal(2) self.fv_epc = Signal(len(self.fv_pc)) self.fv_eat = Signal(len(self.at_o)) self.fv_ecs = Signal(len(self.fv_current_slot)) self.fv_efe = Signal(len(self.fv_f_e)) self.fv_eiw = Signal(len(self.fv_iw)) self.fv_eipa = Signal(len(self.fv_pc)) self.fv_ipa = Signal(len(self.fv_eipa)) self.fv_ie = Signal(len(self.dat_i)) self.fv_eie = Signal(len(self.fv_ie)) self.fv_ehpc = Signal(len(self.fv_pc)) self.fv_ihpc = Signal(len(self.fv_pc)) self.fv_take_int = Signal(1) self.fv_sample_fe = Signal(1) self.fv_sample_at = Signal(len(self.fv_eat)) self.fv_take_trap = Signal(1)
mpl-2.0
neocortex/paletti
paletti/utils.py
1
3459
import numpy as np def rgb2lab(image): """ Transforms an RGB-image to a LAB-image. """ return xyz2lab(rgb2xyz(image)) def lab2rgb(image): """ Transforms a LAB-image to an RGB-image. """ return xyz2rgb(lab2xyz(image)) def rgb2xyz(image): """ Transforms an RGB-mage to a XYZ-image. """ image = np.array(image, dtype='float64') r = image[:, :, 0] / 255. g = image[:, :, 1] / 255. b = image[:, :, 2] / 255. ri = r > .04045 r[ri] = ((r[ri] + .055) / 1.055) ** 2.4 r[~ri] = r[~ri] / 12.92 gi = g > .04045 g[gi] = ((g[gi] + .055) / 1.055) ** 2.4 g[~gi] = g[~gi] / 12.92 bi = b > .04045 b[bi] = ((b[bi] + .055) / 1.055) ** 2.4 b[~bi] = b[~bi] / 12.92 r *= 100. g *= 100. b *= 100. x = r * .4124 + g * .3576 + b * .1805 y = r * .2126 + g * .7152 + b * .0722 z = r * .0193 + g * .1192 + b * .9505 return np.transpose(np.array([x, y, z]), (1, 2, 0)) def xyz2rgb(image): """ Transforms a XYZ-image to an RGB-image. """ x = image[:, :, 0] / 100. y = image[:, :, 1] / 100. z = image[:, :, 2] / 100. var_R = x * 3.2406 + y * -1.5372 + z * -0.4986 var_G = x * -0.9689 + y * 1.8758 + z * 0.0415 var_B = x * 0.0557 + y * -0.2040 + z * 1.0570 def convert(var): i = var > 0.0031308 var[i] = 1.055 * (var[i] ** (1 / 2.4)) - 0.055 var[~i] = var[~i] * 12.92 return var var_R = convert(var_R) var_G = convert(var_G) var_B = convert(var_B) var_R[var_R < 0] = 0 var_B[var_B < 0] = 0 var_G[var_G < 0] = 0 var_R[var_R > 1] = 1 var_B[var_B > 1] = 1 var_G[var_G > 1] = 1 R = var_R * 255 G = var_G * 255 B = var_B * 255 return np.transpose(np.array([R, G, B], dtype='uint8'), (1, 2, 0)) def xyz2lab(image): """ Transforms a XYZ-image to a LAB-image. """ var_X = image[:, :, 0] / 95.047 var_Y = image[:, :, 1] / 100. var_Z = image[:, :, 2] / 108.883 xi = var_X > .008856 var_X[xi] = var_X[xi] ** (1. / 3.) var_X[~xi] = (7.787 * var_X[~xi]) + (16. / 116.) yi = var_Y > .008856 var_Y[yi] = var_Y[yi] ** (1. / 3.) var_Y[~yi] = (7.787 * var_Y[~yi]) + (16. / 116.) zi = var_Z > .008856 var_Z[zi] = var_Z[zi] ** (1. / 3.) 
var_Z[~zi] = (7.787 * var_Z[~zi]) + (16. / 116.) L = (116 * var_Y) - 16 a = 500. * (var_X - var_Y) b = 200. * (var_Y - var_Z) return np.transpose(np.array([L, a, b]), (1, 2, 0)) def lab2xyz(image): """ Transforms a LAB-image to a XYZ-image. """ var_Y = (image[:, :, 0] + 16.) / 116. var_X = image[:, :, 1] / 500. + var_Y var_Z = var_Y - image[:, :, 2] / 200. yi = var_Y > 0.2069 var_Y[yi] = var_Y[yi] ** 3 var_Y[~yi] = (var_Y[~yi] - 16. / 116.) / 7.787 xi = var_X > 0.2069 var_X[xi] = var_X[xi] ** 3 var_X[~xi] = (var_X[~xi] - 16. / 116.) / 7.787 zi = var_Z > 0.2069 var_Z[zi] = var_Z[zi] ** 3 var_Z[~zi] = (var_Z[~zi] - 16. / 116.) / 7.787 X = 95.047 * var_X Y = 100. * var_Y Z = 108.883 * var_Z return np.transpose(np.array([X, Y, Z]), (1, 2, 0)) def hex2rgb(hexcolor): """ Convert a color in Hex format to RGB. """ value = hexcolor.lstrip('#') if hexcolor.startswith('#') else hexcolor lv = len(value) return [int(value[i:i + lv // 3], 16) for i in range(0, lv, lv // 3)] def rgb2hex(rgb): """ Convert an RGB color to Hex format. """ return '#%02x%02x%02x' % rgb
mit
xwolf12/scikit-learn
sklearn/linear_model/randomized_l1.py
95
23365
""" Randomized Lasso/Logistic: feature selection based on Lasso and sparse Logistic Regression """ # Author: Gael Varoquaux, Alexandre Gramfort # # License: BSD 3 clause import itertools from abc import ABCMeta, abstractmethod import warnings import numpy as np from scipy.sparse import issparse from scipy import sparse from scipy.interpolate import interp1d from .base import center_data from ..base import BaseEstimator, TransformerMixin from ..externals import six from ..externals.joblib import Memory, Parallel, delayed from ..utils import (as_float_array, check_random_state, check_X_y, check_array, safe_mask, ConvergenceWarning) from ..utils.validation import check_is_fitted from .least_angle import lars_path, LassoLarsIC from .logistic import LogisticRegression ############################################################################### # Randomized linear model: feature selection def _resample_model(estimator_func, X, y, scaling=.5, n_resampling=200, n_jobs=1, verbose=False, pre_dispatch='3*n_jobs', random_state=None, sample_fraction=.75, **params): random_state = check_random_state(random_state) # We are generating 1 - weights, and not weights n_samples, n_features = X.shape if not (0 < scaling < 1): raise ValueError( "'scaling' should be between 0 and 1. Got %r instead." % scaling) scaling = 1. 
- scaling scores_ = 0.0 for active_set in Parallel(n_jobs=n_jobs, verbose=verbose, pre_dispatch=pre_dispatch)( delayed(estimator_func)( X, y, weights=scaling * random_state.random_integers( 0, 1, size=(n_features,)), mask=(random_state.rand(n_samples) < sample_fraction), verbose=max(0, verbose - 1), **params) for _ in range(n_resampling)): scores_ += active_set scores_ /= n_resampling return scores_ class BaseRandomizedLinearModel(six.with_metaclass(ABCMeta, BaseEstimator, TransformerMixin)): """Base class to implement randomized linear models for feature selection This implements the strategy by Meinshausen and Buhlman: stability selection with randomized sampling, and random re-weighting of the penalty. """ @abstractmethod def __init__(self): pass _center_data = staticmethod(center_data) def fit(self, X, y): """Fit the model using X, y as training data. Parameters ---------- X : array-like, sparse matrix shape = [n_samples, n_features] Training data. y : array-like, shape = [n_samples] Target values. Returns ------- self : object Returns an instance of self. 
""" X, y = check_X_y(X, y, ['csr', 'csc', 'coo'], y_numeric=True) X = as_float_array(X, copy=False) n_samples, n_features = X.shape X, y, X_mean, y_mean, X_std = self._center_data(X, y, self.fit_intercept, self.normalize) estimator_func, params = self._make_estimator_and_params(X, y) memory = self.memory if isinstance(memory, six.string_types): memory = Memory(cachedir=memory) scores_ = memory.cache( _resample_model, ignore=['verbose', 'n_jobs', 'pre_dispatch'] )( estimator_func, X, y, scaling=self.scaling, n_resampling=self.n_resampling, n_jobs=self.n_jobs, verbose=self.verbose, pre_dispatch=self.pre_dispatch, random_state=self.random_state, sample_fraction=self.sample_fraction, **params) if scores_.ndim == 1: scores_ = scores_[:, np.newaxis] self.all_scores_ = scores_ self.scores_ = np.max(self.all_scores_, axis=1) return self def _make_estimator_and_params(self, X, y): """Return the parameters passed to the estimator""" raise NotImplementedError def get_support(self, indices=False): """Return a mask, or list, of the features/indices selected.""" check_is_fitted(self, 'scores_') mask = self.scores_ > self.selection_threshold return mask if not indices else np.where(mask)[0] # XXX: the two function below are copy/pasted from feature_selection, # Should we add an intermediate base class? 
def transform(self, X): """Transform a new matrix using the selected features""" mask = self.get_support() X = check_array(X) if len(mask) != X.shape[1]: raise ValueError("X has a different shape than during fitting.") return check_array(X)[:, safe_mask(X, mask)] def inverse_transform(self, X): """Transform a new matrix using the selected features""" support = self.get_support() if X.ndim == 1: X = X[None, :] Xt = np.zeros((X.shape[0], support.size)) Xt[:, support] = X return Xt ############################################################################### # Randomized lasso: regression settings def _randomized_lasso(X, y, weights, mask, alpha=1., verbose=False, precompute=False, eps=np.finfo(np.float).eps, max_iter=500): X = X[safe_mask(X, mask)] y = y[mask] # Center X and y to avoid fit the intercept X -= X.mean(axis=0) y -= y.mean() alpha = np.atleast_1d(np.asarray(alpha, dtype=np.float)) X = (1 - weights) * X with warnings.catch_warnings(): warnings.simplefilter('ignore', ConvergenceWarning) alphas_, _, coef_ = lars_path(X, y, Gram=precompute, copy_X=False, copy_Gram=False, alpha_min=np.min(alpha), method='lasso', verbose=verbose, max_iter=max_iter, eps=eps) if len(alpha) > 1: if len(alphas_) > 1: # np.min(alpha) < alpha_min interpolator = interp1d(alphas_[::-1], coef_[:, ::-1], bounds_error=False, fill_value=0.) scores = (interpolator(alpha) != 0.0) else: scores = np.zeros((X.shape[1], len(alpha)), dtype=np.bool) else: scores = coef_[:, -1] != 0.0 return scores class RandomizedLasso(BaseRandomizedLinearModel): """Randomized Lasso. Randomized Lasso works by resampling the train data and computing a Lasso on each resampling. In short, the features selected more often are good features. It is also known as stability selection. Read more in the :ref:`User Guide <randomized_l1>`. Parameters ---------- alpha : float, 'aic', or 'bic', optional The regularization parameter alpha parameter in the Lasso. 
Warning: this is not the alpha parameter in the stability selection article which is scaling. scaling : float, optional The alpha parameter in the stability selection article used to randomly scale the features. Should be between 0 and 1. sample_fraction : float, optional The fraction of samples to be used in each randomized design. Should be between 0 and 1. If 1, all samples are used. n_resampling : int, optional Number of randomized models. selection_threshold: float, optional The score above which features should be selected. fit_intercept : boolean, optional whether to calculate the intercept for this model. If set to false, no intercept will be used in calculations (e.g. data is expected to be already centered). verbose : boolean or integer, optional Sets the verbosity amount normalize : boolean, optional, default True If True, the regressors X will be normalized before regression. precompute : True | False | 'auto' Whether to use a precomputed Gram matrix to speed up calculations. If set to 'auto' let us decide. The Gram matrix can also be passed as argument. max_iter : integer, optional Maximum number of iterations to perform in the Lars algorithm. eps : float, optional The machine-precision regularization in the computation of the Cholesky diagonal factors. Increase this for very ill-conditioned systems. Unlike the 'tol' parameter in some iterative optimization-based algorithms, this parameter does not control the tolerance of the optimization. n_jobs : integer, optional Number of CPUs to use during the resampling. If '-1', use all the CPUs random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. pre_dispatch : int, or string, optional Controls the number of jobs that get dispatched during parallel execution. 
Reducing this number can be useful to avoid an explosion of memory consumption when more jobs get dispatched than CPUs can process. This parameter can be: - None, in which case all the jobs are immediately created and spawned. Use this for lightweight and fast-running jobs, to avoid delays due to on-demand spawning of the jobs - An int, giving the exact number of total jobs that are spawned - A string, giving an expression as a function of n_jobs, as in '2*n_jobs' memory : Instance of joblib.Memory or string Used for internal caching. By default, no caching is done. If a string is given, it is the path to the caching directory. Attributes ---------- scores_ : array, shape = [n_features] Feature scores between 0 and 1. all_scores_ : array, shape = [n_features, n_reg_parameter] Feature scores between 0 and 1 for all values of the regularization \ parameter. The reference article suggests ``scores_`` is the max of \ ``all_scores_``. Examples -------- >>> from sklearn.linear_model import RandomizedLasso >>> randomized_lasso = RandomizedLasso() Notes ----- See examples/linear_model/plot_sparse_recovery.py for an example. 
References ---------- Stability selection Nicolai Meinshausen, Peter Buhlmann Journal of the Royal Statistical Society: Series B Volume 72, Issue 4, pages 417-473, September 2010 DOI: 10.1111/j.1467-9868.2010.00740.x See also -------- RandomizedLogisticRegression, LogisticRegression """ def __init__(self, alpha='aic', scaling=.5, sample_fraction=.75, n_resampling=200, selection_threshold=.25, fit_intercept=True, verbose=False, normalize=True, precompute='auto', max_iter=500, eps=np.finfo(np.float).eps, random_state=None, n_jobs=1, pre_dispatch='3*n_jobs', memory=Memory(cachedir=None, verbose=0)): self.alpha = alpha self.scaling = scaling self.sample_fraction = sample_fraction self.n_resampling = n_resampling self.fit_intercept = fit_intercept self.max_iter = max_iter self.verbose = verbose self.normalize = normalize self.precompute = precompute self.eps = eps self.random_state = random_state self.n_jobs = n_jobs self.selection_threshold = selection_threshold self.pre_dispatch = pre_dispatch self.memory = memory def _make_estimator_and_params(self, X, y): assert self.precompute in (True, False, None, 'auto') alpha = self.alpha if alpha in ('aic', 'bic'): model = LassoLarsIC(precompute=self.precompute, criterion=self.alpha, max_iter=self.max_iter, eps=self.eps) model.fit(X, y) self.alpha_ = alpha = model.alpha_ return _randomized_lasso, dict(alpha=alpha, max_iter=self.max_iter, eps=self.eps, precompute=self.precompute) ############################################################################### # Randomized logistic: classification settings def _randomized_logistic(X, y, weights, mask, C=1., verbose=False, fit_intercept=True, tol=1e-3): X = X[safe_mask(X, mask)] y = y[mask] if issparse(X): size = len(weights) weight_dia = sparse.dia_matrix((1 - weights, 0), (size, size)) X = X * weight_dia else: X *= (1 - weights) C = np.atleast_1d(np.asarray(C, dtype=np.float)) scores = np.zeros((X.shape[1], len(C)), dtype=np.bool) for this_C, this_scores in zip(C, scores.T): # 
XXX : would be great to do it with a warm_start ... clf = LogisticRegression(C=this_C, tol=tol, penalty='l1', dual=False, fit_intercept=fit_intercept) clf.fit(X, y) this_scores[:] = np.any( np.abs(clf.coef_) > 10 * np.finfo(np.float).eps, axis=0) return scores class RandomizedLogisticRegression(BaseRandomizedLinearModel): """Randomized Logistic Regression Randomized Regression works by resampling the train data and computing a LogisticRegression on each resampling. In short, the features selected more often are good features. It is also known as stability selection. Read more in the :ref:`User Guide <randomized_l1>`. Parameters ---------- C : float, optional, default=1 The regularization parameter C in the LogisticRegression. scaling : float, optional, default=0.5 The alpha parameter in the stability selection article used to randomly scale the features. Should be between 0 and 1. sample_fraction : float, optional, default=0.75 The fraction of samples to be used in each randomized design. Should be between 0 and 1. If 1, all samples are used. n_resampling : int, optional, default=200 Number of randomized models. selection_threshold : float, optional, default=0.25 The score above which features should be selected. fit_intercept : boolean, optional, default=True whether to calculate the intercept for this model. If set to false, no intercept will be used in calculations (e.g. data is expected to be already centered). verbose : boolean or integer, optional Sets the verbosity amount normalize : boolean, optional, default=True If True, the regressors X will be normalized before regression. tol : float, optional, default=1e-3 tolerance for stopping criteria of LogisticRegression n_jobs : integer, optional Number of CPUs to use during the resampling. 
If '-1', use all the CPUs random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. pre_dispatch : int, or string, optional Controls the number of jobs that get dispatched during parallel execution. Reducing this number can be useful to avoid an explosion of memory consumption when more jobs get dispatched than CPUs can process. This parameter can be: - None, in which case all the jobs are immediately created and spawned. Use this for lightweight and fast-running jobs, to avoid delays due to on-demand spawning of the jobs - An int, giving the exact number of total jobs that are spawned - A string, giving an expression as a function of n_jobs, as in '2*n_jobs' memory : Instance of joblib.Memory or string Used for internal caching. By default, no caching is done. If a string is given, it is the path to the caching directory. Attributes ---------- scores_ : array, shape = [n_features] Feature scores between 0 and 1. all_scores_ : array, shape = [n_features, n_reg_parameter] Feature scores between 0 and 1 for all values of the regularization \ parameter. The reference article suggests ``scores_`` is the max \ of ``all_scores_``. Examples -------- >>> from sklearn.linear_model import RandomizedLogisticRegression >>> randomized_logistic = RandomizedLogisticRegression() Notes ----- See examples/linear_model/plot_sparse_recovery.py for an example. 
References ---------- Stability selection Nicolai Meinshausen, Peter Buhlmann Journal of the Royal Statistical Society: Series B Volume 72, Issue 4, pages 417-473, September 2010 DOI: 10.1111/j.1467-9868.2010.00740.x See also -------- RandomizedLasso, Lasso, ElasticNet """ def __init__(self, C=1, scaling=.5, sample_fraction=.75, n_resampling=200, selection_threshold=.25, tol=1e-3, fit_intercept=True, verbose=False, normalize=True, random_state=None, n_jobs=1, pre_dispatch='3*n_jobs', memory=Memory(cachedir=None, verbose=0)): self.C = C self.scaling = scaling self.sample_fraction = sample_fraction self.n_resampling = n_resampling self.fit_intercept = fit_intercept self.verbose = verbose self.normalize = normalize self.tol = tol self.random_state = random_state self.n_jobs = n_jobs self.selection_threshold = selection_threshold self.pre_dispatch = pre_dispatch self.memory = memory def _make_estimator_and_params(self, X, y): params = dict(C=self.C, tol=self.tol, fit_intercept=self.fit_intercept) return _randomized_logistic, params def _center_data(self, X, y, fit_intercept, normalize=False): """Center the data in X but not in y""" X, _, Xmean, _, X_std = center_data(X, y, fit_intercept, normalize=normalize) return X, y, Xmean, y, X_std ############################################################################### # Stability paths def _lasso_stability_path(X, y, mask, weights, eps): "Inner loop of lasso_stability_path" X = X * weights[np.newaxis, :] X = X[safe_mask(X, mask), :] y = y[mask] alpha_max = np.max(np.abs(np.dot(X.T, y))) / X.shape[0] alpha_min = eps * alpha_max # set for early stopping in path with warnings.catch_warnings(): warnings.simplefilter('ignore', ConvergenceWarning) alphas, _, coefs = lars_path(X, y, method='lasso', verbose=False, alpha_min=alpha_min) # Scale alpha by alpha_max alphas /= alphas[0] # Sort alphas in assending order alphas = alphas[::-1] coefs = coefs[:, ::-1] # Get rid of the alphas that are too small mask = alphas >= eps # We also 
want to keep the first one: it should be close to the OLS # solution mask[0] = True alphas = alphas[mask] coefs = coefs[:, mask] return alphas, coefs def lasso_stability_path(X, y, scaling=0.5, random_state=None, n_resampling=200, n_grid=100, sample_fraction=0.75, eps=4 * np.finfo(np.float).eps, n_jobs=1, verbose=False): """Stabiliy path based on randomized Lasso estimates Read more in the :ref:`User Guide <randomized_l1>`. Parameters ---------- X : array-like, shape = [n_samples, n_features] training data. y : array-like, shape = [n_samples] target values. scaling : float, optional, default=0.5 The alpha parameter in the stability selection article used to randomly scale the features. Should be between 0 and 1. random_state : integer or numpy.random.RandomState, optional The generator used to randomize the design. n_resampling : int, optional, default=200 Number of randomized models. n_grid : int, optional, default=100 Number of grid points. The path is linearly reinterpolated on a grid between 0 and 1 before computing the scores. sample_fraction : float, optional, default=0.75 The fraction of samples to be used in each randomized design. Should be between 0 and 1. If 1, all samples are used. eps : float, optional Smallest value of alpha / alpha_max considered n_jobs : integer, optional Number of CPUs to use during the resampling. If '-1', use all the CPUs verbose : boolean or integer, optional Sets the verbosity amount Returns ------- alphas_grid : array, shape ~ [n_grid] The grid points between 0 and 1: alpha/alpha_max scores_path : array, shape = [n_features, n_grid] The scores for each feature along the path. Notes ----- See examples/linear_model/plot_sparse_recovery.py for an example. """ rng = check_random_state(random_state) if not (0 < scaling < 1): raise ValueError("Parameter 'scaling' should be between 0 and 1." " Got %r instead." 
% scaling) n_samples, n_features = X.shape paths = Parallel(n_jobs=n_jobs, verbose=verbose)( delayed(_lasso_stability_path)( X, y, mask=rng.rand(n_samples) < sample_fraction, weights=1. - scaling * rng.random_integers(0, 1, size=(n_features,)), eps=eps) for k in range(n_resampling)) all_alphas = sorted(list(set(itertools.chain(*[p[0] for p in paths])))) # Take approximately n_grid values stride = int(max(1, int(len(all_alphas) / float(n_grid)))) all_alphas = all_alphas[::stride] if not all_alphas[-1] == 1: all_alphas.append(1.) all_alphas = np.array(all_alphas) scores_path = np.zeros((n_features, len(all_alphas))) for alphas, coefs in paths: if alphas[0] != 0: alphas = np.r_[0, alphas] coefs = np.c_[np.ones((n_features, 1)), coefs] if alphas[-1] != all_alphas[-1]: alphas = np.r_[alphas, all_alphas[-1]] coefs = np.c_[coefs, np.zeros((n_features, 1))] scores_path += (interp1d(alphas, coefs, kind='nearest', bounds_error=False, fill_value=0, axis=-1)(all_alphas) != 0) scores_path /= n_resampling return all_alphas, scores_path
bsd-3-clause
indolentriffraff/fihndos
cogs/customcmds.py
1
40126
import math import re import json from github import Github from PythonGists import PythonGists from discord.ext import commands from cogs.utils.checks import cmd_prefix_len, load_config '''Module for custom commands adding, removing, and viewing.''' class Customcmds: def __init__(self, bot): self.bot = bot async def githubUpload(self, username, password, repo_name): g = Github(username, password) repo = g.get_user().get_repo(repo_name) with open('settings/commands.json', 'r') as fp: contents = fp.read() updateFile = '/settings/commands.json' sha = repo.get_contents(updateFile).sha repo.update_file('/settings/commands.json', 'Updating customcommands', contents, sha) async def check(self, ctx, val, pre): def is_numb(msg): if msg.content.isdigit() and val != 0: return 0 < int(msg.content) < val elif val == 0: return True else: return False reply = await self.bot.wait_for_message(author=ctx.message.author, check=is_numb) return reply # view customcmds async def customcommands(self, ctx): with open('settings/commands.json', 'r') as c: cmds = json.load(c) sortedcmds = sorted(cmds.keys(), key=lambda x: x.lower()) msgs = [] part = '' pre = cmd_prefix_len() if ctx.message.content[10 + pre:].strip() and ctx.message.content[10 + pre:].strip() != 'gist': one_cmd = True list_cmd = ctx.message.content.strip().split(' ')[1] for cmd in sortedcmds: if one_cmd and list_cmd == cmd: if type(cmds[cmd]) is list: part = cmd + ': ' for i in cmds[cmd]: part += str(i[0]) + ' | ' part = part.rstrip(' | ') break else: part = cmd else: for cmd in sortedcmds: if type(cmds[cmd]) is list: check = cmd + ': ' for i in cmds[cmd]: check += str(i[0]) + ' | ' check = check.rstrip(' | ') + '\n\n' else: check = cmd + '\n\n' if len(part + check) > 1900: msgs.append(part) part = check else: part += check msgs.append(part) if 'gist' in ctx.message.content or 'Gist' in ctx.message.content: msgs = '\n'.join(msgs) url = PythonGists.Gist(description='Custom Commands', content=str(msgs), name='commands.txt') 
await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'List of Custom Commands: %s' % url) else: if len(msgs) == 1: await self.bot.send_message(ctx.message.channel, '```css\n[List of Custom Commands]\n%s ```' % msgs[0].rstrip()) else: for b, i in enumerate(msgs): await self.bot.send_message(ctx.message.channel, '```css\n[List of Custom Commands %s/%s]\n%s ```' % ( b + 1, len(msgs), i.rstrip())) # List all custom commands @commands.group(pass_context=True) async def customcmds(self, ctx): """Lists all customcmds. >help customcmds for more info >customcmds - normal output with all the customcmds and subcommands (response names). >customcmds <command_name> - output only this specific command. >customcmds gist - normal output but posted to Gist to avoid cluttering the chat.""" if ctx.invoked_subcommand is None: await self.customcommands(ctx) await self.bot.delete_message(ctx.message) @customcmds.command(pass_context=True) async def long(self, ctx): """Lists detailed version of customcmds. 
Ex: >customcmds long""" with open('settings/commands.json') as commands: if 'gist' in ctx.message.content or 'Gist' in ctx.message.content: cmds = commands.read() link = PythonGists.Gist(description='Full commands.json', content=cmds, name='commands.json') return await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Full commands.json: %s' % link) else: cmds = json.load(commands) msg = '' sortedcmds = sorted(cmds.keys(), key=lambda x: x.lower()) if ctx.message.content[17:] and ctx.message.content[17:] != 'gist': one_cmd = True list_cmd = ctx.message.content.strip().split('long')[1].strip() for cmd in sortedcmds: if one_cmd and list_cmd == cmd: msg += '"' + cmd + '" : "' if type(cmds[cmd]) == list: for i in cmds[cmd]: msg += str(i) + ', ' msg = msg[:-2] + '",\n\n' else: msg += str(cmds[cmd]) + '",\n\n' else: for cmd in sortedcmds: msg += '"' + cmd + '" : "' if type(cmds[cmd]) == list: for i in cmds[cmd]: msg += str(i) + ', ' msg = msg[:-2] + '",\n\n' else: msg += str(cmds[cmd]) + '",\n\n' msg = msg[:-3] msg += '}```' part = int(math.ceil(len(msg) / 1900)) if part == 1: await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + '```json\nList of Custom Commands: {\n' + msg) else: msg = msg[7:-3] splitList = [msg[i:i + 1900] for i in range(0, len(msg), 1900)] allWords = [] splitmsg = '' for i, blocks in enumerate(splitList): splitmsg += 'List of Custom Commands: %s of %s\n\n' % (i + 1, part) for b in blocks.split('\n'): splitmsg += b + '\n' allWords.append(splitmsg) splitmsg = '' for i in allWords: await self.bot.send_message(ctx.message.channel, '```%s```' % i) # Change customcmd embed color @customcmds.command(pass_context=True, aliases=['colour']) async def color(self, ctx, *, msg: str = None): '''Set color (hex) of a custom command image. 
Ex: >customcmds color 000000''' if msg: try: msg = msg.lstrip('#') int(msg, 16) except: await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Invalid color.') await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Successfully set color for customcmd embeds.') else: msg = '' await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Removed embed color for customcmd embeds.') with open('settings/optional_config.json', 'r+') as fp: opt = json.load(fp) opt['customcmd_color'] = msg fp.seek(0) fp.truncate() json.dump(opt, fp, indent=4) @customcmds.command(pass_context=True) async def update(self, ctx): """Needs GitHub repo set for an update""" with open('settings/github.json', 'r+') as fp: opt = json.load(fp) if opt['username'] != "": try: await self.githubUpload(opt['username'], opt['password'], opt['reponame']) except: await self.bot.send_message(ctx.message.channel, "Incorrect GitHub credentials") else: await self.bot.send_message(ctx.message.channel, "GitHub account and repo not specified in `github.json`") # Toggle auto-embed for images/gifs @customcmds.command(pass_context=True) async def embed(self, ctx): """Toggle auto embeding of images for custom commands.""" with open('settings/optional_config.json', 'r+') as fp: opt = json.load(fp) if opt['rich_embed'] == 'on': opt['rich_embed'] = 'off' await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Turned off auto-embeding images/gifs for customcmds.') else: opt['rich_embed'] = 'on' await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Turned on auto-embeding images/gifs for customcmds.') fp.seek(0) fp.truncate() json.dump(opt, fp, indent=4) # Add a custom command @commands.command(pass_context=True) async def add(self, ctx, *, msg: str = None): """Add a new customcmd. >help add for more info Simply do: >add This will trigger the menu which you can navigate through and add your custom command that way. 
----------------------------------------------------------- Legacy method: There are two ways to add custom commands. The first way: ----Simple---- >add <command> <response> Now, if you do .<command> you will receive <response>. Example: >add nervous http://i.imgur.com/K9gMjWo.gifv Then, doing .nervous will output this imgur link (images and gifs will auto embed) Assuming that your customcmd_prefix is set to "." ---Multiple responses to the same command---- >add <command> <response_name> <response>. This way, you can add multiple responses to the same command. Example: >add cry k-on http://i.imgur.com/tWtXttk.gif Then you can add another to the .cry command: >add cry nichijou https://media.giphy.com/media/3fmRTfVIKMRiM/giphy.gif Note: If anything you are adding/removing is more than one word, you MUST put each part in quotes. Example: >add "cry" "mugi why" "http://i.imgur.com/tWtXttk.gif" or >add "copypasta" "I identify as an attack helicopter." Then invoke a specific response with .<command> <response_name> or get a random response for that command with .<command> So: .cry k-on would give you that specific link but .cry would give you one of the two you added to the cry command.""" if not msg: await self.bot.delete_message(ctx.message) pre = ctx.message.content.split('add')[0] customcmd_prefix = load_config()['customcmd_prefix'] menu = await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + '```\n\u2795 Choose type of customcmd to add. Enter a number:\n\n1. Simple customcmd (1 cmd with 1 response).\n2. Customcmd with multiple responses.\n3. View current customcmds.```') reply = await self.check(ctx, 4, pre) if reply: await self.bot.delete_message(reply) # Add simple customcmd if reply.content == "1": menu = await self.bot.edit_message(menu, self.bot.bot_prefix + '```\n\u2795 Enter a cmd name. 
This is how you will invoke your response.```') reply = await self.check(ctx, 0, pre) # Grab the cmd name if reply: await self.bot.delete_message(reply) entry_cmd = reply.content menu = await self.bot.edit_message(menu, self.bot.bot_prefix + '```\n\u2795 Enter the response for this cmd. This is what the bot will output when you send the cmd you specified.```') reply = await self.check(ctx, 0, pre) # Grab the response if reply: try: await self.bot.delete_message(reply) except: pass entry_response = reply.content with open('settings/commands.json', 'r+') as commands: cmds = json.load(commands) save = cmds commands.seek(0) commands.truncate() try: cmds[entry_cmd] = entry_response json.dump(cmds, commands, indent=4) await self.bot.edit_message(menu, self.bot.bot_prefix + 'Successfully added ``{}`` to ``{}`` Invoke this response by doing: ``{}``'.format( entry_response, entry_cmd, customcmd_prefix + entry_cmd)) except Exception as e: json.dump(save, commands, indent=4) await self.bot.edit_message(menu, self.bot.bot_prefix + 'Error, something went wrong. Exception: ``%s``' % e) # Add complex customcmd elif reply.content == "2": menu = await self.bot.edit_message(menu, self.bot.bot_prefix + '```\n\u2795 What to add? Pick a number.\n\n1. Add new command.\n2. Add response to existing command.```') reply = await self.check(ctx, 3, pre) if reply: await self.bot.delete_message(reply) # Create new list cmd if reply.content == '1': menu = await self.bot.edit_message(menu, self.bot.bot_prefix + '```\n\u2795 Enter the cmd name.```') reply = await self.check(ctx, 0, pre) # Grab cmd name if reply: await self.bot.delete_message(reply) entry_cmd = reply.content menu = await self.bot.edit_message(menu, self.bot.bot_prefix + '```\n\u2795 Since you selected to have this cmd have multiple responses, these multiple responses must have different names to map them. 
Enter a response name.```') reply = await self.check(ctx, 0, pre) # Grab response name if reply: await self.bot.delete_message(reply) entry_response = reply.content menu = await self.bot.edit_message(menu, self.bot.bot_prefix + '```\n\u2795 Now enter the response.```') reply = await self.check(ctx, 0, pre) # Grab the response if reply: try: await self.bot.delete_message(reply) except: pass response = reply.content with open('settings/commands.json', 'r+') as commands: cmds = json.load(commands) save = cmds commands.seek(0) commands.truncate() try: cmds[entry_cmd] = [[entry_response, response]] json.dump(cmds, commands, indent=4) await self.bot.edit_message(menu, self.bot.bot_prefix + 'Successfully added response with response name ``{}`` to command ``{}`` Invoke this specific response with ``{}`` or get a random response from the list of responses for this command with ``{}``'.format( entry_response, entry_cmd, customcmd_prefix + entry_cmd + ' ' + entry_response, customcmd_prefix + entry_cmd)) except Exception as e: json.dump(save, commands, indent=4) await self.bot.edit_message(menu, self.bot.bot_prefix + 'Error, something went wrong. Exception: ``%s``' % e) # Add to existing list cmd elif reply.content == '2': list_cmds = [] with open('settings/commands.json') as commands: cmds = json.load(commands) for i in cmds: if type(cmds[i]) is list: list_cmds.append(i) msg = '1. ' count = 0 for count, word in enumerate(list_cmds): msg += '{} {}.'.format(word, count + 2) msg = msg[:-(len(str(count + 2)) + 2)] if count == 0: return await self.bot.edit_message(menu, self.bot.bot_prefix + 'There are no cmds you can add multiple responses to. 
Create a cmd that enables multiple responses and then add a response to it.') menu = await self.bot.edit_message(menu, self.bot.bot_prefix + '```\n\u2795 Enter the number of the cmd name to add a response to.\n\n {}```'.format(msg)) reply = await self.check(ctx, count + 2, pre) if reply: await self.bot.delete_message(reply) entry_cmd = list_cmds[int(reply.content)-1] menu = await self.bot.edit_message(menu, self.bot.bot_prefix + '```\n\u2795 Enter a response name.```') reply = await self.check(ctx, 0, pre) # Grab response name if reply: await self.bot.delete_message(reply) entry_response = reply.content menu = await self.bot.edit_message(menu, self.bot.bot_prefix + '```\n\u2795 Now enter the response.```') reply = await self.check(ctx, 0, pre) # Grab the response if reply: try: await self.bot.delete_message(reply) except: pass response = reply.content with open('settings/commands.json', 'r+') as commands: save = cmds commands.seek(0) commands.truncate() try: cmds[entry_cmd].append([entry_response, response]) json.dump(cmds, commands, indent=4) await self.bot.edit_message(menu, self.bot.bot_prefix + 'Successfully added response with response name ``{}`` to command ``{}`` Invoke this specific response with ``{}`` or get a random response from the list of responses for this command with ``{}``'.format( entry_response, entry_cmd, customcmd_prefix + entry_cmd + ' ' + entry_response, customcmd_prefix + entry_cmd)) except Exception as e: json.dump(save, commands, indent=4) await self.bot.edit_message(menu, self.bot.bot_prefix + 'Error, something went wrong. 
Exception: ``%s``' % e) elif reply.content == '3': await self.bot.delete_message(menu) await self.customcommands(ctx) else: words = msg.strip() with open('settings/commands.json', 'r') as commands: cmds = json.load(commands) save = cmds try: # If there are quotes in the message (meaning multiple words for each param) if '"' in words: entry = re.findall('"([^"]+)"', words) # Item for key is list if len(entry) == 3: # Key exists if entry[0] in cmds: entries = [] for i in cmds[entry[0]]: entries.append(tuple((i[0], i[1]))) entries.append(tuple([entry[1], entry[2]])) cmds[entry[0]] = entries else: cmds[entry[0]] = [(entry[1], entry[2])] # Item for key is string else: if entry[0] in cmds: if type(cmds[entry[0]]) is list: return await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Error, this is a list command. To append to this command, you need a <response name>. Ex: ``>add cmd response_name response``') cmds[entry[0]] = entry[1] # No quotes so spaces seperate params else: # Item for key is list if len(words.split(' ')) == 3: entry = words.split(' ', 2) # Key exists if entry[0] in cmds: entries = [] for i in cmds[entry[0]]: entries.append(tuple((i[0], i[1]))) entries.append(tuple([entry[1], entry[2]])) cmds[entry[0]] = entries else: cmds[entry[0]] = [(entry[1], entry[2])] # Item for key is string else: entry = words.split(' ', 1) if entry[0] in cmds: if type(cmds[entry[0]]) is list: return await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Error, this is a list command. To append to this command, you need a <response name>. 
Ex: ``>add cmd response_name response``') cmds[entry[0]] = entry[1] await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Successfully added ``%s`` to ``%s``' % (entry[1], entry[0])) except Exception as e: with open('settings/commands.json', 'w') as commands: commands.truncate() json.dump(save, commands, indent=4) await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Error, something went wrong. Exception: ``%s``' % e) # Update commands.json with open('settings/commands.json', 'w') as commands: commands.truncate() json.dump(cmds, commands, indent=4) # Remove a custom command @commands.command(pass_context=True) async def remove(self, ctx, *, msg: str = None): """Remove a customcmd. >help remove for more info. Simply do: >remove This will trigger the menu which you can navigate through and remove your custom command that way. ----------------------------------------------------------- Legacy method: >remove <command> or >remove <command> <response_name> if you want to remove a specific response for a command. Just like with the add cmd, note that if anything you are adding/removing is more than one word, you must put each part in quotes. Example: If "cry" is the command and "mugi why" is the name for one of the links, removing that link would be: >remove "cry" "mugi why" """ if not msg: await self.bot.delete_message(ctx.message) pre = ctx.message.content.split('remove')[0] menu = await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + '```\n\u2796 Choose what to remove. Enter a number:\n\n1. A command and all its responses.\n2. A single response from a command that has more than one.```') reply = await self.check(ctx, 3, pre) if reply: await self.bot.delete_message(reply) # Remove a cmd if reply.content == '1': with open('settings/commands.json') as commands: cmds = json.load(commands) msg = '1. 
' count = 0 all_cmds = [] for count, word in enumerate(cmds): all_cmds.append(word) msg += '{} {}.'.format(word, count + 2) msg = msg[:-(len(str(count + 2)) + 2)] if count == 0: return await self.bot.edit_message(menu, self.bot.bot_prefix + 'There are no cmds to remove.') menu = await self.bot.edit_message(menu, self.bot.bot_prefix + '```\n\u2796 Enter the number of the cmd to remove.\n\n {}```'.format( msg)) reply = await self.check(ctx, count + 2, pre) if reply: await self.bot.delete_message(reply) with open('settings/commands.json', 'r+') as commands: save = cmds commands.seek(0) commands.truncate() try: cmd_to_remove = all_cmds[int(reply.content) - 1] del cmds[cmd_to_remove] json.dump(cmds, commands, indent=4) await self.bot.edit_message(menu, self.bot.bot_prefix + 'Successfully removed command ``{}``'.format(cmd_to_remove)) except Exception as e: json.dump(save, commands, indent=4) await self.bot.edit_message(menu, self.bot.bot_prefix + 'Error, something went wrong. Exception: ``%s``' % e) # Remove a specific response from a cmd elif reply.content == '2': list_cmds = [] with open('settings/commands.json') as commands: cmds = json.load(commands) for i in cmds: if type(cmds[i]) is list: list_cmds.append(i) msg = '1. ' count = 0 for count, word in enumerate(list_cmds): msg += '{} {}.'.format(word, count + 2) msg = msg[:-(len(str(count + 2)) + 2)] if count == 0: return await self.bot.edit_message(menu, self.bot.bot_prefix + 'There are no cmds with multiple responses. If you are looking to remove a cmd with just one response, select 1 in the main menu for this command.') menu = await self.bot.edit_message(menu, self.bot.bot_prefix + '```\n\u2796 Enter the number of the cmd that you want to remove a response from.\n\n {}```'.format( msg)) reply = await self.check(ctx, count + 2, pre) # List responses from this cmd if reply: await self.bot.delete_message(reply) cmd_to_remove_from = list_cmds[int(reply.content) - 1] cmd_responses = [] msg = '1. 
' count = 0 for count, word in enumerate(cmds[cmd_to_remove_from]): cmd_responses.append(word[0]) msg += '{} {}.'.format(word[0], count + 2) msg = msg[:-(len(str(count + 2)) + 2)] menu = await self.bot.edit_message(menu, self.bot.bot_prefix + '```\n\u2796 Enter the number of the response to remove.\n\n {}```'.format( msg)) reply = await self.check(ctx, count + 2, pre) if reply: await self.bot.delete_message(reply) with open('settings/commands.json', 'r+') as commands: save = cmds commands.seek(0) commands.truncate() try: response_to_remove = cmd_responses[int(reply.content) - 1] for i in cmds[cmd_to_remove_from]: if i[0] == response_to_remove: cmds[cmd_to_remove_from].remove(i) if cmds[cmd_to_remove_from] == []: del cmds[cmd_to_remove_from] json.dump(cmds, commands, indent=4) await self.bot.edit_message(menu, self.bot.bot_prefix + 'Successfully removed response with name ``{}`` from command ``{}``'.format( response_to_remove, cmd_to_remove_from)) except Exception as e: json.dump(save, commands, indent=4) await self.bot.edit_message(menu, self.bot.bot_prefix + 'Error, something went wrong. 
Exception: ``%s``' % e) else: words = msg.strip() with open('settings/commands.json', 'r') as commands: cmds = json.load(commands) save = cmds try: # If there are quotes in the message (meaning multiple words for each param) success = False def check(msg): if msg: return msg.content.lower().strip() == 'y' or msg.content.lower().strip() == 'n' else: return False if '"' in words: entry = re.findall('"([^"]+)"', words) # Item for key is list if len(entry) == 2: # Key exists if entry[0] in cmds: entries = [] for i in cmds[entry[0]]: if entry[1] == i[0]: cmds[entry[0]].remove(i) await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Successfully removed ``%s`` from ``%s``' % ( entry[1], entry[0])) success = True else: if entry[0] in cmds: del cmds[entry[0]] success = True await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Successfully removed ``%s`` from ``%s``' % ( entry[1], entry[0])) # Item for key is string else: if entry[0] in cmds: if type(cmds[entry[0]]) is list: await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'This will delete all responses for this list command. Are you sure you want to do this? 
(y/n).') reply = await self.bot.wait_for_message(timeout=10, author=ctx.message.author, check=check) if reply: if reply.content.lower().strip() == 'n': return await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Cancelled.') else: return await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Cancelled.') oldValue = cmds[entry[0]] del cmds[entry[0]] success = True await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Successfully removed ``%s`` from ``%s``' % (oldValue, entry[0])) # No quotes so spaces seperate params else: # Item for key is list if len(words.split(' ')) == 2: entry = words.split(' ') # Key exists if entry[0] in cmds: for i in cmds[entry[0]]: if entry[1] == i[0]: cmds[entry[0]].remove(i) await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Successfully removed ``%s`` from ``%s``' % ( entry[1], entry[0])) success = True else: if entry[0] in cmds: del cmds[entry[0]] success = True await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Successfully removed ``%s`` from ``%s``' % (entry[1], entry[0])) # Item for key is string else: entry = words.split(' ', 1) if entry[0] in cmds: if type(cmds[entry[0]]) is list: await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'This will delete all responses for this list command. Are you sure you want to do this? 
(y/n).') reply = await self.bot.wait_for_message(timeout=10, author=ctx.message.author, check=check) if reply: if reply.content.lower().strip() == 'n': return await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Cancelled.') else: return await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Cancelled.') oldValue = cmds[entry[0]] del cmds[entry[0]] success = True await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Successfully removed ``%s`` from ``%s``' % (oldValue, entry[0])) if success is False: await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Could not find specified command.') except Exception as e: with open('settings/commands.json', 'w') as commands: commands.truncate() json.dump(save, commands, indent=4) await self.bot.send_message(ctx.message.channel, self.bot.bot_prefix + 'Error, something went wrong. Exception: ``%s``' % e) # Update commands.json with open('settings/commands.json', 'w') as commands: commands.truncate() json.dump(cmds, commands, indent=4) def setup(bot): bot.add_cog(Customcmds(bot))
gpl-3.0
CellarD0-0r/whatever
node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
1361
45045
# Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. r"""Code to validate and convert settings of the Microsoft build tools. This file contains code to validate and convert settings of the Microsoft build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(), and ValidateMSBuildSettings() are the entry points. This file was created by comparing the projects created by Visual Studio 2008 and Visual Studio 2010 for all available settings through the user interface. The MSBuild schemas were also considered. They are typically found in the MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild """ import sys import re # Dictionaries of settings validators. The key is the tool name, the value is # a dictionary mapping setting names to validation functions. _msvs_validators = {} _msbuild_validators = {} # A dictionary of settings converters. The key is the tool name, the value is # a dictionary mapping setting names to conversion functions. _msvs_to_msbuild_converters = {} # Tool name mapping from MSVS to MSBuild. _msbuild_name_of_tool = {} class _Tool(object): """Represents a tool used by MSVS or MSBuild. Attributes: msvs_name: The name of the tool in MSVS. msbuild_name: The name of the tool in MSBuild. """ def __init__(self, msvs_name, msbuild_name): self.msvs_name = msvs_name self.msbuild_name = msbuild_name def _AddTool(tool): """Adds a tool to the four dictionaries used to process settings. This only defines the tool. Each setting also needs to be added. Args: tool: The _Tool object to be added. """ _msvs_validators[tool.msvs_name] = {} _msbuild_validators[tool.msbuild_name] = {} _msvs_to_msbuild_converters[tool.msvs_name] = {} _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name def _GetMSBuildToolSettings(msbuild_settings, tool): """Returns an MSBuild tool dictionary. 
Creates it if needed.""" return msbuild_settings.setdefault(tool.msbuild_name, {}) class _Type(object): """Type of settings (Base class).""" def ValidateMSVS(self, value): """Verifies that the value is legal for MSVS. Args: value: the value to check for this type. Raises: ValueError if value is not valid for MSVS. """ def ValidateMSBuild(self, value): """Verifies that the value is legal for MSBuild. Args: value: the value to check for this type. Raises: ValueError if value is not valid for MSBuild. """ def ConvertToMSBuild(self, value): """Returns the MSBuild equivalent of the MSVS value given. Args: value: the MSVS value to convert. Returns: the MSBuild equivalent. Raises: ValueError if value is not valid. """ return value class _String(_Type): """A setting that's just a string.""" def ValidateMSVS(self, value): if not isinstance(value, basestring): raise ValueError('expected string; got %r' % value) def ValidateMSBuild(self, value): if not isinstance(value, basestring): raise ValueError('expected string; got %r' % value) def ConvertToMSBuild(self, value): # Convert the macros return ConvertVCMacrosToMSBuild(value) class _StringList(_Type): """A settings that's a list of strings.""" def ValidateMSVS(self, value): if not isinstance(value, basestring) and not isinstance(value, list): raise ValueError('expected string list; got %r' % value) def ValidateMSBuild(self, value): if not isinstance(value, basestring) and not isinstance(value, list): raise ValueError('expected string list; got %r' % value) def ConvertToMSBuild(self, value): # Convert the macros if isinstance(value, list): return [ConvertVCMacrosToMSBuild(i) for i in value] else: return ConvertVCMacrosToMSBuild(value) class _Boolean(_Type): """Boolean settings, can have the values 'false' or 'true'.""" def _Validate(self, value): if value != 'true' and value != 'false': raise ValueError('expected bool; got %r' % value) def ValidateMSVS(self, value): self._Validate(value) def ValidateMSBuild(self, value): 
self._Validate(value) def ConvertToMSBuild(self, value): self._Validate(value) return value class _Integer(_Type): """Integer settings.""" def __init__(self, msbuild_base=10): _Type.__init__(self) self._msbuild_base = msbuild_base def ValidateMSVS(self, value): # Try to convert, this will raise ValueError if invalid. self.ConvertToMSBuild(value) def ValidateMSBuild(self, value): # Try to convert, this will raise ValueError if invalid. int(value, self._msbuild_base) def ConvertToMSBuild(self, value): msbuild_format = (self._msbuild_base == 10) and '%d' or '0x%04x' return msbuild_format % int(value) class _Enumeration(_Type): """Type of settings that is an enumeration. In MSVS, the values are indexes like '0', '1', and '2'. MSBuild uses text labels that are more representative, like 'Win32'. Constructor args: label_list: an array of MSBuild labels that correspond to the MSVS index. In the rare cases where MSVS has skipped an index value, None is used in the array to indicate the unused spot. new: an array of labels that are new to MSBuild. """ def __init__(self, label_list, new=None): _Type.__init__(self) self._label_list = label_list self._msbuild_values = set(value for value in label_list if value is not None) if new is not None: self._msbuild_values.update(new) def ValidateMSVS(self, value): # Try to convert. It will raise an exception if not valid. self.ConvertToMSBuild(value) def ValidateMSBuild(self, value): if value not in self._msbuild_values: raise ValueError('unrecognized enumerated value %s' % value) def ConvertToMSBuild(self, value): index = int(value) if index < 0 or index >= len(self._label_list): raise ValueError('index value (%d) not in expected range [0, %d)' % (index, len(self._label_list))) label = self._label_list[index] if label is None: raise ValueError('converted value for %s not specified.' % value) return label # Instantiate the various generic types. 
# Shared setting-type singletons used by the registration helpers below.
# Each carries a ValidateMSVS / ValidateMSBuild / ConvertToMSBuild triple
# (declared earlier in this file).
_boolean = _Boolean()
_integer = _Integer()
# For now, we don't do any special validation on these types:
_string = _String()
_file_name = _String()
_folder_name = _String()
_file_list = _StringList()
_folder_list = _StringList()
_string_list = _StringList()
# Some boolean settings went from numerical values to boolean.  The
# mapping is 0: default, 1: false, 2: true.
_newly_boolean = _Enumeration(['', 'false', 'true'])


# Registration helpers.  Each call records, for one MSVS setting name, a
# validator in _msvs_validators, (usually) a validator in _msbuild_validators,
# and a translation function in _msvs_to_msbuild_converters.
# NOTE(review): the docstrings below describe `tool` as "a dictionary", but
# the code reads tool.msvs_name / tool.msbuild_name as attributes — `tool` is
# actually one of the _Tool(...) instances created near the bottom of this
# block.  Docstrings kept as-is; confirm before relying on them.

def _Same(tool, name, setting_type):
  """Defines a setting that has the same name in MSVS and MSBuild.

  Args:
    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
    name: the name of the setting.
    setting_type: the type of this setting.
  """
  # Identical names on both sides are just a degenerate rename.
  _Renamed(tool, name, name, setting_type)


def _Renamed(tool, msvs_name, msbuild_name, setting_type):
  """Defines a setting for which the name has changed.

  Args:
    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
    msvs_name: the name of the MSVS setting.
    msbuild_name: the name of the MSBuild setting.
    setting_type: the type of this setting.
  """

  def _Translate(value, msbuild_settings):
    # Store the converted value under the tool's MSBuild section.
    msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
    msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value)

  _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS
  _msbuild_validators[tool.msbuild_name][msbuild_name] = (
      setting_type.ValidateMSBuild)
  _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate


def _Moved(tool, settings_name, msbuild_tool_name, setting_type):
  # Same name, but hosted under a different MSBuild tool/section.
  _MovedAndRenamed(tool, settings_name, msbuild_tool_name, settings_name,
                   setting_type)


def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name,
                     msbuild_settings_name, setting_type):
  """Defines a setting that may have moved to a new section.

  Args:
    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
    msvs_settings_name: the MSVS name of the setting.
    msbuild_tool_name: the name of the MSBuild tool to place the setting under.
    msbuild_settings_name: the MSBuild name of the setting.
    setting_type: the type of this setting.
  """

  def _Translate(value, msbuild_settings):
    tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {})
    tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value)

  _msvs_validators[tool.msvs_name][msvs_settings_name] = (
      setting_type.ValidateMSVS)
  validator = setting_type.ValidateMSBuild
  _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator
  _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate


def _MSVSOnly(tool, name, setting_type):
  """Defines a setting that is only found in MSVS.

  Args:
    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
    name: the name of the setting.
    setting_type: the type of this setting.
  """

  def _Translate(unused_value, unused_msbuild_settings):
    # Since this is for MSVS only settings, no translation will happen.
    pass

  _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS
  _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate


def _MSBuildOnly(tool, name, setting_type):
  """Defines a setting that is only found in MSBuild.

  Args:
    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
    name: the name of the setting.
    setting_type: the type of this setting.
  """

  def _Translate(value, msbuild_settings):
    # Let msbuild-only properties get translated as-is from msvs_settings.
    tool_settings = msbuild_settings.setdefault(tool.msbuild_name, {})
    tool_settings[name] = value

  _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild
  _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate


def _ConvertedToAdditionalOption(tool, msvs_name, flag):
  """Defines a setting that's handled via a command line option in MSBuild.

  Args:
    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
    msvs_name: the name of the MSVS setting that if 'true' becomes a flag
    flag: the flag to insert at the end of the AdditionalOptions
  """

  def _Translate(value, msbuild_settings):
    # Only a 'true' value produces the flag; anything else is dropped.
    if value == 'true':
      tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
      if 'AdditionalOptions' in tool_settings:
        new_flags = '%s %s' % (tool_settings['AdditionalOptions'], flag)
      else:
        new_flags = flag
      tool_settings['AdditionalOptions'] = new_flags

  _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS
  _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate


def _CustomGeneratePreprocessedFile(tool, msvs_name):
  # Maps the tri-state MSVS GeneratePreprocessedFile value onto the two
  # boolean MSBuild settings PreprocessToFile/PreprocessSuppressLineNumbers.
  def _Translate(value, msbuild_settings):
    tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
    if value == '0':
      tool_settings['PreprocessToFile'] = 'false'
      tool_settings['PreprocessSuppressLineNumbers'] = 'false'
    elif value == '1':  # /P
      tool_settings['PreprocessToFile'] = 'true'
      tool_settings['PreprocessSuppressLineNumbers'] = 'false'
    elif value == '2':  # /EP /P
      tool_settings['PreprocessToFile'] = 'true'
      tool_settings['PreprocessSuppressLineNumbers'] = 'true'
    else:
      raise ValueError('value must be one of [0, 1, 2]; got %s' % value)

  # Create a bogus validator that looks for '0', '1', or '2'
  msvs_validator = _Enumeration(['a', 'b', 'c']).ValidateMSVS
  _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator
  msbuild_validator = _boolean.ValidateMSBuild
  msbuild_tool_validators = _msbuild_validators[tool.msbuild_name]
  msbuild_tool_validators['PreprocessToFile'] = msbuild_validator
  msbuild_tool_validators['PreprocessSuppressLineNumbers'] = msbuild_validator
  _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate


# Matches a $(IntDir)/$(OutDir) macro followed by one or more slashes, so the
# redundant trailing slashes can be collapsed (those macros already end in a
# slash -- see FixVCMacroSlashes below).
fix_vc_macro_slashes_regex_list = ('IntDir', 'OutDir')
fix_vc_macro_slashes_regex = re.compile(
  r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list)
)

# Regular expression to detect keys that were generated by exclusion lists
_EXCLUDED_SUFFIX_RE = re.compile('^(.*)_excluded$')


def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
  """Verify that 'setting' is valid if it is generated from an exclusion list.

  If the setting appears to be generated from an exclusion list, the root name
  is checked.

  Args:
      setting:   A string that is the setting name to validate
      settings:  A dictionary where the keys are valid settings
      error_msg: The message to emit in the event of error
      stderr:    The stream receiving the error messages.
  """
  # This may be unrecognized because it's an exclusion list. If the
  # setting name has the _excluded suffix, then check the root name.
  unrecognized = True
  m = re.match(_EXCLUDED_SUFFIX_RE, setting)
  if m:
    root_setting = m.group(1)
    unrecognized = root_setting not in settings

  if unrecognized:
    # We don't know this setting.  Give a warning.
    # NOTE(review): Python 2 print-chevron syntax; this file predates
    # Python 3 support.
    print >> stderr, error_msg


def FixVCMacroSlashes(s):
  """Replace macros which have excessive following slashes.

  These macros are known to have a built-in trailing slash. Furthermore, many
  scripts hiccup on processing paths with extra slashes in the middle.

  This list is probably not exhaustive.  Add as needed.
  """
  if '$' in s:
    s = fix_vc_macro_slashes_regex.sub(r'\1', s)
  return s


def ConvertVCMacrosToMSBuild(s):
  """Convert the the MSVS macros found in the string to the MSBuild equivalent.

  This list is probably not exhaustive.  Add as needed.
  """
  if '$' in s:
    replace_map = {
        '$(ConfigurationName)': '$(Configuration)',
        '$(InputDir)': '%(RelativeDir)',
        '$(InputExt)': '%(Extension)',
        '$(InputFileName)': '%(Filename)%(Extension)',
        '$(InputName)': '%(Filename)',
        '$(InputPath)': '%(Identity)',
        '$(ParentName)': '$(ProjectFileName)',
        '$(PlatformName)': '$(Platform)',
        '$(SafeInputName)': '%(Filename)',
    }
    # NOTE(review): .iteritems() is Python-2-only; kept as-is since the rest
    # of this file uses py2 syntax throughout.
    for old, new in replace_map.iteritems():
      s = s.replace(old, new)
    s = FixVCMacroSlashes(s)
  return s


def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
  """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).

  Args:
      msvs_settings: A dictionary.  The key is the tool name.
          The values are themselves dictionaries of settings and their values.
      stderr: The stream receiving the error messages.

  Returns:
      A dictionary of MSBuild settings.  The key is either the MSBuild tool
      name or the empty string (for the global settings).  The values are
      themselves dictionaries of settings and their values.
  """
  msbuild_settings = {}
  for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems():
    if msvs_tool_name in _msvs_to_msbuild_converters:
      msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name]
      for msvs_setting, msvs_value in msvs_tool_settings.iteritems():
        if msvs_setting in msvs_tool:
          # Invoke the translation function.
          try:
            msvs_tool[msvs_setting](msvs_value, msbuild_settings)
          except ValueError, e:
            print >> stderr, ('Warning: while converting %s/%s to MSBuild, '
                              '%s' % (msvs_tool_name, msvs_setting, e))
        else:
          # Unknown setting: may still be a legitimate '<name>_excluded' key.
          _ValidateExclusionSetting(msvs_setting,
                                    msvs_tool,
                                    ('Warning: unrecognized setting %s/%s '
                                     'while converting to MSBuild.' %
                                     (msvs_tool_name, msvs_setting)),
                                    stderr)
    else:
      print >> stderr, ('Warning: unrecognized tool %s while converting to '
                        'MSBuild.' % msvs_tool_name)
  return msbuild_settings


def ValidateMSVSSettings(settings, stderr=sys.stderr):
  """Validates that the names of the settings are valid for MSVS.

  Args:
      settings: A dictionary.  The key is the tool name.  The values are
          themselves dictionaries of settings and their values.
      stderr: The stream receiving the error messages.
  """
  _ValidateSettings(_msvs_validators, settings, stderr)


def ValidateMSBuildSettings(settings, stderr=sys.stderr):
  """Validates that the names of the settings are valid for MSBuild.

  Args:
      settings: A dictionary.  The key is the tool name.  The values are
          themselves dictionaries of settings and their values.
      stderr: The stream receiving the error messages.
  """
  _ValidateSettings(_msbuild_validators, settings, stderr)


def _ValidateSettings(validators, settings, stderr):
  """Validates that the settings are valid for MSBuild or MSVS.

  We currently only validate the names of the settings, not their values.

  Args:
      validators: A dictionary of tools and their validators.
      settings: A dictionary.  The key is the tool name.  The values are
          themselves dictionaries of settings and their values.
      stderr: The stream receiving the error messages.
  """
  for tool_name in settings:
    if tool_name in validators:
      tool_validators = validators[tool_name]
      for setting, value in settings[tool_name].iteritems():
        if setting in tool_validators:
          try:
            tool_validators[setting](value)
          except ValueError, e:
            print >> stderr, ('Warning: for %s/%s, %s' %
                              (tool_name, setting, e))
        else:
          _ValidateExclusionSetting(setting,
                                    tool_validators,
                                    ('Warning: unrecognized setting %s/%s' %
                                     (tool_name, setting)),
                                    stderr)
    else:
      print >> stderr, ('Warning: unrecognized tool %s' % tool_name)


# MSVS and MBuild names of the tools.
_compile = _Tool('VCCLCompilerTool', 'ClCompile')
_link = _Tool('VCLinkerTool', 'Link')
_midl = _Tool('VCMIDLTool', 'Midl')
_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile')
_lib = _Tool('VCLibrarianTool', 'Lib')
_manifest = _Tool('VCManifestTool', 'Manifest')
_masm = _Tool('MASM', 'MASM')


# Register each tool so the validator/converter tables have a section for it.
_AddTool(_compile)
_AddTool(_link)
_AddTool(_midl)
_AddTool(_rc)
_AddTool(_lib)
_AddTool(_manifest)
_AddTool(_masm)
# Add sections only found in the MSBuild settings.
_msbuild_validators[''] = {}
_msbuild_validators['ProjectReference'] = {}
_msbuild_validators['ManifestResourceCompile'] = {}

# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and
# ClCompile in MSBuild.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for
# the schema of the MSBuild ClCompile settings.
# Settings table: each call registers MSVS/MSBuild validators and a converter
# for one ClCompile setting (trailing comments give the cl.exe flag).

# Options that have the same name in MSVS and MSBuild
_Same(_compile, 'AdditionalIncludeDirectories', _folder_list)  # /I
_Same(_compile, 'AdditionalOptions', _string_list)
_Same(_compile, 'AdditionalUsingDirectories', _folder_list)  # /AI
_Same(_compile, 'AssemblerListingLocation', _file_name)  # /Fa
_Same(_compile, 'BrowseInformationFile', _file_name)
_Same(_compile, 'BufferSecurityCheck', _boolean)  # /GS
_Same(_compile, 'DisableLanguageExtensions', _boolean)  # /Za
_Same(_compile, 'DisableSpecificWarnings', _string_list)  # /wd
_Same(_compile, 'EnableFiberSafeOptimizations', _boolean)  # /GT
_Same(_compile, 'EnablePREfast', _boolean)  # /analyze Visible='false'
_Same(_compile, 'ExpandAttributedSource', _boolean)  # /Fx
_Same(_compile, 'FloatingPointExceptions', _boolean)  # /fp:except
_Same(_compile, 'ForceConformanceInForLoopScope', _boolean)  # /Zc:forScope
_Same(_compile, 'ForcedIncludeFiles', _file_list)  # /FI
_Same(_compile, 'ForcedUsingFiles', _file_list)  # /FU
_Same(_compile, 'GenerateXMLDocumentationFiles', _boolean)  # /doc
_Same(_compile, 'IgnoreStandardIncludePath', _boolean)  # /X
_Same(_compile, 'MinimalRebuild', _boolean)  # /Gm
_Same(_compile, 'OmitDefaultLibName', _boolean)  # /Zl
_Same(_compile, 'OmitFramePointers', _boolean)  # /Oy
_Same(_compile, 'PreprocessorDefinitions', _string_list)  # /D
_Same(_compile, 'ProgramDataBaseFileName', _file_name)  # /Fd
_Same(_compile, 'RuntimeTypeInfo', _boolean)  # /GR
_Same(_compile, 'ShowIncludes', _boolean)  # /showIncludes
_Same(_compile, 'SmallerTypeCheck', _boolean)  # /RTCc
_Same(_compile, 'StringPooling', _boolean)  # /GF
_Same(_compile, 'SuppressStartupBanner', _boolean)  # /nologo
_Same(_compile, 'TreatWChar_tAsBuiltInType', _boolean)  # /Zc:wchar_t
_Same(_compile, 'UndefineAllPreprocessorDefinitions', _boolean)  # /u
_Same(_compile, 'UndefinePreprocessorDefinitions', _string_list)  # /U
_Same(_compile, 'UseFullPaths', _boolean)  # /FC
_Same(_compile, 'WholeProgramOptimization', _boolean)  # /GL
_Same(_compile, 'XMLDocumentationFileName', _file_name)

# Enumerated settings: list index is the numeric MSVS value, list entry the
# MSBuild name; `new=` entries exist only in the MSBuild format.
_Same(_compile, 'AssemblerOutput',
      _Enumeration(['NoListing',
                    'AssemblyCode',  # /FA
                    'All',  # /FAcs
                    'AssemblyAndMachineCode',  # /FAc
                    'AssemblyAndSourceCode']))  # /FAs
_Same(_compile, 'BasicRuntimeChecks',
      _Enumeration(['Default',
                    'StackFrameRuntimeCheck',  # /RTCs
                    'UninitializedLocalUsageCheck',  # /RTCu
                    'EnableFastChecks']))  # /RTC1
_Same(_compile, 'BrowseInformation',
      _Enumeration(['false',
                    'true',  # /FR
                    'true']))  # /Fr
_Same(_compile, 'CallingConvention',
      _Enumeration(['Cdecl',  # /Gd
                    'FastCall',  # /Gr
                    'StdCall',  # /Gz
                    'VectorCall']))  # /Gv
_Same(_compile, 'CompileAs',
      _Enumeration(['Default',
                    'CompileAsC',  # /TC
                    'CompileAsCpp']))  # /TP
_Same(_compile, 'DebugInformationFormat',
    _Enumeration(['',  # Disabled
                  'OldStyle',  # /Z7
                  None,
                  'ProgramDatabase',  # /Zi
                  'EditAndContinue']))  # /ZI
_Same(_compile, 'EnableEnhancedInstructionSet',
      _Enumeration(['NotSet',
                    'StreamingSIMDExtensions',  # /arch:SSE
                    'StreamingSIMDExtensions2',  # /arch:SSE2
                    'AdvancedVectorExtensions',  # /arch:AVX (vs2012+)
                    'NoExtensions',  # /arch:IA32 (vs2012+)
                    # This one only exists in the new msbuild format.
                    'AdvancedVectorExtensions2',  # /arch:AVX2 (vs2013r2+)
                    ]))
_Same(_compile, 'ErrorReporting',
      _Enumeration(['None',  # /errorReport:none
                    'Prompt',  # /errorReport:prompt
                    'Queue'],  # /errorReport:queue
                   new=['Send']))  # /errorReport:send"
_Same(_compile, 'ExceptionHandling',
      _Enumeration(['false',
                    'Sync',  # /EHsc
                    'Async'],  # /EHa
                   new=['SyncCThrow']))  # /EHs
_Same(_compile, 'FavorSizeOrSpeed',
      _Enumeration(['Neither',
                    'Speed',  # /Ot
                    'Size']))  # /Os
_Same(_compile, 'FloatingPointModel',
      _Enumeration(['Precise',  # /fp:precise
                    'Strict',  # /fp:strict
                    'Fast']))  # /fp:fast
_Same(_compile, 'InlineFunctionExpansion',
      _Enumeration(['Default',
                    'OnlyExplicitInline',  # /Ob1
                    'AnySuitable'],  # /Ob2
                   new=['Disabled']))  # /Ob0
_Same(_compile, 'Optimization',
      _Enumeration(['Disabled',  # /Od
                    'MinSpace',  # /O1
                    'MaxSpeed',  # /O2
                    'Full']))  # /Ox
_Same(_compile, 'RuntimeLibrary',
      _Enumeration(['MultiThreaded',  # /MT
                    'MultiThreadedDebug',  # /MTd
                    'MultiThreadedDLL',  # /MD
                    'MultiThreadedDebugDLL']))  # /MDd
_Same(_compile, 'StructMemberAlignment',
      _Enumeration(['Default',
                    '1Byte',  # /Zp1
                    '2Bytes',  # /Zp2
                    '4Bytes',  # /Zp4
                    '8Bytes',  # /Zp8
                    '16Bytes']))  # /Zp16
_Same(_compile, 'WarningLevel',
      _Enumeration(['TurnOffAllWarnings',  # /W0
                    'Level1',  # /W1
                    'Level2',  # /W2
                    'Level3',  # /W3
                    'Level4'],  # /W4
                   new=['EnableAllWarnings']))  # /Wall

# Options found in MSVS that have been renamed in MSBuild.
_Renamed(_compile, 'EnableFunctionLevelLinking', 'FunctionLevelLinking', _boolean) # /Gy _Renamed(_compile, 'EnableIntrinsicFunctions', 'IntrinsicFunctions', _boolean) # /Oi _Renamed(_compile, 'KeepComments', 'PreprocessKeepComments', _boolean) # /C _Renamed(_compile, 'ObjectFile', 'ObjectFileName', _file_name) # /Fo _Renamed(_compile, 'OpenMP', 'OpenMPSupport', _boolean) # /openmp _Renamed(_compile, 'PrecompiledHeaderThrough', 'PrecompiledHeaderFile', _file_name) # Used with /Yc and /Yu _Renamed(_compile, 'PrecompiledHeaderFile', 'PrecompiledHeaderOutputFile', _file_name) # /Fp _Renamed(_compile, 'UsePrecompiledHeader', 'PrecompiledHeader', _Enumeration(['NotUsing', # VS recognized '' for this value too. 'Create', # /Yc 'Use'])) # /Yu _Renamed(_compile, 'WarnAsError', 'TreatWarningAsError', _boolean) # /WX _ConvertedToAdditionalOption(_compile, 'DefaultCharIsUnsigned', '/J') # MSVS options not found in MSBuild. _MSVSOnly(_compile, 'Detect64BitPortabilityProblems', _boolean) _MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean) # MSBuild options not found in MSVS. _MSBuildOnly(_compile, 'BuildingInIDE', _boolean) _MSBuildOnly(_compile, 'CompileAsManaged', _Enumeration([], new=['false', 'true'])) # /clr _MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean) # /hotpatch _MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean) # /MP _MSBuildOnly(_compile, 'PreprocessOutputPath', _string) # /Fi _MSBuildOnly(_compile, 'ProcessorNumber', _integer) # the number of processors _MSBuildOnly(_compile, 'TrackerLogDirectory', _folder_name) _MSBuildOnly(_compile, 'TreatSpecificWarningsAsErrors', _string_list) # /we _MSBuildOnly(_compile, 'UseUnicodeForAssemblerListing', _boolean) # /FAu # Defines a setting that needs very customized processing _CustomGeneratePreprocessedFile(_compile, 'GeneratePreprocessedFile') # Directives for converting MSVS VCLinkerTool to MSBuild Link. 
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for
# the schema of the MSBuild Link settings.

# Options that have the same name in MSVS and MSBuild
_Same(_link, 'AdditionalDependencies', _file_list)
_Same(_link, 'AdditionalLibraryDirectories', _folder_list)  # /LIBPATH
#  /MANIFESTDEPENDENCY:
_Same(_link, 'AdditionalManifestDependencies', _file_list)
_Same(_link, 'AdditionalOptions', _string_list)
_Same(_link, 'AddModuleNamesToAssembly', _file_list)  # /ASSEMBLYMODULE
_Same(_link, 'AllowIsolation', _boolean)  # /ALLOWISOLATION
_Same(_link, 'AssemblyLinkResource', _file_list)  # /ASSEMBLYLINKRESOURCE
_Same(_link, 'BaseAddress', _string)  # /BASE
_Same(_link, 'CLRUnmanagedCodeCheck', _boolean)  # /CLRUNMANAGEDCODECHECK
_Same(_link, 'DelayLoadDLLs', _file_list)  # /DELAYLOAD
_Same(_link, 'DelaySign', _boolean)  # /DELAYSIGN
_Same(_link, 'EmbedManagedResourceFile', _file_list)  # /ASSEMBLYRESOURCE
_Same(_link, 'EnableUAC', _boolean)  # /MANIFESTUAC
_Same(_link, 'EntryPointSymbol', _string)  # /ENTRY
_Same(_link, 'ForceSymbolReferences', _file_list)  # /INCLUDE
_Same(_link, 'FunctionOrder', _file_name)  # /ORDER
_Same(_link, 'GenerateDebugInformation', _boolean)  # /DEBUG
_Same(_link, 'GenerateMapFile', _boolean)  # /MAP
_Same(_link, 'HeapCommitSize', _string)
_Same(_link, 'HeapReserveSize', _string)  # /HEAP
_Same(_link, 'IgnoreAllDefaultLibraries', _boolean)  # /NODEFAULTLIB
_Same(_link, 'IgnoreEmbeddedIDL', _boolean)  # /IGNOREIDL
_Same(_link, 'ImportLibrary', _file_name)  # /IMPLIB
_Same(_link, 'KeyContainer', _file_name)  # /KEYCONTAINER
_Same(_link, 'KeyFile', _file_name)  # /KEYFILE
_Same(_link, 'ManifestFile', _file_name)  # /ManifestFile
_Same(_link, 'MapExports', _boolean)  # /MAPINFO:EXPORTS
_Same(_link, 'MapFileName', _file_name)
_Same(_link, 'MergedIDLBaseFileName', _file_name)  # /IDLOUT
_Same(_link, 'MergeSections', _string)  # /MERGE
_Same(_link, 'MidlCommandFile', _file_name)  # /MIDL
_Same(_link, 'ModuleDefinitionFile', _file_name)  # /DEF
_Same(_link, 'OutputFile', _file_name)  # /OUT
_Same(_link, 'PerUserRedirection', _boolean)
_Same(_link, 'Profile', _boolean)  # /PROFILE
_Same(_link, 'ProfileGuidedDatabase', _file_name)  # /PGD
_Same(_link, 'ProgramDatabaseFile', _file_name)  # /PDB
_Same(_link, 'RegisterOutput', _boolean)
_Same(_link, 'SetChecksum', _boolean)  # /RELEASE
_Same(_link, 'StackCommitSize', _string)
_Same(_link, 'StackReserveSize', _string)  # /STACK
_Same(_link, 'StripPrivateSymbols', _file_name)  # /PDBSTRIPPED
_Same(_link, 'SupportUnloadOfDelayLoadedDLL', _boolean)  # /DELAY:UNLOAD
_Same(_link, 'SuppressStartupBanner', _boolean)  # /NOLOGO
_Same(_link, 'SwapRunFromCD', _boolean)  # /SWAPRUN:CD
_Same(_link, 'TurnOffAssemblyGeneration', _boolean)  # /NOASSEMBLY
_Same(_link, 'TypeLibraryFile', _file_name)  # /TLBOUT
_Same(_link, 'TypeLibraryResourceID', _integer)  # /TLBID
_Same(_link, 'UACUIAccess', _boolean)  # /uiAccess='true'
_Same(_link, 'Version', _string)  # /VERSION

# Settings that changed from numeric (0/1/2) to boolean between MSVS and
# MSBuild (see _newly_boolean above).
_Same(_link, 'EnableCOMDATFolding', _newly_boolean)  # /OPT:ICF
_Same(_link, 'FixedBaseAddress', _newly_boolean)  # /FIXED
_Same(_link, 'LargeAddressAware', _newly_boolean)  # /LARGEADDRESSAWARE
_Same(_link, 'OptimizeReferences', _newly_boolean)  # /OPT:REF
_Same(_link, 'RandomizedBaseAddress', _newly_boolean)  # /DYNAMICBASE
_Same(_link, 'TerminalServerAware', _newly_boolean)  # /TSAWARE

# Shared between Link and Lib (see _MSBuildOnly(_lib, 'SubSystem', ...)).
_subsystem_enumeration = _Enumeration(
    ['NotSet',
     'Console',  # /SUBSYSTEM:CONSOLE
     'Windows',  # /SUBSYSTEM:WINDOWS
     'Native',  # /SUBSYSTEM:NATIVE
     'EFI Application',  # /SUBSYSTEM:EFI_APPLICATION
     'EFI Boot Service Driver',  # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER
     'EFI ROM',  # /SUBSYSTEM:EFI_ROM
     'EFI Runtime',  # /SUBSYSTEM:EFI_RUNTIME_DRIVER
     'WindowsCE'],  # /SUBSYSTEM:WINDOWSCE
    new=['POSIX'])  # /SUBSYSTEM:POSIX

# None entries are numeric values with no corresponding MSBuild name.
_target_machine_enumeration = _Enumeration(
    ['NotSet',
     'MachineX86',  # /MACHINE:X86
     None,
     'MachineARM',  # /MACHINE:ARM
     'MachineEBC',  # /MACHINE:EBC
     'MachineIA64',  # /MACHINE:IA64
     None,
     'MachineMIPS',  # /MACHINE:MIPS
     'MachineMIPS16',  # /MACHINE:MIPS16
     'MachineMIPSFPU',  # /MACHINE:MIPSFPU
     'MachineMIPSFPU16',  # /MACHINE:MIPSFPU16
     None,
     None,
     None,
     'MachineSH4',  # /MACHINE:SH4
     None,
     'MachineTHUMB',  # /MACHINE:THUMB
     'MachineX64'])  # /MACHINE:X64

_Same(_link, 'AssemblyDebug',
      _Enumeration(['',
                    'true',  # /ASSEMBLYDEBUG
                    'false']))  # /ASSEMBLYDEBUG:DISABLE
_Same(_link, 'CLRImageType',
      _Enumeration(['Default',
                    'ForceIJWImage',  # /CLRIMAGETYPE:IJW
                    'ForcePureILImage',  # /Switch="CLRIMAGETYPE:PURE
                    'ForceSafeILImage']))  # /Switch="CLRIMAGETYPE:SAFE
_Same(_link, 'CLRThreadAttribute',
      _Enumeration(['DefaultThreadingAttribute',  # /CLRTHREADATTRIBUTE:NONE
                    'MTAThreadingAttribute',  # /CLRTHREADATTRIBUTE:MTA
                    'STAThreadingAttribute']))  # /CLRTHREADATTRIBUTE:STA
_Same(_link, 'DataExecutionPrevention',
      _Enumeration(['',
                    'false',  # /NXCOMPAT:NO
                    'true']))  # /NXCOMPAT
_Same(_link, 'Driver',
      _Enumeration(['NotSet',
                    'Driver',  # /Driver
                    'UpOnly',  # /DRIVER:UPONLY
                    'WDM']))  # /DRIVER:WDM
_Same(_link, 'LinkTimeCodeGeneration',
      _Enumeration(['Default',
                    'UseLinkTimeCodeGeneration',  # /LTCG
                    'PGInstrument',  # /LTCG:PGInstrument
                    'PGOptimization',  # /LTCG:PGOptimize
                    'PGUpdate']))  # /LTCG:PGUpdate
_Same(_link, 'ShowProgress',
      _Enumeration(['NotSet',
                    'LinkVerbose',  # /VERBOSE
                    'LinkVerboseLib'],  # /VERBOSE:Lib
                   new=['LinkVerboseICF',  # /VERBOSE:ICF
                        'LinkVerboseREF',  # /VERBOSE:REF
                        'LinkVerboseSAFESEH',  # /VERBOSE:SAFESEH
                        'LinkVerboseCLR']))  # /VERBOSE:CLR
_Same(_link, 'SubSystem', _subsystem_enumeration)
_Same(_link, 'TargetMachine', _target_machine_enumeration)
_Same(_link, 'UACExecutionLevel',
      _Enumeration(['AsInvoker',  # /level='asInvoker'
                    'HighestAvailable',  # /level='highestAvailable'
                    'RequireAdministrator']))  # /level='requireAdministrator'
_Same(_link, 'MinimumRequiredVersion', _string)
_Same(_link, 'TreatLinkerWarningAsErrors', _boolean)  # /WX


# Options found in MSVS that have been renamed in MSBuild.
# Link settings that were renamed, moved to another section, or exist on only
# one side of the MSVS/MSBuild conversion.
_Renamed(_link, 'ErrorReporting', 'LinkErrorReporting',
         _Enumeration(['NoErrorReport',  # /ERRORREPORT:NONE
                       'PromptImmediately',  # /ERRORREPORT:PROMPT
                       'QueueForNextLogin'],  # /ERRORREPORT:QUEUE
                      new=['SendErrorReport']))  # /ERRORREPORT:SEND
_Renamed(_link, 'IgnoreDefaultLibraryNames', 'IgnoreSpecificDefaultLibraries',
         _file_list)  # /NODEFAULTLIB
_Renamed(_link, 'ResourceOnlyDLL', 'NoEntryPoint', _boolean)  # /NOENTRY
_Renamed(_link, 'SwapRunFromNet', 'SwapRunFromNET', _boolean)  # /SWAPRUN:NET

# '' as the destination tool name places the setting in the global section.
_Moved(_link, 'GenerateManifest', '', _boolean)
_Moved(_link, 'IgnoreImportLibrary', '', _boolean)
_Moved(_link, 'LinkIncremental', '', _newly_boolean)
_Moved(_link, 'LinkLibraryDependencies', 'ProjectReference', _boolean)
_Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean)

# MSVS options not found in MSBuild.
_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean)
_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean)

# MSBuild options not found in MSVS.
_MSBuildOnly(_link, 'BuildingInIDE', _boolean)
_MSBuildOnly(_link, 'ImageHasSafeExceptionHandlers', _boolean)  # /SAFESEH
_MSBuildOnly(_link, 'LinkDLL', _boolean)  # /DLL Visible='false'
_MSBuildOnly(_link, 'LinkStatus', _boolean)  # /LTCG:STATUS
_MSBuildOnly(_link, 'PreventDllBinding', _boolean)  # /ALLOWBIND
_MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean)  # /DELAY:NOBIND
_MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_link, 'MSDOSStubFileName', _file_name)  # /STUB Visible='false'
_MSBuildOnly(_link, 'SectionAlignment', _integer)  # /ALIGN
_MSBuildOnly(_link, 'SpecifySectionAttributes', _string)  # /SECTION
_MSBuildOnly(_link, 'ForceFileOutput',
             _Enumeration([], new=['Enabled',  # /FORCE
                                   # /FORCE:MULTIPLE
                                   'MultiplyDefinedSymbolOnly',
                                   'UndefinedSymbolOnly']))  # /FORCE:UNRESOLVED
_MSBuildOnly(_link, 'CreateHotPatchableImage',
             _Enumeration([], new=['Enabled',  # /FUNCTIONPADMIN
                                   'X86Image',  # /FUNCTIONPADMIN:5
                                   'X64Image',  # /FUNCTIONPADMIN:6
                                   'ItaniumImage']))  # /FUNCTIONPADMIN:16
_MSBuildOnly(_link, 'CLRSupportLastError',
             _Enumeration([], new=['Enabled',  # /CLRSupportLastError
                                   'Disabled',  # /CLRSupportLastError:NO
                                   # /CLRSupportLastError:SYSTEMDLL
                                   'SystemDlls']))


# Directives for converting VCResourceCompilerTool to ResourceCompile.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for
# the schema of the MSBuild ResourceCompile settings.

_Same(_rc, 'AdditionalOptions', _string_list)
_Same(_rc, 'AdditionalIncludeDirectories', _folder_list)  # /I
# Culture is written in hex on the MSBuild side (msbuild_base=16).
_Same(_rc, 'Culture', _Integer(msbuild_base=16))
_Same(_rc, 'IgnoreStandardIncludePath', _boolean)  # /X
_Same(_rc, 'PreprocessorDefinitions', _string_list)  # /D
_Same(_rc, 'ResourceOutputFileName', _string)  # /fo
_Same(_rc, 'ShowProgress', _boolean)  # /v
# There is no UI in VisualStudio 2008 to set the following properties.
# However they are found in CL and other tools.  Include them here for
# completeness, as they are very likely to have the same usage pattern.
_Same(_rc, 'SuppressStartupBanner', _boolean)  # /nologo
_Same(_rc, 'UndefinePreprocessorDefinitions', _string_list)  # /u

# MSBuild options not found in MSVS.
_MSBuildOnly(_rc, 'NullTerminateStrings', _boolean)  # /n
_MSBuildOnly(_rc, 'TrackerLogDirectory', _folder_name)


# Directives for converting VCMIDLTool to Midl.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for
# the schema of the MSBuild Midl settings.
# Midl settings table (trailing comments give the midl.exe flag).
_Same(_midl, 'AdditionalIncludeDirectories', _folder_list)  # /I
_Same(_midl, 'AdditionalOptions', _string_list)
_Same(_midl, 'CPreprocessOptions', _string)  # /cpp_opt
_Same(_midl, 'ErrorCheckAllocations', _boolean)  # /error allocation
_Same(_midl, 'ErrorCheckBounds', _boolean)  # /error bounds_check
_Same(_midl, 'ErrorCheckEnumRange', _boolean)  # /error enum
_Same(_midl, 'ErrorCheckRefPointers', _boolean)  # /error ref
_Same(_midl, 'ErrorCheckStubData', _boolean)  # /error stub_data
_Same(_midl, 'GenerateStublessProxies', _boolean)  # /Oicf
_Same(_midl, 'GenerateTypeLibrary', _boolean)
_Same(_midl, 'HeaderFileName', _file_name)  # /h
_Same(_midl, 'IgnoreStandardIncludePath', _boolean)  # /no_def_idir
_Same(_midl, 'InterfaceIdentifierFileName', _file_name)  # /iid
_Same(_midl, 'MkTypLibCompatible', _boolean)  # /mktyplib203
_Same(_midl, 'OutputDirectory', _string)  # /out
_Same(_midl, 'PreprocessorDefinitions', _string_list)  # /D
_Same(_midl, 'ProxyFileName', _file_name)  # /proxy
_Same(_midl, 'RedirectOutputAndErrors', _file_name)  # /o
_Same(_midl, 'SuppressStartupBanner', _boolean)  # /nologo
_Same(_midl, 'TypeLibraryName', _file_name)  # /tlb
_Same(_midl, 'UndefinePreprocessorDefinitions', _string_list)  # /U
_Same(_midl, 'WarnAsError', _boolean)  # /WX

_Same(_midl, 'DefaultCharType',
      _Enumeration(['Unsigned',  # /char unsigned
                    'Signed',  # /char signed
                    'Ascii']))  # /char ascii7
_Same(_midl, 'TargetEnvironment',
      _Enumeration(['NotSet',
                    'Win32',  # /env win32
                    'Itanium',  # /env ia64
                    'X64']))  # /env x64
_Same(_midl, 'EnableErrorChecks',
      _Enumeration(['EnableCustom',
                    'None',  # /error none
                    'All']))  # /error all
_Same(_midl, 'StructMemberAlignment',
      _Enumeration(['NotSet',
                    '1',  # Zp1
                    '2',  # Zp2
                    '4',  # Zp4
                    '8']))  # Zp8
_Same(_midl, 'WarningLevel',
      _Enumeration(['0',  # /W0
                    '1',  # /W1
                    '2',  # /W2
                    '3',  # /W3
                    '4']))  # /W4

_Renamed(_midl, 'DLLDataFileName', 'DllDataFileName', _file_name)  # /dlldata
_Renamed(_midl, 'ValidateParameters', 'ValidateAllParameters',
         _boolean)  # /robust

# MSBuild options not found in MSVS.
_MSBuildOnly(_midl, 'ApplicationConfigurationMode', _boolean)  # /app_config
_MSBuildOnly(_midl, 'ClientStubFile', _file_name)  # /cstub
_MSBuildOnly(_midl, 'GenerateClientFiles',
             _Enumeration([], new=['Stub',  # /client stub
                                   'None']))  # /client none
_MSBuildOnly(_midl, 'GenerateServerFiles',
             _Enumeration([], new=['Stub',  # /client stub
                                   'None']))  # /client none
_MSBuildOnly(_midl, 'LocaleID', _integer)  # /lcid DECIMAL
_MSBuildOnly(_midl, 'ServerStubFile', _file_name)  # /sstub
_MSBuildOnly(_midl, 'SuppressCompilerWarnings', _boolean)  # /no_warn
_MSBuildOnly(_midl, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_midl, 'TypeLibFormat',
             _Enumeration([], new=['NewFormat',  # /newtlb
                                   'OldFormat']))  # /oldtlb


# Directives for converting VCLibrarianTool to Lib.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for
# the schema of the MSBuild Lib settings.

_Same(_lib, 'AdditionalDependencies', _file_list)
_Same(_lib, 'AdditionalLibraryDirectories', _folder_list)  # /LIBPATH
_Same(_lib, 'AdditionalOptions', _string_list)
_Same(_lib, 'ExportNamedFunctions', _string_list)  # /EXPORT
_Same(_lib, 'ForceSymbolReferences', _string)  # /INCLUDE
_Same(_lib, 'IgnoreAllDefaultLibraries', _boolean)  # /NODEFAULTLIB
_Same(_lib, 'IgnoreSpecificDefaultLibraries', _file_list)  # /NODEFAULTLIB
_Same(_lib, 'ModuleDefinitionFile', _file_name)  # /DEF
_Same(_lib, 'OutputFile', _file_name)  # /OUT
_Same(_lib, 'SuppressStartupBanner', _boolean)  # /NOLOGO
_Same(_lib, 'UseUnicodeResponseFiles', _boolean)
_Same(_lib, 'LinkTimeCodeGeneration', _boolean)  # /LTCG
_Same(_lib, 'TargetMachine', _target_machine_enumeration)

# TODO(jeanluc) _link defines the same value that gets moved to
# ProjectReference.  We may want to validate that they are consistent.
_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean)

_MSBuildOnly(_lib, 'DisplayLibrary', _string)  # /LIST Visible='false'
_MSBuildOnly(_lib, 'ErrorReporting',
             _Enumeration([], new=['PromptImmediately',  # /ERRORREPORT:PROMPT
                                   'QueueForNextLogin',  # /ERRORREPORT:QUEUE
                                   'SendErrorReport',  # /ERRORREPORT:SEND
                                   'NoErrorReport']))  # /ERRORREPORT:NONE
_MSBuildOnly(_lib, 'MinimumRequiredVersion', _string)
_MSBuildOnly(_lib, 'Name', _file_name)  # /NAME
_MSBuildOnly(_lib, 'RemoveObjects', _file_list)  # /REMOVE
_MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration)
_MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name)
_MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean)  # /WX
_MSBuildOnly(_lib, 'Verbose', _boolean)


# Directives for converting VCManifestTool to Mt.
# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for
# the schema of the MSBuild Lib settings.

# Options that have the same name in MSVS and MSBuild
_Same(_manifest, 'AdditionalManifestFiles', _file_list)  # /manifest
_Same(_manifest, 'AdditionalOptions', _string_list)
_Same(_manifest, 'AssemblyIdentity', _string)  # /identity:
_Same(_manifest, 'ComponentFileName', _file_name)  # /dll
_Same(_manifest, 'GenerateCatalogFiles', _boolean)  # /makecdfs
_Same(_manifest, 'InputResourceManifests', _string)  # /inputresource
_Same(_manifest, 'OutputManifestFile', _file_name)  # /out
_Same(_manifest, 'RegistrarScriptFile', _file_name)  # /rgs
_Same(_manifest, 'ReplacementsFile', _file_name)  # /replacements
_Same(_manifest, 'SuppressStartupBanner', _boolean)  # /nologo
_Same(_manifest, 'TypeLibraryFile', _file_name)  # /tlb:
_Same(_manifest, 'UpdateFileHashes', _boolean)  # /hashupdate
_Same(_manifest, 'UpdateFileHashesSearchPath', _file_name)
_Same(_manifest, 'VerboseOutput', _boolean)  # /verbose

# Options that have moved location.
# Manifest settings that moved to the ManifestResourceCompile / global
# sections, the one-sided manifest settings, and the MASM table.
_MovedAndRenamed(_manifest, 'ManifestResourceFile',
                 'ManifestResourceCompile',
                 'ResourceOutputFileName',
                 _file_name)
_Moved(_manifest, 'EmbedManifest', '', _boolean)

# MSVS options not found in MSBuild.
_MSVSOnly(_manifest, 'DependencyInformationFile', _file_name)
_MSVSOnly(_manifest, 'UseFAT32Workaround', _boolean)
_MSVSOnly(_manifest, 'UseUnicodeResponseFiles', _boolean)

# MSBuild options not found in MSVS.
_MSBuildOnly(_manifest, 'EnableDPIAwareness', _boolean)
_MSBuildOnly(_manifest, 'GenerateCategoryTags', _boolean)  # /category
_MSBuildOnly(_manifest, 'ManifestFromManagedAssembly',
             _file_name)  # /managedassemblyname
_MSBuildOnly(_manifest, 'OutputResourceManifests', _string)  # /outputresource
_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean)  # /nodependency
_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name)


# Directives for MASM.
# See "$(VCTargetsPath)\BuildCustomizations\masm.xml" for the schema of the
# MSBuild MASM settings.

# Options that have the same name in MSVS and MSBuild.
_Same(_masm, 'UseSafeExceptionHandlers', _boolean)  # /safeseh
mit
eiginn/passpie
passpie/table.py
3
1435
from tabulate import tabulate import click class Table(object): def __init__(self, headers, table_format='rst', colors=None, hidden=None, missing=None, hidden_string="*****"): self.headers = headers self.colors = colors if colors else {} self.hidden = hidden if hidden else [] self.hidden_string = hidden_string self.table_format = table_format self.missing = missing def colorize(self, key, text): return click.style(text, fg=self.colors.get(key)) def render(self, data): data = sorted(data, key=lambda c: c[self.headers[0]]) rows = [] for entry in data: row = [] for header in self.headers: if header in self.hidden: entry[header] = self.hidden_string elif header in self.colors: text = entry[header] entry[header] = self.colorize(header, text) row.append(entry[header]) rows.append(row) headers = [click.style(h.title(), bold=True) for h in self.headers] if rows: return tabulate(rows, headers, tablefmt=self.table_format, missingval=self.missing, numalign='left')
mit
garnaat/boto
boto/ec2/spotinstancerequest.py
170
7288
# Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/ # Copyright (c) 2010, Eucalyptus Systems, Inc. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. """ Represents an EC2 Spot Instance Request """ from boto.ec2.ec2object import TaggedEC2Object from boto.ec2.launchspecification import LaunchSpecification class SpotInstanceStateFault(object): """ The fault codes for the Spot Instance request, if any. :ivar code: The reason code for the Spot Instance state change. :ivar message: The message for the Spot Instance state change. """ def __init__(self, code=None, message=None): self.code = code self.message = message def __repr__(self): return '(%s, %s)' % (self.code, self.message) def startElement(self, name, attrs, connection): return None def endElement(self, name, value, connection): if name == 'code': self.code = value elif name == 'message': self.message = value setattr(self, name, value) class SpotInstanceStatus(object): """ Contains the status of a Spot Instance Request. 
:ivar code: Status code of the request. :ivar message: The description for the status code for the Spot request. :ivar update_time: Time the status was stated. """ def __init__(self, code=None, update_time=None, message=None): self.code = code self.update_time = update_time self.message = message def __repr__(self): return '<Status: %s>' % self.code def startElement(self, name, attrs, connection): return None def endElement(self, name, value, connection): if name == 'code': self.code = value elif name == 'message': self.message = value elif name == 'updateTime': self.update_time = value class SpotInstanceRequest(TaggedEC2Object): """ :ivar id: The ID of the Spot Instance Request. :ivar price: The maximum hourly price for any Spot Instance launched to fulfill the request. :ivar type: The Spot Instance request type. :ivar state: The state of the Spot Instance request. :ivar fault: The fault codes for the Spot Instance request, if any. :ivar valid_from: The start date of the request. If this is a one-time request, the request becomes active at this date and time and remains active until all instances launch, the request expires, or the request is canceled. If the request is persistent, the request becomes active at this date and time and remains active until it expires or is canceled. :ivar valid_until: The end date of the request. If this is a one-time request, the request remains active until all instances launch, the request is canceled, or this date is reached. If the request is persistent, it remains active until it is canceled or this date is reached. :ivar launch_group: The instance launch group. Launch groups are Spot Instances that launch together and terminate together. :ivar launched_availability_zone: foo :ivar product_description: The Availability Zone in which the bid is launched. :ivar availability_zone_group: The Availability Zone group. 
If you specify the same Availability Zone group for all Spot Instance requests, all Spot Instances are launched in the same Availability Zone. :ivar create_time: The time stamp when the Spot Instance request was created. :ivar launch_specification: Additional information for launching instances. :ivar instance_id: The instance ID, if an instance has been launched to fulfill the Spot Instance request. :ivar status: The status code and status message describing the Spot Instance request. """ def __init__(self, connection=None): super(SpotInstanceRequest, self).__init__(connection) self.id = None self.price = None self.type = None self.state = None self.fault = None self.valid_from = None self.valid_until = None self.launch_group = None self.launched_availability_zone = None self.product_description = None self.availability_zone_group = None self.create_time = None self.launch_specification = None self.instance_id = None self.status = None def __repr__(self): return 'SpotInstanceRequest:%s' % self.id def startElement(self, name, attrs, connection): retval = super(SpotInstanceRequest, self).startElement(name, attrs, connection) if retval is not None: return retval if name == 'launchSpecification': self.launch_specification = LaunchSpecification(connection) return self.launch_specification elif name == 'fault': self.fault = SpotInstanceStateFault() return self.fault elif name == 'status': self.status = SpotInstanceStatus() return self.status else: return None def endElement(self, name, value, connection): if name == 'spotInstanceRequestId': self.id = value elif name == 'spotPrice': self.price = float(value) elif name == 'type': self.type = value elif name == 'state': self.state = value elif name == 'validFrom': self.valid_from = value elif name == 'validUntil': self.valid_until = value elif name == 'launchGroup': self.launch_group = value elif name == 'availabilityZoneGroup': self.availability_zone_group = value elif name == 'launchedAvailabilityZone': 
self.launched_availability_zone = value elif name == 'instanceId': self.instance_id = value elif name == 'createTime': self.create_time = value elif name == 'productDescription': self.product_description = value else: setattr(self, name, value) def cancel(self, dry_run=False): self.connection.cancel_spot_instance_requests( [self.id], dry_run=dry_run )
mit
vmarkovtsev/django
tests/migrations/test_optimizer.py
118
22678
# -*- coding: utf-8 -*- from django.db import migrations, models from django.db.migrations.optimizer import MigrationOptimizer from django.test import SimpleTestCase from .models import CustomModelBase, EmptyManager class OptimizerTests(SimpleTestCase): """ Tests the migration autodetector. """ def optimize(self, operations): """ Handy shortcut for getting results + number of loops """ optimizer = MigrationOptimizer() return optimizer.optimize(operations), optimizer._iterations def assertOptimizesTo(self, operations, expected, exact=None, less_than=None): result, iterations = self.optimize(operations) result = [repr(f.deconstruct()) for f in result] expected = [repr(f.deconstruct()) for f in expected] self.assertEqual(expected, result) if exact is not None and iterations != exact: raise self.failureException( "Optimization did not take exactly %s iterations (it took %s)" % (exact, iterations) ) if less_than is not None and iterations >= less_than: raise self.failureException( "Optimization did not take less than %s iterations (it took %s)" % (less_than, iterations) ) def assertDoesNotOptimize(self, operations): self.assertOptimizesTo(operations, operations) def test_single(self): """ Tests that the optimizer does nothing on a single operation, and that it does it in just one pass. """ self.assertOptimizesTo( [migrations.DeleteModel("Foo")], [migrations.DeleteModel("Foo")], exact=1, ) def test_create_delete_model(self): """ CreateModel and DeleteModel should collapse into nothing. """ self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.DeleteModel("Foo"), ], [], ) def test_create_rename_model(self): """ CreateModel should absorb RenameModels. 
""" managers = [('objects', EmptyManager())] self.assertOptimizesTo( [ migrations.CreateModel( name="Foo", fields=[("name", models.CharField(max_length=255))], options={'verbose_name': 'Foo'}, bases=(CustomModelBase), managers=managers, ), migrations.RenameModel("Foo", "Bar"), ], [ migrations.CreateModel( "Bar", [("name", models.CharField(max_length=255))], options={'verbose_name': 'Foo'}, bases=(CustomModelBase), managers=managers, ) ], ) def test_rename_model_self(self): """ RenameModels should absorb themselves. """ self.assertOptimizesTo( [ migrations.RenameModel("Foo", "Baa"), migrations.RenameModel("Baa", "Bar"), ], [ migrations.RenameModel("Foo", "Bar"), ], ) def _test_create_alter_foo_delete_model(self, alter_foo): """ CreateModel, AlterModelTable, AlterUniqueTogether/AlterIndexTogether/ AlterOrderWithRespectTo, and DeleteModel should collapse into nothing. """ self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.AlterModelTable("Foo", "woohoo"), alter_foo, migrations.DeleteModel("Foo"), ], [], ) def test_create_alter_unique_delete_model(self): self._test_create_alter_foo_delete_model(migrations.AlterUniqueTogether("Foo", [["a", "b"]])) def test_create_alter_index_delete_model(self): self._test_create_alter_foo_delete_model(migrations.AlterIndexTogether("Foo", [["a", "b"]])) def test_create_alter_owrt_delete_model(self): self._test_create_alter_foo_delete_model(migrations.AlterOrderWithRespectTo("Foo", "a")) def _test_alter_alter_model(self, alter_foo, alter_bar): """ Two AlterUniqueTogether/AlterIndexTogether/AlterOrderWithRespectTo should collapse into the second. 
""" self.assertOptimizesTo( [ alter_foo, alter_bar, ], [ alter_bar, ], ) def test_alter_alter_table_model(self): self._test_alter_alter_model( migrations.AlterModelTable("Foo", "a"), migrations.AlterModelTable("Foo", "b"), ) def test_alter_alter_unique_model(self): self._test_alter_alter_model( migrations.AlterUniqueTogether("Foo", [["a", "b"]]), migrations.AlterUniqueTogether("Foo", [["a", "c"]]), ) def test_alter_alter_index_model(self): self._test_alter_alter_model( migrations.AlterIndexTogether("Foo", [["a", "b"]]), migrations.AlterIndexTogether("Foo", [["a", "c"]]), ) def test_alter_alter_owrt_model(self): self._test_alter_alter_model( migrations.AlterOrderWithRespectTo("Foo", "a"), migrations.AlterOrderWithRespectTo("Foo", "b"), ) def test_optimize_through_create(self): """ We should be able to optimize away create/delete through a create or delete of a different model, but only if the create operation does not mention the model at all. """ # These should work self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())]), migrations.DeleteModel("Foo"), ], [ migrations.CreateModel("Bar", [("size", models.IntegerField())]), ], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())]), migrations.DeleteModel("Bar"), migrations.DeleteModel("Foo"), ], [], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())]), migrations.DeleteModel("Foo"), migrations.DeleteModel("Bar"), ], [], ) # This should not work - FK should block it self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]), 
migrations.DeleteModel("Foo"), ], [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("other", models.ForeignKey("testapp.Foo", models.CASCADE))]), migrations.DeleteModel("Foo"), ], ) # This should not work - bases should block it self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo", )), migrations.DeleteModel("Foo"), ], [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())], bases=("testapp.Foo", )), migrations.DeleteModel("Foo"), ], ) def test_create_model_add_field(self): """ AddField should optimize into CreateModel. """ managers = [('objects', EmptyManager())] self.assertOptimizesTo( [ migrations.CreateModel( name="Foo", fields=[("name", models.CharField(max_length=255))], options={'verbose_name': 'Foo'}, bases=(CustomModelBase), managers=managers, ), migrations.AddField("Foo", "age", models.IntegerField()), ], [ migrations.CreateModel( name="Foo", fields=[ ("name", models.CharField(max_length=255)), ("age", models.IntegerField()), ], options={'verbose_name': 'Foo'}, bases=(CustomModelBase), managers=managers, ), ], ) def test_create_model_add_field_not_through_fk(self): """ AddField should NOT optimize into CreateModel if it's an FK to a model that's between them. 
""" self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Link", [("url", models.TextField())]), migrations.AddField("Foo", "link", models.ForeignKey("migrations.Link", models.CASCADE)), ], [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Link", [("url", models.TextField())]), migrations.AddField("Foo", "link", models.ForeignKey("migrations.Link", models.CASCADE)), ], ) def test_create_model_add_field_not_through_m2m_through(self): """ AddField should NOT optimize into CreateModel if it's an M2M using a through that's created between them. """ # Note: The middle model is not actually a valid through model, # but that doesn't matter, as we never render it. self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("LinkThrough", []), migrations.AddField( "Foo", "link", models.ManyToManyField("migrations.Link", through="migrations.LinkThrough") ), ], [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("LinkThrough", []), migrations.AddField( "Foo", "link", models.ManyToManyField("migrations.Link", through="migrations.LinkThrough") ), ], ) def test_create_model_alter_field(self): """ AlterField should optimize into CreateModel. """ managers = [('objects', EmptyManager())] self.assertOptimizesTo( [ migrations.CreateModel( name="Foo", fields=[("name", models.CharField(max_length=255))], options={'verbose_name': 'Foo'}, bases=(CustomModelBase), managers=managers, ), migrations.AlterField("Foo", "name", models.IntegerField()), ], [ migrations.CreateModel( name="Foo", fields=[ ("name", models.IntegerField()), ], options={'verbose_name': 'Foo'}, bases=(CustomModelBase), managers=managers, ), ], ) def test_create_model_rename_field(self): """ RenameField should optimize into CreateModel. 
""" managers = [('objects', EmptyManager())] self.assertOptimizesTo( [ migrations.CreateModel( name="Foo", fields=[("name", models.CharField(max_length=255))], options={'verbose_name': 'Foo'}, bases=(CustomModelBase), managers=managers, ), migrations.RenameField("Foo", "name", "title"), ], [ migrations.CreateModel( name="Foo", fields=[ ("title", models.CharField(max_length=255)), ], options={'verbose_name': 'Foo'}, bases=(CustomModelBase), managers=managers, ), ], ) def test_add_field_rename_field(self): """ RenameField should optimize into AddField """ self.assertOptimizesTo( [ migrations.AddField("Foo", "name", models.CharField(max_length=255)), migrations.RenameField("Foo", "name", "title"), ], [ migrations.AddField("Foo", "title", models.CharField(max_length=255)), ], ) def test_alter_field_rename_field(self): """ RenameField should optimize to the other side of AlterField, and into itself. """ self.assertOptimizesTo( [ migrations.AlterField("Foo", "name", models.CharField(max_length=255)), migrations.RenameField("Foo", "name", "title"), migrations.RenameField("Foo", "title", "nom"), ], [ migrations.RenameField("Foo", "name", "nom"), migrations.AlterField("Foo", "nom", models.CharField(max_length=255)), ], ) def test_create_model_remove_field(self): """ RemoveField should optimize into CreateModel. """ managers = [('objects', EmptyManager())] self.assertOptimizesTo( [ migrations.CreateModel( name="Foo", fields=[ ("name", models.CharField(max_length=255)), ("age", models.IntegerField()), ], options={'verbose_name': 'Foo'}, bases=(CustomModelBase), managers=managers, ), migrations.RemoveField("Foo", "age"), ], [ migrations.CreateModel( name="Foo", fields=[ ("name", models.CharField(max_length=255)), ], options={'verbose_name': 'Foo'}, bases=(CustomModelBase), managers=managers, ), ], ) def test_add_field_alter_field(self): """ AlterField should optimize into AddField. 
""" self.assertOptimizesTo( [ migrations.AddField("Foo", "age", models.IntegerField()), migrations.AlterField("Foo", "age", models.FloatField(default=2.4)), ], [ migrations.AddField("Foo", name="age", field=models.FloatField(default=2.4)), ], ) def test_add_field_delete_field(self): """ RemoveField should cancel AddField """ self.assertOptimizesTo( [ migrations.AddField("Foo", "age", models.IntegerField()), migrations.RemoveField("Foo", "age"), ], [], ) def test_alter_field_delete_field(self): """ RemoveField should absorb AlterField """ self.assertOptimizesTo( [ migrations.AlterField("Foo", "age", models.IntegerField()), migrations.RemoveField("Foo", "age"), ], [ migrations.RemoveField("Foo", "age"), ], ) def _test_create_alter_foo_field(self, alter): """ CreateModel, AlterFooTogether/AlterOrderWithRespectTo followed by an add/alter/rename field should optimize to CreateModel and the Alter* """ # AddField self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.AddField("Foo", "c", models.IntegerField()), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.IntegerField()), ]), alter, ], ) # AlterField self.assertDoesNotOptimize( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.AlterField("Foo", "b", models.CharField(max_length=255)), ], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.IntegerField()), ]), alter, migrations.AlterField("Foo", "c", models.CharField(max_length=255)), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.CharField(max_length=255)), ]), alter, ], ) # RenameField self.assertDoesNotOptimize( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, 
migrations.RenameField("Foo", "b", "c"), ], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.RenameField("Foo", "b", "x"), migrations.RenameField("Foo", "x", "c"), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.RenameField("Foo", "b", "c"), ], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.IntegerField()), ]), alter, migrations.RenameField("Foo", "c", "d"), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("d", models.IntegerField()), ]), alter, ], ) # RemoveField self.assertDoesNotOptimize( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, migrations.RemoveField("Foo", "b"), ], ) self.assertOptimizesTo( [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ("c", models.IntegerField()), ]), alter, migrations.RemoveField("Foo", "c"), ], [ migrations.CreateModel("Foo", [ ("a", models.IntegerField()), ("b", models.IntegerField()), ]), alter, ], ) def test_create_alter_unique_field(self): self._test_create_alter_foo_field(migrations.AlterUniqueTogether("Foo", [["a", "b"]])) def test_create_alter_index_field(self): self._test_create_alter_foo_field(migrations.AlterIndexTogether("Foo", [["a", "b"]])) def test_create_alter_owrt_field(self): self._test_create_alter_foo_field(migrations.AlterOrderWithRespectTo("Foo", "b")) def test_optimize_through_fields(self): """ Checks that field-level through checking is working. This should manage to collapse model Foo to nonexistence, and model Bar to a single IntegerField called "width". 
""" self.assertOptimizesTo( [ migrations.CreateModel("Foo", [("name", models.CharField(max_length=255))]), migrations.CreateModel("Bar", [("size", models.IntegerField())]), migrations.AddField("Foo", "age", models.IntegerField()), migrations.AddField("Bar", "width", models.IntegerField()), migrations.AlterField("Foo", "age", models.IntegerField()), migrations.RenameField("Bar", "size", "dimensions"), migrations.RemoveField("Foo", "age"), migrations.RenameModel("Foo", "Phou"), migrations.RemoveField("Bar", "dimensions"), migrations.RenameModel("Phou", "Fou"), migrations.DeleteModel("Fou"), ], [ migrations.CreateModel("Bar", [("width", models.IntegerField())]), ], )
bsd-3-clause
awatts/boto
tests/compat.py
115
1560
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # # Use unittest2 for older versions of Python try: import unittest2 as unittest except ImportError: import unittest # Use thirdt party ordereddict for older versions of Python try: from collections import OrderedDict except ImportError: from ordereddict import OrderedDict # Use standard unittest.mock if possible. (mock doesn't support Python 3.4) try: from unittest import mock except ImportError: import mock
mit
csitarichie/boost_msm_bare_metal
boost/tools/build/v2/tools/darwin.py
49
2455
# Copyright (C) Christopher Currie 2003. Permission to copy, use, # modify, sell and distribute this software is granted provided this # copyright notice appears in all copies. This software is provided # "as is" without express or implied warranty, and with no claim as to # its suitability for any purpose. # Please see http://article.gmane.org/gmane.comp.lib.boost.build/3389/ # for explanation why it's a separate toolset. import common, gcc, builtin from b2.build import feature, toolset, type, action, generators from b2.util.utility import * toolset.register ('darwin') toolset.inherit_generators ('darwin', [], 'gcc') toolset.inherit_flags ('darwin', 'gcc') toolset.inherit_rules ('darwin', 'gcc') def init (version = None, command = None, options = None): options = to_seq (options) condition = common.check_init_parameters ('darwin', None, ('version', version)) command = common.get_invocation_command ('darwin', 'g++', command) common.handle_options ('darwin', condition, command, options) gcc.init_link_flags ('darwin', 'darwin', condition) # Darwin has a different shared library suffix type.set_generated_target_suffix ('SHARED_LIB', ['<toolset>darwin'], 'dylib') # we need to be able to tell the type of .dylib files type.register_suffixes ('dylib', 'SHARED_LIB') feature.feature ('framework', [], ['free']) toolset.flags ('darwin.compile', 'OPTIONS', '<link>shared', ['-dynamic']) toolset.flags ('darwin.compile', 'OPTIONS', None, ['-Wno-long-double', '-no-cpp-precomp']) toolset.flags ('darwin.compile.c++', 'OPTIONS', None, ['-fcoalesce-templates']) toolset.flags ('darwin.link', 'FRAMEWORK', '<framework>') # This is flag is useful for debugging the link step # uncomment to see what libtool is doing under the hood # toolset.flags ('darwin.link.dll', 'OPTIONS', None, '[-Wl,-v']) action.register ('darwin.compile.cpp', None, ['$(CONFIG_COMMAND) $(ST_OPTIONS) -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -framework$(_)$(FRAMEWORK) 
$(OPTIONS)']) # TODO: how to set 'bind LIBRARIES'? action.register ('darwin.link.dll', None, ['$(CONFIG_COMMAND) -dynamiclib -L"$(LINKPATH)" -o "$(<)" "$(>)" "$(LIBRARIES)" -l$(FINDLIBS-SA) -l$(FINDLIBS-ST) -framework$(_)$(FRAMEWORK) $(OPTIONS)']) def darwin_archive (manager, targets, sources, properties): pass action.register ('darwin.archive', darwin_archive, ['ar -c -r -s $(ARFLAGS) "$(<:T)" "$(>:T)"'])
gpl-2.0
curtisstpierre/django
django/contrib/admin/checks.py
72
38589
# -*- coding: utf-8 -*- from __future__ import unicode_literals from itertools import chain from django.contrib.admin.utils import ( NotRelationField, flatten, get_fields_from_path, ) from django.core import checks from django.core.exceptions import FieldDoesNotExist from django.db import models from django.forms.models import ( BaseModelForm, BaseModelFormSet, _get_foreign_key, ) def check_admin_app(**kwargs): from django.contrib.admin.sites import system_check_errors return system_check_errors class BaseModelAdminChecks(object): def check(self, cls, model, **kwargs): errors = [] errors.extend(self._check_raw_id_fields(cls, model)) errors.extend(self._check_fields(cls, model)) errors.extend(self._check_fieldsets(cls, model)) errors.extend(self._check_exclude(cls, model)) errors.extend(self._check_form(cls, model)) errors.extend(self._check_filter_vertical(cls, model)) errors.extend(self._check_filter_horizontal(cls, model)) errors.extend(self._check_radio_fields(cls, model)) errors.extend(self._check_prepopulated_fields(cls, model)) errors.extend(self._check_view_on_site_url(cls, model)) errors.extend(self._check_ordering(cls, model)) errors.extend(self._check_readonly_fields(cls, model)) return errors def _check_raw_id_fields(self, cls, model): """ Check that `raw_id_fields` only contains field names that are listed on the model. """ if not isinstance(cls.raw_id_fields, (list, tuple)): return must_be('a list or tuple', option='raw_id_fields', obj=cls, id='admin.E001') else: return list(chain(*[ self._check_raw_id_fields_item(cls, model, field_name, 'raw_id_fields[%d]' % index) for index, field_name in enumerate(cls.raw_id_fields) ])) def _check_raw_id_fields_item(self, cls, model, field_name, label): """ Check an item of `raw_id_fields`, i.e. check that field named `field_name` exists in model `model` and is a ForeignKey or a ManyToManyField. 
""" try: field = model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, model=model, obj=cls, id='admin.E002') else: if not isinstance(field, (models.ForeignKey, models.ManyToManyField)): return must_be('a ForeignKey or ManyToManyField', option=label, obj=cls, id='admin.E003') else: return [] def _check_fields(self, cls, model): """ Check that `fields` only refer to existing fields, doesn't contain duplicates. Check if at most one of `fields` and `fieldsets` is defined. """ if cls.fields is None: return [] elif not isinstance(cls.fields, (list, tuple)): return must_be('a list or tuple', option='fields', obj=cls, id='admin.E004') elif cls.fieldsets: return [ checks.Error( "Both 'fieldsets' and 'fields' are specified.", hint=None, obj=cls, id='admin.E005', ) ] fields = flatten(cls.fields) if len(fields) != len(set(fields)): return [ checks.Error( "The value of 'fields' contains duplicate field(s).", hint=None, obj=cls, id='admin.E006', ) ] return list(chain(*[ self._check_field_spec(cls, model, field_name, 'fields') for field_name in cls.fields ])) def _check_fieldsets(self, cls, model): """ Check that fieldsets is properly formatted and doesn't contain duplicates. """ if cls.fieldsets is None: return [] elif not isinstance(cls.fieldsets, (list, tuple)): return must_be('a list or tuple', option='fieldsets', obj=cls, id='admin.E007') else: return list(chain(*[ self._check_fieldsets_item(cls, model, fieldset, 'fieldsets[%d]' % index) for index, fieldset in enumerate(cls.fieldsets) ])) def _check_fieldsets_item(self, cls, model, fieldset, label): """ Check an item of `fieldsets`, i.e. check that this is a pair of a set name and a dictionary containing "fields" key. 
""" if not isinstance(fieldset, (list, tuple)): return must_be('a list or tuple', option=label, obj=cls, id='admin.E008') elif len(fieldset) != 2: return must_be('of length 2', option=label, obj=cls, id='admin.E009') elif not isinstance(fieldset[1], dict): return must_be('a dictionary', option='%s[1]' % label, obj=cls, id='admin.E010') elif 'fields' not in fieldset[1]: return [ checks.Error( "The value of '%s[1]' must contain the key 'fields'." % label, hint=None, obj=cls, id='admin.E011', ) ] elif not isinstance(fieldset[1]['fields'], (list, tuple)): return must_be('a list or tuple', option="%s[1]['fields']" % label, obj=cls, id='admin.E008') fields = flatten(fieldset[1]['fields']) if len(fields) != len(set(fields)): return [ checks.Error( "There are duplicate field(s) in '%s[1]'." % label, hint=None, obj=cls, id='admin.E012', ) ] return list(chain(*[ self._check_field_spec(cls, model, fieldset_fields, '%s[1]["fields"]' % label) for fieldset_fields in fieldset[1]['fields'] ])) def _check_field_spec(self, cls, model, fields, label): """ `fields` should be an item of `fields` or an item of fieldset[1]['fields'] for any `fieldset` in `fieldsets`. It should be a field name or a tuple of field names. """ if isinstance(fields, tuple): return list(chain(*[ self._check_field_spec_item(cls, model, field_name, "%s[%d]" % (label, index)) for index, field_name in enumerate(fields) ])) else: return self._check_field_spec_item(cls, model, fields, label) def _check_field_spec_item(self, cls, model, field_name, label): if field_name in cls.readonly_fields: # Stuff can be put in fields that isn't actually a model field if # it's in readonly_fields, readonly_fields will handle the # validation of such things. return [] else: try: field = model._meta.get_field(field_name) except FieldDoesNotExist: # If we can't find a field on the model that matches, it could # be an extra field on the form. 
return [] else: if (isinstance(field, models.ManyToManyField) and not field.remote_field.through._meta.auto_created): return [ checks.Error( ("The value of '%s' cannot include the ManyToManyField '%s', " "because that field manually specifies a relationship model.") % (label, field_name), hint=None, obj=cls, id='admin.E013', ) ] else: return [] def _check_exclude(self, cls, model): """ Check that exclude is a sequence without duplicates. """ if cls.exclude is None: # default value is None return [] elif not isinstance(cls.exclude, (list, tuple)): return must_be('a list or tuple', option='exclude', obj=cls, id='admin.E014') elif len(cls.exclude) > len(set(cls.exclude)): return [ checks.Error( "The value of 'exclude' contains duplicate field(s).", hint=None, obj=cls, id='admin.E015', ) ] else: return [] def _check_form(self, cls, model): """ Check that form subclasses BaseModelForm. """ if hasattr(cls, 'form') and not issubclass(cls.form, BaseModelForm): return must_inherit_from(parent='BaseModelForm', option='form', obj=cls, id='admin.E016') else: return [] def _check_filter_vertical(self, cls, model): """ Check that filter_vertical is a sequence of field names. """ if not hasattr(cls, 'filter_vertical'): return [] elif not isinstance(cls.filter_vertical, (list, tuple)): return must_be('a list or tuple', option='filter_vertical', obj=cls, id='admin.E017') else: return list(chain(*[ self._check_filter_item(cls, model, field_name, "filter_vertical[%d]" % index) for index, field_name in enumerate(cls.filter_vertical) ])) def _check_filter_horizontal(self, cls, model): """ Check that filter_horizontal is a sequence of field names. 
""" if not hasattr(cls, 'filter_horizontal'): return [] elif not isinstance(cls.filter_horizontal, (list, tuple)): return must_be('a list or tuple', option='filter_horizontal', obj=cls, id='admin.E018') else: return list(chain(*[ self._check_filter_item(cls, model, field_name, "filter_horizontal[%d]" % index) for index, field_name in enumerate(cls.filter_horizontal) ])) def _check_filter_item(self, cls, model, field_name, label): """ Check one item of `filter_vertical` or `filter_horizontal`, i.e. check that given field exists and is a ManyToManyField. """ try: field = model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, model=model, obj=cls, id='admin.E019') else: if not isinstance(field, models.ManyToManyField): return must_be('a ManyToManyField', option=label, obj=cls, id='admin.E020') else: return [] def _check_radio_fields(self, cls, model): """ Check that `radio_fields` is a dictionary. """ if not hasattr(cls, 'radio_fields'): return [] elif not isinstance(cls.radio_fields, dict): return must_be('a dictionary', option='radio_fields', obj=cls, id='admin.E021') else: return list(chain(*[ self._check_radio_fields_key(cls, model, field_name, 'radio_fields') + self._check_radio_fields_value(cls, model, val, 'radio_fields["%s"]' % field_name) for field_name, val in cls.radio_fields.items() ])) def _check_radio_fields_key(self, cls, model, field_name, label): """ Check that a key of `radio_fields` dictionary is name of existing field and that the field is a ForeignKey or has `choices` defined. """ try: field = model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, model=model, obj=cls, id='admin.E022') else: if not (isinstance(field, models.ForeignKey) or field.choices): return [ checks.Error( "The value of '%s' refers to '%s', which is not an " "instance of ForeignKey, and does not have a 'choices' definition." 
% ( label, field_name ), hint=None, obj=cls, id='admin.E023', ) ] else: return [] def _check_radio_fields_value(self, cls, model, val, label): """ Check type of a value of `radio_fields` dictionary. """ from django.contrib.admin.options import HORIZONTAL, VERTICAL if val not in (HORIZONTAL, VERTICAL): return [ checks.Error( "The value of '%s' must be either admin.HORIZONTAL or admin.VERTICAL." % label, hint=None, obj=cls, id='admin.E024', ) ] else: return [] def _check_view_on_site_url(self, cls, model): if hasattr(cls, 'view_on_site'): if not callable(cls.view_on_site) and not isinstance(cls.view_on_site, bool): return [ checks.Error( "The value of 'view_on_site' must be a callable or a boolean value.", hint=None, obj=cls, id='admin.E025', ) ] else: return [] else: return [] def _check_prepopulated_fields(self, cls, model): """ Check that `prepopulated_fields` is a dictionary containing allowed field types. """ if not hasattr(cls, 'prepopulated_fields'): return [] elif not isinstance(cls.prepopulated_fields, dict): return must_be('a dictionary', option='prepopulated_fields', obj=cls, id='admin.E026') else: return list(chain(*[ self._check_prepopulated_fields_key(cls, model, field_name, 'prepopulated_fields') + self._check_prepopulated_fields_value(cls, model, val, 'prepopulated_fields["%s"]' % field_name) for field_name, val in cls.prepopulated_fields.items() ])) def _check_prepopulated_fields_key(self, cls, model, field_name, label): """ Check a key of `prepopulated_fields` dictionary, i.e. check that it is a name of existing field and the field is one of the allowed types. 
""" forbidden_field_types = ( models.DateTimeField, models.ForeignKey, models.ManyToManyField ) try: field = model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, model=model, obj=cls, id='admin.E027') else: if isinstance(field, forbidden_field_types): return [ checks.Error( "The value of '%s' refers to '%s', which must not be a DateTimeField, " "ForeignKey or ManyToManyField." % ( label, field_name ), hint=None, obj=cls, id='admin.E028', ) ] else: return [] def _check_prepopulated_fields_value(self, cls, model, val, label): """ Check a value of `prepopulated_fields` dictionary, i.e. it's an iterable of existing fields. """ if not isinstance(val, (list, tuple)): return must_be('a list or tuple', option=label, obj=cls, id='admin.E029') else: return list(chain(*[ self._check_prepopulated_fields_value_item(cls, model, subfield_name, "%s[%r]" % (label, index)) for index, subfield_name in enumerate(val) ])) def _check_prepopulated_fields_value_item(self, cls, model, field_name, label): """ For `prepopulated_fields` equal to {"slug": ("title",)}, `field_name` is "title". """ try: model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, model=model, obj=cls, id='admin.E030') else: return [] def _check_ordering(self, cls, model): """ Check that ordering refers to existing fields or is random. """ # ordering = None if cls.ordering is None: # The default value is None return [] elif not isinstance(cls.ordering, (list, tuple)): return must_be('a list or tuple', option='ordering', obj=cls, id='admin.E031') else: return list(chain(*[ self._check_ordering_item(cls, model, field_name, 'ordering[%d]' % index) for index, field_name in enumerate(cls.ordering) ])) def _check_ordering_item(self, cls, model, field_name, label): """ Check that `ordering` refers to existing fields. """ if field_name == '?' 
and len(cls.ordering) != 1: return [ checks.Error( ("The value of 'ordering' has the random ordering marker '?', " "but contains other fields as well."), hint='Either remove the "?", or remove the other fields.', obj=cls, id='admin.E032', ) ] elif field_name == '?': return [] elif '__' in field_name: # Skip ordering in the format field1__field2 (FIXME: checking # this format would be nice, but it's a little fiddly). return [] else: if field_name.startswith('-'): field_name = field_name[1:] try: model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, model=model, obj=cls, id='admin.E033') else: return [] def _check_readonly_fields(self, cls, model): """ Check that readonly_fields refers to proper attribute or field. """ if cls.readonly_fields == (): return [] elif not isinstance(cls.readonly_fields, (list, tuple)): return must_be('a list or tuple', option='readonly_fields', obj=cls, id='admin.E034') else: return list(chain(*[ self._check_readonly_fields_item(cls, model, field_name, "readonly_fields[%d]" % index) for index, field_name in enumerate(cls.readonly_fields) ])) def _check_readonly_fields_item(self, cls, model, field_name, label): if callable(field_name): return [] elif hasattr(cls, field_name): return [] elif hasattr(model, field_name): return [] else: try: model._meta.get_field(field_name) except FieldDoesNotExist: return [ checks.Error( "The value of '%s' is not a callable, an attribute of '%s', or an attribute of '%s.%s'." 
% ( label, cls.__name__, model._meta.app_label, model._meta.object_name ), hint=None, obj=cls, id='admin.E035', ) ] else: return [] class ModelAdminChecks(BaseModelAdminChecks): def check(self, cls, model, **kwargs): errors = super(ModelAdminChecks, self).check(cls, model=model, **kwargs) errors.extend(self._check_save_as(cls, model)) errors.extend(self._check_save_on_top(cls, model)) errors.extend(self._check_inlines(cls, model)) errors.extend(self._check_list_display(cls, model)) errors.extend(self._check_list_display_links(cls, model)) errors.extend(self._check_list_filter(cls, model)) errors.extend(self._check_list_select_related(cls, model)) errors.extend(self._check_list_per_page(cls, model)) errors.extend(self._check_list_max_show_all(cls, model)) errors.extend(self._check_list_editable(cls, model)) errors.extend(self._check_search_fields(cls, model)) errors.extend(self._check_date_hierarchy(cls, model)) return errors def _check_save_as(self, cls, model): """ Check save_as is a boolean. """ if not isinstance(cls.save_as, bool): return must_be('a boolean', option='save_as', obj=cls, id='admin.E101') else: return [] def _check_save_on_top(self, cls, model): """ Check save_on_top is a boolean. """ if not isinstance(cls.save_on_top, bool): return must_be('a boolean', option='save_on_top', obj=cls, id='admin.E102') else: return [] def _check_inlines(self, cls, model): """ Check all inline model admin classes. """ if not isinstance(cls.inlines, (list, tuple)): return must_be('a list or tuple', option='inlines', obj=cls, id='admin.E103') else: return list(chain(*[ self._check_inlines_item(cls, model, item, "inlines[%d]" % index) for index, item in enumerate(cls.inlines) ])) def _check_inlines_item(self, cls, model, inline, label): """ Check one inline model admin. 
""" inline_label = '.'.join([inline.__module__, inline.__name__]) from django.contrib.admin.options import BaseModelAdmin if not issubclass(inline, BaseModelAdmin): return [ checks.Error( "'%s' must inherit from 'BaseModelAdmin'." % inline_label, hint=None, obj=cls, id='admin.E104', ) ] elif not inline.model: return [ checks.Error( "'%s' must have a 'model' attribute." % inline_label, hint=None, obj=cls, id='admin.E105', ) ] elif not issubclass(inline.model, models.Model): return must_be('a Model', option='%s.model' % inline_label, obj=cls, id='admin.E106') else: return inline.check(model) def _check_list_display(self, cls, model): """ Check that list_display only contains fields or usable attributes. """ if not isinstance(cls.list_display, (list, tuple)): return must_be('a list or tuple', option='list_display', obj=cls, id='admin.E107') else: return list(chain(*[ self._check_list_display_item(cls, model, item, "list_display[%d]" % index) for index, item in enumerate(cls.list_display) ])) def _check_list_display_item(self, cls, model, item, label): if callable(item): return [] elif hasattr(cls, item): return [] elif hasattr(model, item): # getattr(model, item) could be an X_RelatedObjectsDescriptor try: field = model._meta.get_field(item) except FieldDoesNotExist: try: field = getattr(model, item) except AttributeError: field = None if field is None: return [ checks.Error( "The value of '%s' refers to '%s', which is not a " "callable, an attribute of '%s', or an attribute or method on '%s.%s'." % ( label, item, cls.__name__, model._meta.app_label, model._meta.object_name ), hint=None, obj=cls, id='admin.E108', ) ] elif isinstance(field, models.ManyToManyField): return [ checks.Error( "The value of '%s' must not be a ManyToManyField." 
% label, hint=None, obj=cls, id='admin.E109', ) ] else: return [] else: try: model._meta.get_field(item) except FieldDoesNotExist: return [ # This is a deliberate repeat of E108; there's more than one path # required to test this condition. checks.Error( "The value of '%s' refers to '%s', which is not a callable, " "an attribute of '%s', or an attribute or method on '%s.%s'." % ( label, item, cls.__name__, model._meta.app_label, model._meta.object_name ), hint=None, obj=cls, id='admin.E108', ) ] else: return [] def _check_list_display_links(self, cls, model): """ Check that list_display_links is a unique subset of list_display. """ if cls.list_display_links is None: return [] elif not isinstance(cls.list_display_links, (list, tuple)): return must_be('a list, a tuple, or None', option='list_display_links', obj=cls, id='admin.E110') else: return list(chain(*[ self._check_list_display_links_item(cls, model, field_name, "list_display_links[%d]" % index) for index, field_name in enumerate(cls.list_display_links) ])) def _check_list_display_links_item(self, cls, model, field_name, label): if field_name not in cls.list_display: return [ checks.Error( "The value of '%s' refers to '%s', which is not defined in 'list_display'." % ( label, field_name ), hint=None, obj=cls, id='admin.E111', ) ] else: return [] def _check_list_filter(self, cls, model): if not isinstance(cls.list_filter, (list, tuple)): return must_be('a list or tuple', option='list_filter', obj=cls, id='admin.E112') else: return list(chain(*[ self._check_list_filter_item(cls, model, item, "list_filter[%d]" % index) for index, item in enumerate(cls.list_filter) ])) def _check_list_filter_item(self, cls, model, item, label): """ Check one item of `list_filter`, i.e. check if it is one of three options: 1. 'field' -- a basic field filter, possibly w/ relationships (e.g. 'field__rel') 2. ('field', SomeFieldListFilter) - a field-based list filter class 3. 
SomeListFilter - a non-field list filter class """ from django.contrib.admin import ListFilter, FieldListFilter if callable(item) and not isinstance(item, models.Field): # If item is option 3, it should be a ListFilter... if not issubclass(item, ListFilter): return must_inherit_from(parent='ListFilter', option=label, obj=cls, id='admin.E113') # ... but not a FieldListFilter. elif issubclass(item, FieldListFilter): return [ checks.Error( "The value of '%s' must not inherit from 'FieldListFilter'." % label, hint=None, obj=cls, id='admin.E114', ) ] else: return [] elif isinstance(item, (tuple, list)): # item is option #2 field, list_filter_class = item if not issubclass(list_filter_class, FieldListFilter): return must_inherit_from(parent='FieldListFilter', option='%s[1]' % label, obj=cls, id='admin.E115') else: return [] else: # item is option #1 field = item # Validate the field string try: get_fields_from_path(model, field) except (NotRelationField, FieldDoesNotExist): return [ checks.Error( "The value of '%s' refers to '%s', which does not refer to a Field." % (label, field), hint=None, obj=cls, id='admin.E116', ) ] else: return [] def _check_list_select_related(self, cls, model): """ Check that list_select_related is a boolean, a list or a tuple. """ if not isinstance(cls.list_select_related, (bool, list, tuple)): return must_be('a boolean, tuple or list', option='list_select_related', obj=cls, id='admin.E117') else: return [] def _check_list_per_page(self, cls, model): """ Check that list_per_page is an integer. """ if not isinstance(cls.list_per_page, int): return must_be('an integer', option='list_per_page', obj=cls, id='admin.E118') else: return [] def _check_list_max_show_all(self, cls, model): """ Check that list_max_show_all is an integer. 
""" if not isinstance(cls.list_max_show_all, int): return must_be('an integer', option='list_max_show_all', obj=cls, id='admin.E119') else: return [] def _check_list_editable(self, cls, model): """ Check that list_editable is a sequence of editable fields from list_display without first element. """ if not isinstance(cls.list_editable, (list, tuple)): return must_be('a list or tuple', option='list_editable', obj=cls, id='admin.E120') else: return list(chain(*[ self._check_list_editable_item(cls, model, item, "list_editable[%d]" % index) for index, item in enumerate(cls.list_editable) ])) def _check_list_editable_item(self, cls, model, field_name, label): try: field = model._meta.get_field(field_name) except FieldDoesNotExist: return refer_to_missing_field(field=field_name, option=label, model=model, obj=cls, id='admin.E121') else: if field_name not in cls.list_display: return [ checks.Error( "The value of '%s' refers to '%s', which is not " "contained in 'list_display'." % (label, field_name), hint=None, obj=cls, id='admin.E122', ) ] elif cls.list_display_links and field_name in cls.list_display_links: return [ checks.Error( "The value of '%s' cannot be in both 'list_editable' and 'list_display_links'." % field_name, hint=None, obj=cls, id='admin.E123', ) ] # Check that list_display_links is set, and that the first values of list_editable and list_display are # not the same. See ticket #22792 for the use case relating to this. elif (cls.list_display[0] in cls.list_editable and cls.list_display[0] != cls.list_editable[0] and cls.list_display_links is not None): return [ checks.Error( "The value of '%s' refers to the first field in 'list_display' ('%s'), " "which cannot be used unless 'list_display_links' is set." % ( label, cls.list_display[0] ), hint=None, obj=cls, id='admin.E124', ) ] elif not field.editable: return [ checks.Error( "The value of '%s' refers to '%s', which is not editable through the admin." 
% ( label, field_name ), hint=None, obj=cls, id='admin.E125', ) ] else: return [] def _check_search_fields(self, cls, model): """ Check search_fields is a sequence. """ if not isinstance(cls.search_fields, (list, tuple)): return must_be('a list or tuple', option='search_fields', obj=cls, id='admin.E126') else: return [] def _check_date_hierarchy(self, cls, model): """ Check that date_hierarchy refers to DateField or DateTimeField. """ if cls.date_hierarchy is None: return [] else: try: field = model._meta.get_field(cls.date_hierarchy) except FieldDoesNotExist: return refer_to_missing_field(option='date_hierarchy', field=cls.date_hierarchy, model=model, obj=cls, id='admin.E127') else: if not isinstance(field, (models.DateField, models.DateTimeField)): return must_be('a DateField or DateTimeField', option='date_hierarchy', obj=cls, id='admin.E128') else: return [] class InlineModelAdminChecks(BaseModelAdminChecks): def check(self, cls, parent_model, **kwargs): errors = super(InlineModelAdminChecks, self).check(cls, model=cls.model, **kwargs) errors.extend(self._check_relation(cls, parent_model)) errors.extend(self._check_exclude_of_parent_model(cls, parent_model)) errors.extend(self._check_extra(cls)) errors.extend(self._check_max_num(cls)) errors.extend(self._check_min_num(cls)) errors.extend(self._check_formset(cls)) return errors def _check_exclude_of_parent_model(self, cls, parent_model): # Do not perform more specific checks if the base checks result in an # error. errors = super(InlineModelAdminChecks, self)._check_exclude(cls, parent_model) if errors: return [] # Skip if `fk_name` is invalid. if self._check_relation(cls, parent_model): return [] if cls.exclude is None: return [] fk = _get_foreign_key(parent_model, cls.model, fk_name=cls.fk_name) if fk.name in cls.exclude: return [ checks.Error( "Cannot exclude the field '%s', because it is the foreign key " "to the parent model '%s.%s'." 
% ( fk.name, parent_model._meta.app_label, parent_model._meta.object_name ), hint=None, obj=cls, id='admin.E201', ) ] else: return [] def _check_relation(self, cls, parent_model): try: _get_foreign_key(parent_model, cls.model, fk_name=cls.fk_name) except ValueError as e: return [checks.Error(e.args[0], hint=None, obj=cls, id='admin.E202')] else: return [] def _check_extra(self, cls): """ Check that extra is an integer. """ if not isinstance(cls.extra, int): return must_be('an integer', option='extra', obj=cls, id='admin.E203') else: return [] def _check_max_num(self, cls): """ Check that max_num is an integer. """ if cls.max_num is None: return [] elif not isinstance(cls.max_num, int): return must_be('an integer', option='max_num', obj=cls, id='admin.E204') else: return [] def _check_min_num(self, cls): """ Check that min_num is an integer. """ if cls.min_num is None: return [] elif not isinstance(cls.min_num, int): return must_be('an integer', option='min_num', obj=cls, id='admin.E205') else: return [] def _check_formset(self, cls): """ Check formset is a subclass of BaseModelFormSet. """ if not issubclass(cls.formset, BaseModelFormSet): return must_inherit_from(parent='BaseModelFormSet', option='formset', obj=cls, id='admin.E206') else: return [] def must_be(type, option, obj, id): return [ checks.Error( "The value of '%s' must be %s." % (option, type), hint=None, obj=obj, id=id, ), ] def must_inherit_from(parent, option, obj, id): return [ checks.Error( "The value of '%s' must inherit from '%s'." % (option, parent), hint=None, obj=obj, id=id, ), ] def refer_to_missing_field(field, option, model, obj, id): return [ checks.Error( "The value of '%s' refers to '%s', which is not an attribute of '%s.%s'." % ( option, field, model._meta.app_label, model._meta.object_name ), hint=None, obj=obj, id=id, ), ]
bsd-3-clause
Shaps/ansible
test/support/network-integration/collections/ansible_collections/cisco/ios/plugins/doc_fragments/ios.py
47
3576
# -*- coding: utf-8 -*- # Copyright: (c) 2015, Peter Sprygada <psprygada@ansible.com> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) class ModuleDocFragment(object): # Standard files documentation fragment DOCUMENTATION = r"""options: provider: description: - B(Deprecated) - 'Starting with Ansible 2.5 we recommend using C(connection: network_cli).' - For more information please see the L(IOS Platform Options guide, ../network/user_guide/platform_ios.html). - HORIZONTALLINE - A dict object containing connection details. type: dict suboptions: host: description: - Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport. type: str required: true port: description: - Specifies the port to use when building the connection to the remote device. type: int default: 22 username: description: - Configures the username to use to authenticate the connection to the remote device. This value is used to authenticate the SSH session. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_USERNAME) will be used instead. type: str password: description: - Specifies the password to use to authenticate the connection to the remote device. This value is used to authenticate the SSH session. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead. type: str timeout: description: - Specifies the timeout in seconds for communicating with the network device for either connecting or sending commands. If the timeout is exceeded before the operation is completed, the module will error. type: int default: 10 ssh_keyfile: description: - Specifies the SSH key to use to authenticate the connection to the remote device. This value is the path to the key used to authenticate the SSH session. 
If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_SSH_KEYFILE) will be used instead. type: path authorize: description: - Instructs the module to enter privileged mode on the remote device before sending any commands. If not specified, the device will attempt to execute all commands in non-privileged mode. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_AUTHORIZE) will be used instead. type: bool default: false auth_pass: description: - Specifies the password to use if required to enter privileged mode on the remote device. If I(authorize) is false, then this argument does nothing. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_AUTH_PASS) will be used instead. type: str notes: - For more information on using Ansible to manage network devices see the :ref:`Ansible Network Guide <network_guide>` - For more information on using Ansible to manage Cisco devices see the `Cisco integration page <https://www.ansible.com/integrations/networks/cisco>`_. """
gpl-3.0
awni/tensorflow
tensorflow/python/training/adam.py
5
6642
# Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Adam for TensorFlow.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.framework import ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import state_ops from tensorflow.python.ops import variables from tensorflow.python.training import optimizer from tensorflow.python.training import training_ops class AdamOptimizer(optimizer.Optimizer): """Optimizer that implements the Adam algorithm. See [Kingma et. al., 2014](http://arxiv.org/abs/1412.6980) ([pdf](http://arxiv.org/pdf/1412.6980.pdf)). @@__init__ """ def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.999, epsilon=1e-8, use_locking=False, name="Adam"): """Construct a new Adam optimizer. 
Initialization: ``` m_0 <- 0 (Initialize initial 1st moment vector) v_0 <- 0 (Initialize initial 2nd moment vector) t <- 0 (Initialize timestep) ``` The update rule for `variable` with gradient `g` uses an optimization described at the end of section2 of the paper: ``` t <- t + 1 lr_t <- learning_rate * sqrt(1 - beta2^t) / (1 - beta1^t) m_t <- beta1 * m_{t-1} + (1 - beta1) * g v_t <- beta2 * v_{t-1} + (1 - beta2) * g * g variable <- variable - lr_t * m_t / (sqrt(v_t) + epsilon) ``` The default value of 1e-8 for epsilon might not be a good default in general. For example, when training an Inception network on ImageNet a current good choice is 1.0 or 0.1. Args: learning_rate: A Tensor or a floating point value. The learning rate. beta1: A float value or a constant float tensor. The exponential decay rate for the 1st moment estimates. beta2: A float value or a constant float tensor. The exponential decay rate for the 2nd moment estimates. epsilon: A small constant for numerical stability. use_locking: If True use locks for update operations. name: Optional name for the operations created when applying gradients. Defaults to "Adam". """ super(AdamOptimizer, self).__init__(use_locking, name) self._lr = learning_rate self._beta1 = beta1 self._beta2 = beta2 self._epsilon = epsilon # Tensor versions of the constructor arguments, created in _prepare(). self._lr_t = None self._beta1_t = None self._beta2_t = None self._epsilon_t = None # Variables to accumulate the powers of the beta parameters. # Created in _create_slots when we know the variables to optimize. self._beta1_power = None self._beta2_power = None # Created in SparseApply if needed. self._updated_lr = None def _get_beta_accumulators(self): return self._beta1_power, self._beta2_power def _create_slots(self, var_list): # Create the beta1 and beta2 accumulators on the same device as the first # variable. 
if self._beta1_power is None: with ops.colocate_with(var_list[0]): self._beta1_power = variables.Variable(self._beta1, name="beta1_power", trainable=False) self._beta2_power = variables.Variable(self._beta2, name="beta2_power", trainable=False) # Create slots for the first and second moments. for v in var_list: self._zeros_slot(v, "m", self._name) self._zeros_slot(v, "v", self._name) def _prepare(self): self._lr_t = ops.convert_to_tensor(self._lr, name="learning_rate") self._beta1_t = ops.convert_to_tensor(self._beta1, name="beta1") self._beta2_t = ops.convert_to_tensor(self._beta2, name="beta2") self._epsilon_t = ops.convert_to_tensor(self._epsilon, name="epsilon") def _apply_dense(self, grad, var): m = self.get_slot(var, "m") v = self.get_slot(var, "v") return training_ops.apply_adam( var, m, v, self._beta1_power, self._beta2_power, self._lr_t, self._beta1_t, self._beta2_t, self._epsilon_t, grad, use_locking=self._use_locking).op def _apply_sparse(self, grad, var): lr = (self._lr_t * math_ops.sqrt(1 - self._beta2_power) / (1 - self._beta1_power)) # m_t = beta1 * m + (1 - beta1) * g_t m = self.get_slot(var, "m") m_scaled_g_values = grad.values * (1 - self._beta1_t) m_t = state_ops.assign(m, m * self._beta1_t, use_locking=self._use_locking) m_t = state_ops.scatter_add(m_t, grad.indices, m_scaled_g_values, use_locking=self._use_locking) # v_t = beta2 * v + (1 - beta2) * (g_t * g_t) v = self.get_slot(var, "v") v_scaled_g_values = (grad.values * grad.values) * (1 - self._beta2_t) v_t = state_ops.assign(v, v * self._beta2_t, use_locking=self._use_locking) v_t = state_ops.scatter_add(v_t, grad.indices, v_scaled_g_values, use_locking=self._use_locking) v_sqrt = math_ops.sqrt(v_t) var_update = state_ops.assign_sub(var, lr * m_t / (v_sqrt + self._epsilon_t), use_locking=self._use_locking) return control_flow_ops.group(*[var_update, m_t, v_t]) def _finish(self, update_ops, name_scope): # Update the power accumulators. 
with ops.control_dependencies(update_ops): with ops.colocate_with(self._beta1_power): update_beta1 = self._beta1_power.assign( self._beta1_power * self._beta1_t, use_locking=self._use_locking) update_beta2 = self._beta2_power.assign( self._beta2_power * self._beta2_t, use_locking=self._use_locking) return control_flow_ops.group(*update_ops + [update_beta1, update_beta2], name=name_scope)
apache-2.0
rgaino/three.js
utils/exporters/blender/addons/io_three/exporter/__init__.py
178
2661
import os import sys import traceback from .. import constants, logger, exceptions, dialogs from . import scene, geometry, api, base_classes def _error_handler(func): def inner(filepath, options, *args, **kwargs): level = options.get(constants.LOGGING, constants.DEBUG) version = options.get('addon_version') logger.init('io_three.export.log', level=level) if version is not None: logger.debug("Addon Version %s", version) api.init() try: func(filepath, options, *args, **kwargs) except: info = sys.exc_info() trace = traceback.format_exception( info[0], info[1], info[2].tb_next) trace = ''.join(trace) logger.error(trace) print('Error recorded to %s' % logger.LOG_FILE) raise else: print('Log: %s' % logger.LOG_FILE) return inner @_error_handler def export_scene(filepath, options): selected = [] # during scene exports unselect everything. this is needed for # applying modifiers, if it is necessary # record the selected nodes so that selection is restored later for obj in api.selected_objects(): api.object.unselect(obj) selected.append(obj) active = api.active_object() try: scene_ = scene.Scene(filepath, options=options) scene_.parse() scene_.write() except: _restore_selection(selected, active) raise _restore_selection(selected, active) @_error_handler def export_geometry(filepath, options, node=None): msg = "" exception = None if node is None: node = api.active_object() if node is None: msg = "Nothing selected" logger.error(msg) exception = exceptions.SelectionError if node.type != 'MESH': msg = "%s is not a valid mesh object" % node.name logger.error(msg) exception = exceptions.GeometryError if exception is not None: if api.batch_mode(): raise exception(msg) else: dialogs.error(msg) return mesh = api.object.mesh(node, options) parent = base_classes.BaseScene(filepath, options) geo = geometry.Geometry(mesh, parent) geo.parse() geo.write() if not options.get(constants.EMBED_ANIMATION, True): geo.write_animation(os.path.dirname(filepath)) def _restore_selection(objects, 
active): for obj in objects: api.object.select(obj) api.set_active_object(active)
mit
sanguinariojoe/FreeCAD
src/Mod/Fem/femexamples/constraint_transform_beam_hinged.py
8
7322
# *************************************************************************** # * Copyright (c) 2020 Sudhanshu Dubey <sudhanshu.thethunder@gmail.com> * # * Copyright (c) 2021 Bernd Hahnebach <bernd@bimstatik.org> * # * * # * This file is part of the FreeCAD CAx development system. * # * * # * This program is free software; you can redistribute it and/or modify * # * it under the terms of the GNU Lesser General Public License (LGPL) * # * as published by the Free Software Foundation; either version 2 of * # * the License, or (at your option) any later version. * # * for detail see the LICENCE text file. * # * * # * This program is distributed in the hope that it will be useful, * # * but WITHOUT ANY WARRANTY; without even the implied warranty of * # * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * # * GNU Library General Public License for more details. * # * * # * You should have received a copy of the GNU Library General Public * # * License along with this program; if not, write to the Free Software * # * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * # * USA * # * * # *************************************************************************** import FreeCAD from FreeCAD import Rotation from FreeCAD import Vector from CompoundTools import CompoundFilter import Fem import ObjectsFem from . 
import manager from .manager import get_meshname from .manager import init_doc def get_information(): return { "name": "Constraint Transform Beam Hinged", "meshtype": "solid", "meshelement": "Tet10", "constraints": ["pressure", "displacement", "transform"], "solvers": ["calculix"], "material": "solid", "equation": "mechanical" } def get_explanation(header=""): return header + """ To run the example from Python console use: from femexamples.constraint_transform_beam_hinged import setup setup() See forum topic post: https://forum.freecadweb.org/viewtopic.php?f=18&t=20238#p157643 Constraint transform on a beam """ def setup(doc=None, solvertype="ccxtools"): # init FreeCAD document if doc is None: doc = init_doc() # explanation object # just keep the following line and change text string in get_explanation method manager.add_explanation_obj(doc, get_explanation(manager.get_header(get_information()))) # geometric object # name is important because the other method in this module use obj name cube = doc.addObject("Part::Box", "Cube") cube.Height = "20 mm" cube.Length = "100 mm" cylinder = doc.addObject("Part::Cylinder", "Cylinder") cylinder.Height = "20 mm" cylinder.Radius = "6 mm" cylinder.Placement = FreeCAD.Placement( Vector(10, 12, 10), Rotation(0, 0, 90), Vector(0, 0, 0), ) cut = doc.addObject("Part::Cut", "Cut") cut.Base = cube cut.Tool = cylinder # mirroring mirror = doc.addObject("Part::Mirroring", "Mirror") mirror.Source = cut mirror.Normal = (1, 0, 0) mirror.Base = (100, 100, 20) # fusing fusion = doc.addObject("Part::Fuse", "Fusion") fusion.Base = cut fusion.Tool = mirror fusion.Refine = True # compound filter geom_obj = CompoundFilter.makeCompoundFilter(name='CompoundFilter') geom_obj.Base = fusion geom_obj.FilterType = 'window-volume' doc.recompute() if FreeCAD.GuiUp: geom_obj.Base.ViewObject.hide() geom_obj.ViewObject.Document.activeView().viewAxonometric() geom_obj.ViewObject.Document.activeView().fitAll() # analysis analysis = ObjectsFem.makeAnalysis(doc, 
"Analysis") # solver if solvertype == "calculix": solver_obj = ObjectsFem.makeSolverCalculix(doc, "SolverCalculiX") elif solvertype == "ccxtools": solver_obj = ObjectsFem.makeSolverCalculixCcxTools(doc, "CalculiXccxTools") solver_obj.WorkingDir = u"" else: FreeCAD.Console.PrintWarning( "Not known or not supported solver type: {}. " "No solver object was created.\n".format(solvertype) ) if solvertype == "calculix" or solvertype == "ccxtools": solver_obj.SplitInputWriter = False solver_obj.AnalysisType = "static" solver_obj.GeometricalNonlinearity = "linear" solver_obj.ThermoMechSteadyState = False solver_obj.MatrixSolverType = "default" solver_obj.IterationsControlParameterTimeUse = False analysis.addObject(solver_obj) # material material_obj = ObjectsFem.makeMaterialSolid(doc, "FemMaterial") mat = material_obj.Material mat["Name"] = "CalculiX-Steel" mat["YoungsModulus"] = "210000 MPa" mat["PoissonRatio"] = "0.30" mat["Density"] = "7900 kg/m^3" mat["ThermalExpansionCoefficient"] = "0.012 mm/m/K" material_obj.Material = mat # constraint pressure con_pressure = ObjectsFem.makeConstraintPressure(doc, name="FemConstraintPressure") con_pressure.References = [(geom_obj, "Face8")] con_pressure.Pressure = 10.0 con_pressure.Reversed = False analysis.addObject(con_pressure) # constraint displacement con_disp = ObjectsFem.makeConstraintDisplacement(doc, name="FemConstraintDisplacment") con_disp.References = [(geom_obj, "Face4"), (geom_obj, "Face5")] con_disp.xFree = False con_disp.xFix = True analysis.addObject(con_disp) # constraints transform con_transform1 = ObjectsFem.makeConstraintTransform(doc, name="FemConstraintTransform1") con_transform1.References = [(geom_obj, "Face4")] con_transform1.TransformType = "Cylindrical" con_transform1.X_rot = 0.0 con_transform1.Y_rot = 0.0 con_transform1.Z_rot = 0.0 analysis.addObject(con_transform1) con_transform2 = ObjectsFem.makeConstraintTransform(doc, name="FemConstraintTransform2") con_transform2.References = [(geom_obj, "Face5")] 
con_transform2.TransformType = "Cylindrical" con_transform2.X_rot = 0.0 con_transform2.Y_rot = 0.0 con_transform2.Z_rot = 0.0 analysis.addObject(con_transform2) # mesh from .meshes.mesh_transform_beam_hinged_tetra10 import create_nodes, create_elements fem_mesh = Fem.FemMesh() control = create_nodes(fem_mesh) if not control: FreeCAD.Console.PrintError("Error on creating nodes.\n") control = create_elements(fem_mesh) if not control: FreeCAD.Console.PrintError("Error on creating elements.\n") femmesh_obj = analysis.addObject(ObjectsFem.makeMeshGmsh(doc, get_meshname()))[0] femmesh_obj.FemMesh = fem_mesh femmesh_obj.Part = geom_obj femmesh_obj.SecondOrderLinear = False femmesh_obj.CharacteristicLengthMax = '7 mm' doc.recompute() return doc
lgpl-2.1
wkl/linux-509
tools/perf/python/twatch.py
1565
1316
#! /usr/bin/python # -*- python -*- # -*- coding: utf-8 -*- # twatch - Experimental use of the perf python interface # Copyright (C) 2011 Arnaldo Carvalho de Melo <acme@redhat.com> # # This application is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; version 2. # # This application is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. import perf def main(): cpus = perf.cpu_map() threads = perf.thread_map() evsel = perf.evsel(task = 1, comm = 1, mmap = 0, wakeup_events = 1, watermark = 1, sample_id_all = 1, sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU) evsel.open(cpus = cpus, threads = threads); evlist = perf.evlist(cpus, threads) evlist.add(evsel) evlist.mmap() while True: evlist.poll(timeout = -1) for cpu in cpus: event = evlist.read_on_cpu(cpu) if not event: continue print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu, event.sample_pid, event.sample_tid), print event if __name__ == '__main__': main()
gpl-2.0
Greennut/ostproject
pygments/formatters/img.py
14
18051
# -*- coding: utf-8 -*- """ pygments.formatters.img ~~~~~~~~~~~~~~~~~~~~~~~ Formatter for Pixmap output. :copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import sys from commands import getstatusoutput from pygments.formatter import Formatter from pygments.util import get_bool_opt, get_int_opt, \ get_list_opt, get_choice_opt # Import this carefully try: from PIL import Image, ImageDraw, ImageFont pil_available = True except ImportError: pil_available = False try: import _winreg except ImportError: _winreg = None __all__ = ['ImageFormatter', 'GifImageFormatter', 'JpgImageFormatter', 'BmpImageFormatter'] # For some unknown reason every font calls it something different STYLES = { 'NORMAL': ['', 'Roman', 'Book', 'Normal', 'Regular', 'Medium'], 'ITALIC': ['Oblique', 'Italic'], 'BOLD': ['Bold'], 'BOLDITALIC': ['Bold Oblique', 'Bold Italic'], } # A sane default for modern systems DEFAULT_FONT_NAME_NIX = 'Bitstream Vera Sans Mono' DEFAULT_FONT_NAME_WIN = 'Courier New' class PilNotAvailable(ImportError): """When Python imaging library is not available""" class FontNotFound(Exception): """When there are no usable fonts specified""" class FontManager(object): """ Manages a set of fonts: normal, italic, bold, etc... 
""" def __init__(self, font_name, font_size=14): self.font_name = font_name self.font_size = font_size self.fonts = {} self.encoding = None if sys.platform.startswith('win'): if not font_name: self.font_name = DEFAULT_FONT_NAME_WIN self._create_win() else: if not font_name: self.font_name = DEFAULT_FONT_NAME_NIX self._create_nix() def _get_nix_font_path(self, name, style): exit, out = getstatusoutput('fc-list "%s:style=%s" file' % (name, style)) if not exit: lines = out.splitlines() if lines: path = lines[0].strip().strip(':') return path def _create_nix(self): for name in STYLES['NORMAL']: path = self._get_nix_font_path(self.font_name, name) if path is not None: self.fonts['NORMAL'] = ImageFont.truetype(path, self.font_size) break else: raise FontNotFound('No usable fonts named: "%s"' % self.font_name) for style in ('ITALIC', 'BOLD', 'BOLDITALIC'): for stylename in STYLES[style]: path = self._get_nix_font_path(self.font_name, stylename) if path is not None: self.fonts[style] = ImageFont.truetype(path, self.font_size) break else: if style == 'BOLDITALIC': self.fonts[style] = self.fonts['BOLD'] else: self.fonts[style] = self.fonts['NORMAL'] def _lookup_win(self, key, basename, styles, fail=False): for suffix in ('', ' (TrueType)'): for style in styles: try: valname = '%s%s%s' % (basename, style and ' '+style, suffix) val, _ = _winreg.QueryValueEx(key, valname) return val except EnvironmentError: continue else: if fail: raise FontNotFound('Font %s (%s) not found in registry' % (basename, styles[0])) return None def _create_win(self): try: key = _winreg.OpenKey( _winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows NT\CurrentVersion\Fonts') except EnvironmentError: try: key = _winreg.OpenKey( _winreg.HKEY_LOCAL_MACHINE, r'Software\Microsoft\Windows\CurrentVersion\Fonts') except EnvironmentError: raise FontNotFound('Can\'t open Windows font registry key') try: path = self._lookup_win(key, self.font_name, STYLES['NORMAL'], True) self.fonts['NORMAL'] = 
ImageFont.truetype(path, self.font_size) for style in ('ITALIC', 'BOLD', 'BOLDITALIC'): path = self._lookup_win(key, self.font_name, STYLES[style]) if path: self.fonts[style] = ImageFont.truetype(path, self.font_size) else: if style == 'BOLDITALIC': self.fonts[style] = self.fonts['BOLD'] else: self.fonts[style] = self.fonts['NORMAL'] finally: _winreg.CloseKey(key) def get_char_size(self): """ Get the character size. """ return self.fonts['NORMAL'].getsize('M') def get_font(self, bold, oblique): """ Get the font based on bold and italic flags. """ if bold and oblique: return self.fonts['BOLDITALIC'] elif bold: return self.fonts['BOLD'] elif oblique: return self.fonts['ITALIC'] else: return self.fonts['NORMAL'] class ImageFormatter(Formatter): """ Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code. *New in Pygments 0.10.* Additional options accepted: `image_format` An image format to output to that is recognised by PIL, these include: * "PNG" (default) * "JPEG" * "BMP" * "GIF" `line_pad` The extra spacing (in pixels) between each line of text. Default: 2 `font_name` The font name to be used as the base font from which others, such as bold and italic fonts will be generated. This really should be a monospace font to look sane. Default: "Bitstream Vera Sans Mono" `font_size` The font size in points to be used. Default: 14 `image_pad` The padding, in pixels to be used at each edge of the resulting image. Default: 10 `line_numbers` Whether line numbers should be shown: True/False Default: True `line_number_start` The line number of the first line. Default: 1 `line_number_step` The step used when printing line numbers. Default: 1 `line_number_bg` The background colour (in "#123456" format) of the line number bar, or None to use the style background color. Default: "#eed" `line_number_fg` The text color of the line numbers (in "#123456"-like format). 
Default: "#886" `line_number_chars` The number of columns of line numbers allowable in the line number margin. Default: 2 `line_number_bold` Whether line numbers will be bold: True/False Default: False `line_number_italic` Whether line numbers will be italicized: True/False Default: False `line_number_separator` Whether a line will be drawn between the line number area and the source code area: True/False Default: True `line_number_pad` The horizontal padding (in pixels) between the line number margin, and the source code area. Default: 6 `hl_lines` Specify a list of lines to be highlighted. *New in Pygments 1.2.* Default: empty list `hl_color` Specify the color for highlighting lines. *New in Pygments 1.2.* Default: highlight color of the selected style """ # Required by the pygments mapper name = 'img' aliases = ['img', 'IMG', 'png'] filenames = ['*.png'] unicodeoutput = False default_image_format = 'png' def __init__(self, **options): """ See the class docstring for explanation of options. 
""" if not pil_available: raise PilNotAvailable( 'Python Imaging Library is required for this formatter') Formatter.__init__(self, **options) # Read the style self.styles = dict(self.style) if self.style.background_color is None: self.background_color = '#fff' else: self.background_color = self.style.background_color # Image options self.image_format = get_choice_opt( options, 'image_format', ['png', 'jpeg', 'gif', 'bmp'], self.default_image_format, normcase=True) self.image_pad = get_int_opt(options, 'image_pad', 10) self.line_pad = get_int_opt(options, 'line_pad', 2) # The fonts fontsize = get_int_opt(options, 'font_size', 14) self.fonts = FontManager(options.get('font_name', ''), fontsize) self.fontw, self.fonth = self.fonts.get_char_size() # Line number options self.line_number_fg = options.get('line_number_fg', '#886') self.line_number_bg = options.get('line_number_bg', '#eed') self.line_number_chars = get_int_opt(options, 'line_number_chars', 2) self.line_number_bold = get_bool_opt(options, 'line_number_bold', False) self.line_number_italic = get_bool_opt(options, 'line_number_italic', False) self.line_number_pad = get_int_opt(options, 'line_number_pad', 6) self.line_numbers = get_bool_opt(options, 'line_numbers', True) self.line_number_separator = get_bool_opt(options, 'line_number_separator', True) self.line_number_step = get_int_opt(options, 'line_number_step', 1) self.line_number_start = get_int_opt(options, 'line_number_start', 1) if self.line_numbers: self.line_number_width = (self.fontw * self.line_number_chars + self.line_number_pad * 2) else: self.line_number_width = 0 self.hl_lines = [] hl_lines_str = get_list_opt(options, 'hl_lines', []) for line in hl_lines_str: try: self.hl_lines.append(int(line)) except ValueError: pass self.hl_color = options.get('hl_color', self.style.highlight_color) or '#f90' self.drawables = [] def get_style_defs(self, arg=''): raise NotImplementedError('The -S option is meaningless for the image ' 'formatter. 
Use -O style=<stylename> instead.') def _get_line_height(self): """ Get the height of a line. """ return self.fonth + self.line_pad def _get_line_y(self, lineno): """ Get the Y coordinate of a line number. """ return lineno * self._get_line_height() + self.image_pad def _get_char_width(self): """ Get the width of a character. """ return self.fontw def _get_char_x(self, charno): """ Get the X coordinate of a character position. """ return charno * self.fontw + self.image_pad + self.line_number_width def _get_text_pos(self, charno, lineno): """ Get the actual position for a character and line position. """ return self._get_char_x(charno), self._get_line_y(lineno) def _get_linenumber_pos(self, lineno): """ Get the actual position for the start of a line number. """ return (self.image_pad, self._get_line_y(lineno)) def _get_text_color(self, style): """ Get the correct color for the token from the style. """ if style['color'] is not None: fill = '#' + style['color'] else: fill = '#000' return fill def _get_style_font(self, style): """ Get the correct font for the style. """ return self.fonts.get_font(style['bold'], style['italic']) def _get_image_size(self, maxcharno, maxlineno): """ Get the required image size. """ return (self._get_char_x(maxcharno) + self.image_pad, self._get_line_y(maxlineno + 0) + self.image_pad) def _draw_linenumber(self, posno, lineno): """ Remember a line number drawable to paint later. """ self._draw_text( self._get_linenumber_pos(posno), str(lineno).rjust(self.line_number_chars), font=self.fonts.get_font(self.line_number_bold, self.line_number_italic), fill=self.line_number_fg, ) def _draw_text(self, pos, text, font, **kw): """ Remember a single drawable tuple to paint later. """ self.drawables.append((pos, text, font, kw)) def _create_drawables(self, tokensource): """ Create drawables for the token content. 
""" lineno = charno = maxcharno = 0 for ttype, value in tokensource: while ttype not in self.styles: ttype = ttype.parent style = self.styles[ttype] # TODO: make sure tab expansion happens earlier in the chain. It # really ought to be done on the input, as to do it right here is # quite complex. value = value.expandtabs(4) lines = value.splitlines(True) #print lines for i, line in enumerate(lines): temp = line.rstrip('\n') if temp: self._draw_text( self._get_text_pos(charno, lineno), temp, font = self._get_style_font(style), fill = self._get_text_color(style) ) charno += len(temp) maxcharno = max(maxcharno, charno) if line.endswith('\n'): # add a line for each extra line in the value charno = 0 lineno += 1 self.maxcharno = maxcharno self.maxlineno = lineno def _draw_line_numbers(self): """ Create drawables for the line numbers. """ if not self.line_numbers: return for p in xrange(self.maxlineno): n = p + self.line_number_start if (n % self.line_number_step) == 0: self._draw_linenumber(p, n) def _paint_line_number_bg(self, im): """ Paint the line number background on the image. """ if not self.line_numbers: return if self.line_number_fg is None: return draw = ImageDraw.Draw(im) recth = im.size[-1] rectw = self.image_pad + self.line_number_width - self.line_number_pad draw.rectangle([(0, 0), (rectw, recth)], fill=self.line_number_bg) draw.line([(rectw, 0), (rectw, recth)], fill=self.line_number_fg) del draw def format(self, tokensource, outfile): """ Format ``tokensource``, an iterable of ``(tokentype, tokenstring)`` tuples and write it into ``outfile``. This implementation calculates where it should draw each token on the pixmap, then calculates the required pixmap size and draws the items. 
""" self._create_drawables(tokensource) self._draw_line_numbers() im = Image.new( 'RGB', self._get_image_size(self.maxcharno, self.maxlineno), self.background_color ) self._paint_line_number_bg(im) draw = ImageDraw.Draw(im) # Highlight if self.hl_lines: x = self.image_pad + self.line_number_width - self.line_number_pad + 1 recth = self._get_line_height() rectw = im.size[0] - x for linenumber in self.hl_lines: y = self._get_line_y(linenumber - 1) draw.rectangle([(x, y), (x + rectw, y + recth)], fill=self.hl_color) for pos, value, font, kw in self.drawables: draw.text(pos, value, font=font, **kw) im.save(outfile, self.image_format.upper()) # Add one formatter per format, so that the "-f gif" option gives the correct result # when used in pygmentize. class GifImageFormatter(ImageFormatter): """ Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code. *New in Pygments 1.0.* (You could create GIF images before by passing a suitable `image_format` option to the `ImageFormatter`.) """ name = 'img_gif' aliases = ['gif'] filenames = ['*.gif'] default_image_format = 'gif' class JpgImageFormatter(ImageFormatter): """ Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code. *New in Pygments 1.0.* (You could create JPEG images before by passing a suitable `image_format` option to the `ImageFormatter`.) """ name = 'img_jpg' aliases = ['jpg', 'jpeg'] filenames = ['*.jpg'] default_image_format = 'jpeg' class BmpImageFormatter(ImageFormatter): """ Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code. *New in Pygments 1.0.* (You could create bitmap images before by passing a suitable `image_format` option to the `ImageFormatter`.) """ name = 'img_bmp' aliases = ['bmp', 'bitmap'] filenames = ['*.bmp'] default_image_format = 'bmp'
bsd-3-clause
victor-gil-sepulveda/PhD-ANMPythonHelpers
anmichelpers/tools/tools.py
1
3490
""" Created on 9/2/2015 @author: victor """ import numpy import math import prody try: from pyproct.data.handler.sourceGenerator import SourceGenerator from pyproct.data.handler.protein.proteinEnsembleDataLoader import ProteinEnsembleDataLoader except: print "[WARNING] pyProCT was not found. Some functions cannot be used" def norm(v): """ Numpy compliant norm implementation. @param v: The vector used to calculate the norm. @return: A norm or an array of norms. """ if len(v.shape) == 1: return numpy.sqrt(numpy.dot(v,v)) elif len(v.shape) == 2: norms = [] for i in range(len(v)): norms.append(norm(v[i])) return numpy.array(norms) else: return None def frec_from_eigenvalue(e_val): """ Calculates the proportional frequency of a given eigenvalue (if it comes from a vibrational study). @param e_val: The eigenvalue. @return: The computed frequency (no units). """ return e_val / (2*math.pi) def ensure_modes_layout(modes): """ If the layout of the modes is flat, it converts it to a (M,N,3) layout. @param modes: [In/Out] A numpy array containing all the modes. @return: The same numpy array with a (M,N,3) layout or (N,3) """ if len(modes.shape) == 3: return modes elif len(modes.shape) == 2: number_of_modes = len(modes) number_of_nodes = modes.shape[1] / 3 return numpy.reshape(modes, (number_of_modes, number_of_nodes, 3)) else: raise ValueError("The array has an unexpected size") def load_all_pdbs_ca(pdb_list): """ Loads a list of pdbs in pyproct format (this includes the use of globs and 'base_selection'. @param pdb_list: A list of pdbs in pyproct format. 
@return: The pyproct data object and the list of sources (prody pdb structure -> data.structure_ensemble source from pyproct source -> s.source["source"] ) """ class MockParams: def __init__(self): pass def get_value(self,a,b): return "" sources = SourceGenerator(pdb_list).source_list loader = ProteinEnsembleDataLoader(MockParams()) for source in sources: loader.load(source) # Retrieve the data object data = loader.close() return data, sources def get_all_betas(sources): """ Loads CA temperature factors from a list of pyproct sources. @return: A matrix with all the beta factors. """ betas = [] for s in sources: pdb = prody.parsePDB(s.source["source"]).select("name CA") betas.append(pdb.getBetas()) betas = numpy.array(betas) mean_betas = betas.mean(axis = 0) for beta_array in betas: for i in range(len(beta_array)): if beta_array[i] == 0.0: print "Zero beta value @ %d; exchanging with mean."%i beta_array[i] = mean_betas[i] return betas def normalize(v): max_val = max(v) return v / abs(max_val) def normalize_zero_one(v): max_val = max(v) min_val = min(v) val_range = max_val - min_val return (v - min_val) / val_range def is_int(this_str): try: int(this_str) return True except ValueError: return False def find(f, seq): """Return first item in sequence where f(item) == True.""" for item in seq: if f(item): return item
mit
Alwnikrotikz/marinemap
lingcod/openid/middleware.py
3
2397
# -*- coding: utf-8 -*- # Copyright 2007, 2008,2009 by Benoît Chesneau <benoitc@e-engura.org> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from lingcod.openid.utils.mimeparse import best_match from django.http import HttpResponseRedirect from django.core.urlresolvers import reverse from lingcod.openid.models import UserAssociation from lingcod.openid.views import xrdf from lingcod.common.utils import get_logger __all__ = ["OpenIDMiddleware"] log = get_logger() class OpenIDMiddleware(object): """ Populate request.openid. This comes either from cookie or from session, depending on the presence of OPENID_USE_SESSIONS. MP- HUH? 
I dont see that setting used anywhere """ def process_request(self, request): request.openid = request.session.get('openid', None) request.openids = request.session.get('openids', []) # The code below seems benign and perfectly understandable (just grabs the openids and attaches a list to the request object) # But for some unknown reason, this filter interacts with sessions in such a way that # load_sessions fails to work on requests from the GE plugin # # Not sure what the implications are for excluding it but we shall see # #rels = UserAssociation.objects.filter(user__id=request.user.id) rels = [] request.associated_openids = [rel.openid_url for rel in rels] def process_response(self, request, response): if response.status_code != 200 or len(response.content) < 200: return response path = request.get_full_path() if path == "/" and request.META.has_key('HTTP_ACCEPT') and \ best_match(['text/html', 'application/xrds+xml'], request.META['HTTP_ACCEPT']) == 'application/xrds+xml': response = xrdf(request) return response
bsd-3-clause
mnunezdm/cazasteroides
karmaserver/modules/selection/provider/__init__.py
1
3116
''' EFES Provider module ''' from karmaserver.modules.selection.provider.evaluator import ObservationEvaluator from karmaserver.modules.selection.provider.filter import ObservationFilter from karmaserver.modules.selection.provider.eraser import ObservationEraser from karmaserver.modules.selection.provider.selector import RandomObservationSelector import karmaserver.utils.print as print_ from karmaserver.utils import start_timer, stop_timer from karmaserver.data.content_resolver import content_resolver from karmaserver.data.models.observation import Observation class ObservationSelectionProviderAbstract: # pragma: no cover ''' Abstract class of the EFES Provider class ''' def select_observation_for_discover(self, user_id, karma_level): ''' Returns the Observation based on the karma_level and the user_id ''' raise NotImplementedError('Abstract class, this method should have been implemented') def select_observation_for_votation(self, user_id, karma_level): ''' Returns the Observation based on the karma_level and the user_id ''' raise NotImplementedError('Abstract class, this method should have been implemented') def print_info(self): ''' Prints the Provider Configuration ''' raise NotImplementedError('Abstract class, this method should have been implemented') class ObservationSelectionProvider(ObservationSelectionProviderAbstract): ''' Implementation of the EFES Provider class ''' def __init__(self, number_of_karma_levels, number_of_filter_levels): self.number_of_filter_levels = number_of_filter_levels self.evaluator = ObservationEvaluator() self.filter = ObservationFilter(number_of_karma_levels, number_of_filter_levels) self.eraser = ObservationEraser() self.selector = RandomObservationSelector() self.print_info() def print_info(self): print_.initialize_info(self.__class__.__name__, True) print_.key_value_list('Maximum Filter Level', self.number_of_filter_levels) def select_observation_for_discover(self, user_id, karma_level): observation_list = 
content_resolver.get(Observation) return self.__get_observation(observation_list, user_id, karma_level) def select_observation_for_votation(self, user_id, karma_level): observation_list = content_resolver.get(Observation) return self.__get_observation(observation_list, user_id, karma_level) def __get_observation(self, observation_list, user_id, karma_level): evaluated_observations = self.evaluator.evaluate(observation_list) filtered_observations, level = self.filter.observations(evaluated_observations, karma_level) erased__observations = self.eraser.erase(filtered_observations, user_id) selected__observation = self.selector.select(erased__observations) if selected__observation: serialized = selected__observation.serialize(id_position=True) serialized['filter_level'] = level return serialized
mit
cruzegoodin/TSC-ShippingDetails
flask/lib/python2.7/site-packages/sqlalchemy/sql/type_api.py
21
37851
# sql/types_api.py # Copyright (C) 2005-2015 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Base types API. """ from .. import exc, util from . import operators from .visitors import Visitable # these are back-assigned by sqltypes. BOOLEANTYPE = None INTEGERTYPE = None NULLTYPE = None STRINGTYPE = None class TypeEngine(Visitable): """The ultimate base class for all SQL datatypes. Common subclasses of :class:`.TypeEngine` include :class:`.String`, :class:`.Integer`, and :class:`.Boolean`. For an overview of the SQLAlchemy typing system, see :ref:`types_toplevel`. .. seealso:: :ref:`types_toplevel` """ _sqla_type = True _isnull = False class Comparator(operators.ColumnOperators): """Base class for custom comparison operations defined at the type level. See :attr:`.TypeEngine.comparator_factory`. """ def __init__(self, expr): self.expr = expr def __reduce__(self): return _reconstitute_comparator, (self.expr, ) hashable = True """Flag, if False, means values from this type aren't hashable. Used by the ORM when uniquing result lists. """ comparator_factory = Comparator """A :class:`.TypeEngine.Comparator` class which will apply to operations performed by owning :class:`.ColumnElement` objects. The :attr:`.comparator_factory` attribute is a hook consulted by the core expression system when column and SQL expression operations are performed. When a :class:`.TypeEngine.Comparator` class is associated with this attribute, it allows custom re-definition of all existing operators, as well as definition of new operators. 
Existing operators include those provided by Python operator overloading such as :meth:`.operators.ColumnOperators.__add__` and :meth:`.operators.ColumnOperators.__eq__`, those provided as standard attributes of :class:`.operators.ColumnOperators` such as :meth:`.operators.ColumnOperators.like` and :meth:`.operators.ColumnOperators.in_`. Rudimentary usage of this hook is allowed through simple subclassing of existing types, or alternatively by using :class:`.TypeDecorator`. See the documentation section :ref:`types_operators` for examples. .. versionadded:: 0.8 The expression system was enhanced to support customization of operators on a per-type level. """ def copy_value(self, value): return value def literal_processor(self, dialect): """Return a conversion function for processing literal values that are to be rendered directly without using binds. This function is used when the compiler makes use of the "literal_binds" flag, typically used in DDL generation as well as in certain scenarios where backends don't accept bound parameters. .. versionadded:: 0.9.0 """ return None def bind_processor(self, dialect): """Return a conversion function for processing bind values. Returns a callable which will receive a bind parameter value as the sole positional argument and will return a value to send to the DB-API. If processing is not necessary, the method should return ``None``. :param dialect: Dialect instance in use. """ return None def result_processor(self, dialect, coltype): """Return a conversion function for processing result row values. Returns a callable which will receive a result row column value as the sole positional argument and will return a value to return to the user. If processing is not necessary, the method should return ``None``. :param dialect: Dialect instance in use. :param coltype: DBAPI coltype argument received in cursor.description. 
""" return None def column_expression(self, colexpr): """Given a SELECT column expression, return a wrapping SQL expression. This is typically a SQL function that wraps a column expression as rendered in the columns clause of a SELECT statement. It is used for special data types that require columns to be wrapped in some special database function in order to coerce the value before being sent back to the application. It is the SQL analogue of the :meth:`.TypeEngine.result_processor` method. The method is evaluated at statement compile time, as opposed to statement construction time. See also: :ref:`types_sql_value_processing` """ return None @util.memoized_property def _has_column_expression(self): """memoized boolean, check if column_expression is implemented. Allows the method to be skipped for the vast majority of expression types that don't use this feature. """ return self.__class__.column_expression.__code__ \ is not TypeEngine.column_expression.__code__ def bind_expression(self, bindvalue): """"Given a bind value (i.e. a :class:`.BindParameter` instance), return a SQL expression in its place. This is typically a SQL function that wraps the existing bound parameter within the statement. It is used for special data types that require literals being wrapped in some special database function in order to coerce an application-level value into a database-specific format. It is the SQL analogue of the :meth:`.TypeEngine.bind_processor` method. The method is evaluated at statement compile time, as opposed to statement construction time. Note that this method, when implemented, should always return the exact same structure, without any conditional logic, as it may be used in an executemany() call against an arbitrary number of bound parameter sets. See also: :ref:`types_sql_value_processing` """ return None @util.memoized_property def _has_bind_expression(self): """memoized boolean, check if bind_expression is implemented. 
Allows the method to be skipped for the vast majority of expression types that don't use this feature. """ return self.__class__.bind_expression.__code__ \ is not TypeEngine.bind_expression.__code__ def compare_values(self, x, y): """Compare two values for equality.""" return x == y def get_dbapi_type(self, dbapi): """Return the corresponding type object from the underlying DB-API, if any. This can be useful for calling ``setinputsizes()``, for example. """ return None @property def python_type(self): """Return the Python type object expected to be returned by instances of this type, if known. Basically, for those types which enforce a return type, or are known across the board to do such for all common DBAPIs (like ``int`` for example), will return that type. If a return type is not defined, raises ``NotImplementedError``. Note that any type also accommodates NULL in SQL which means you can also get back ``None`` from any type in practice. """ raise NotImplementedError() def with_variant(self, type_, dialect_name): """Produce a new type object that will utilize the given type when applied to the dialect of the given name. e.g.:: from sqlalchemy.types import String from sqlalchemy.dialects import mysql s = String() s = s.with_variant(mysql.VARCHAR(collation='foo'), 'mysql') The construction of :meth:`.TypeEngine.with_variant` is always from the "fallback" type to that which is dialect specific. The returned type is an instance of :class:`.Variant`, which itself provides a :meth:`.Variant.with_variant` that can be called repeatedly. :param type_: a :class:`.TypeEngine` that will be selected as a variant from the originating type, when a dialect of the given name is in use. :param dialect_name: base name of the dialect which uses this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.) .. 
versionadded:: 0.7.2 """ return Variant(self, {dialect_name: to_instance(type_)}) @util.memoized_property def _type_affinity(self): """Return a rudimental 'affinity' value expressing the general class of type.""" typ = None for t in self.__class__.__mro__: if t in (TypeEngine, UserDefinedType): return typ elif issubclass(t, (TypeEngine, UserDefinedType)): typ = t else: return self.__class__ def dialect_impl(self, dialect): """Return a dialect-specific implementation for this :class:`.TypeEngine`. """ try: return dialect._type_memos[self]['impl'] except KeyError: return self._dialect_info(dialect)['impl'] def _cached_literal_processor(self, dialect): """Return a dialect-specific literal processor for this type.""" try: return dialect._type_memos[self]['literal'] except KeyError: d = self._dialect_info(dialect) d['literal'] = lp = d['impl'].literal_processor(dialect) return lp def _cached_bind_processor(self, dialect): """Return a dialect-specific bind processor for this type.""" try: return dialect._type_memos[self]['bind'] except KeyError: d = self._dialect_info(dialect) d['bind'] = bp = d['impl'].bind_processor(dialect) return bp def _cached_result_processor(self, dialect, coltype): """Return a dialect-specific result processor for this type.""" try: return dialect._type_memos[self][coltype] except KeyError: d = self._dialect_info(dialect) # key assumption: DBAPI type codes are # constants. Else this dictionary would # grow unbounded. 
d[coltype] = rp = d['impl'].result_processor(dialect, coltype) return rp def _dialect_info(self, dialect): """Return a dialect-specific registry which caches a dialect-specific implementation, bind processing function, and one or more result processing functions.""" if self in dialect._type_memos: return dialect._type_memos[self] else: impl = self._gen_dialect_impl(dialect) if impl is self: impl = self.adapt(type(self)) # this can't be self, else we create a cycle assert impl is not self dialect._type_memos[self] = d = {'impl': impl} return d def _gen_dialect_impl(self, dialect): return dialect.type_descriptor(self) def adapt(self, cls, **kw): """Produce an "adapted" form of this type, given an "impl" class to work with. This method is used internally to associate generic types with "implementation" types that are specific to a particular dialect. """ return util.constructor_copy(self, cls, **kw) def coerce_compared_value(self, op, value): """Suggest a type for a 'coerced' Python value in an expression. Given an operator and value, gives the type a chance to return a type which the value should be coerced into. The default behavior here is conservative; if the right-hand side is already coerced into a SQL type based on its Python type, it is usually left alone. End-user functionality extension here should generally be via :class:`.TypeDecorator`, which provides more liberal behavior in that it defaults to coercing the other side of the expression into this type, thus applying special Python conversions above and beyond those needed by the DBAPI to both ides. It also provides the public method :meth:`.TypeDecorator.coerce_compared_value` which is intended for end-user customization of this behavior. 
""" _coerced_type = _type_map.get(type(value), NULLTYPE) if _coerced_type is NULLTYPE or _coerced_type._type_affinity \ is self._type_affinity: return self else: return _coerced_type def _compare_type_affinity(self, other): return self._type_affinity is other._type_affinity def compile(self, dialect=None): """Produce a string-compiled form of this :class:`.TypeEngine`. When called with no arguments, uses a "default" dialect to produce a string result. :param dialect: a :class:`.Dialect` instance. """ # arg, return value is inconsistent with # ClauseElement.compile()....this is a mistake. if not dialect: dialect = self._default_dialect() return dialect.type_compiler.process(self) @util.dependencies("sqlalchemy.engine.default") def _default_dialect(self, default): if self.__class__.__module__.startswith("sqlalchemy.dialects"): tokens = self.__class__.__module__.split(".")[0:3] mod = ".".join(tokens) return getattr(__import__(mod).dialects, tokens[-1]).dialect() else: return default.DefaultDialect() def __str__(self): if util.py2k: return unicode(self.compile()).\ encode('ascii', 'backslashreplace') else: return str(self.compile()) def __repr__(self): return util.generic_repr(self) class UserDefinedType(TypeEngine): """Base for user defined types. This should be the base of new types. 
Note that for most cases, :class:`.TypeDecorator` is probably more appropriate:: import sqlalchemy.types as types class MyType(types.UserDefinedType): def __init__(self, precision = 8): self.precision = precision def get_col_spec(self): return "MYTYPE(%s)" % self.precision def bind_processor(self, dialect): def process(value): return value return process def result_processor(self, dialect, coltype): def process(value): return value return process Once the type is made, it's immediately usable:: table = Table('foo', meta, Column('id', Integer, primary_key=True), Column('data', MyType(16)) ) """ __visit_name__ = "user_defined" class Comparator(TypeEngine.Comparator): def _adapt_expression(self, op, other_comparator): if hasattr(self.type, 'adapt_operator'): util.warn_deprecated( "UserDefinedType.adapt_operator is deprecated. Create " "a UserDefinedType.Comparator subclass instead which " "generates the desired expression constructs, given a " "particular operator." ) return self.type.adapt_operator(op), self.type else: return op, self.type comparator_factory = Comparator def coerce_compared_value(self, op, value): """Suggest a type for a 'coerced' Python value in an expression. Default behavior for :class:`.UserDefinedType` is the same as that of :class:`.TypeDecorator`; by default it returns ``self``, assuming the compared value should be coerced into the same type as this one. See :meth:`.TypeDecorator.coerce_compared_value` for more detail. .. versionchanged:: 0.8 :meth:`.UserDefinedType.coerce_compared_value` now returns ``self`` by default, rather than falling onto the more fundamental behavior of :meth:`.TypeEngine.coerce_compared_value`. """ return self class TypeDecorator(TypeEngine): """Allows the creation of types which add additional functionality to an existing type. This method is preferred to direct subclassing of SQLAlchemy's built-in types as it ensures that all required functionality of the underlying type is kept in place. 
Typical usage:: import sqlalchemy.types as types class MyType(types.TypeDecorator): '''Prefixes Unicode values with "PREFIX:" on the way in and strips it off on the way out. ''' impl = types.Unicode def process_bind_param(self, value, dialect): return "PREFIX:" + value def process_result_value(self, value, dialect): return value[7:] def copy(self): return MyType(self.impl.length) The class-level "impl" attribute is required, and can reference any TypeEngine class. Alternatively, the load_dialect_impl() method can be used to provide different type classes based on the dialect given; in this case, the "impl" variable can reference ``TypeEngine`` as a placeholder. Types that receive a Python type that isn't similar to the ultimate type used may want to define the :meth:`TypeDecorator.coerce_compared_value` method. This is used to give the expression system a hint when coercing Python objects into bind parameters within expressions. Consider this expression:: mytable.c.somecol + datetime.date(2009, 5, 15) Above, if "somecol" is an ``Integer`` variant, it makes sense that we're doing date arithmetic, where above is usually interpreted by databases as adding a number of days to the given date. The expression system does the right thing by not attempting to coerce the "date()" value into an integer-oriented bind parameter. However, in the case of ``TypeDecorator``, we are usually changing an incoming Python type to something new - ``TypeDecorator`` by default will "coerce" the non-typed side to be the same type as itself. 
Such as below, we define an "epoch" type that stores a date value as an integer:: class MyEpochType(types.TypeDecorator): impl = types.Integer epoch = datetime.date(1970, 1, 1) def process_bind_param(self, value, dialect): return (value - self.epoch).days def process_result_value(self, value, dialect): return self.epoch + timedelta(days=value) Our expression of ``somecol + date`` with the above type will coerce the "date" on the right side to also be treated as ``MyEpochType``. This behavior can be overridden via the :meth:`~TypeDecorator.coerce_compared_value` method, which returns a type that should be used for the value of the expression. Below we set it such that an integer value will be treated as an ``Integer``, and any other value is assumed to be a date and will be treated as a ``MyEpochType``:: def coerce_compared_value(self, op, value): if isinstance(value, int): return Integer() else: return self """ __visit_name__ = "type_decorator" def __init__(self, *args, **kwargs): """Construct a :class:`.TypeDecorator`. Arguments sent here are passed to the constructor of the class assigned to the ``impl`` class level attribute, assuming the ``impl`` is a callable, and the resulting object is assigned to the ``self.impl`` instance attribute (thus overriding the class attribute of the same name). If the class level ``impl`` is not a callable (the unusual case), it will be assigned to the same instance attribute 'as-is', ignoring those arguments passed to the constructor. Subclasses can override this to customize the generation of ``self.impl`` entirely. 
""" if not hasattr(self.__class__, 'impl'): raise AssertionError("TypeDecorator implementations " "require a class-level variable " "'impl' which refers to the class of " "type being decorated") self.impl = to_instance(self.__class__.impl, *args, **kwargs) coerce_to_is_types = (util.NoneType, ) """Specify those Python types which should be coerced at the expression level to "IS <constant>" when compared using ``==`` (and same for ``IS NOT`` in conjunction with ``!=``. For most SQLAlchemy types, this includes ``NoneType``, as well as ``bool``. :class:`.TypeDecorator` modifies this list to only include ``NoneType``, as typedecorator implementations that deal with boolean types are common. Custom :class:`.TypeDecorator` classes can override this attribute to return an empty tuple, in which case no values will be coerced to constants. ..versionadded:: 0.8.2 Added :attr:`.TypeDecorator.coerce_to_is_types` to allow for easier control of ``__eq__()`` ``__ne__()`` operations. """ class Comparator(TypeEngine.Comparator): def operate(self, op, *other, **kwargs): kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types return super(TypeDecorator.Comparator, self).operate( op, *other, **kwargs) def reverse_operate(self, op, other, **kwargs): kwargs['_python_is_types'] = self.expr.type.coerce_to_is_types return super(TypeDecorator.Comparator, self).reverse_operate( op, other, **kwargs) @property def comparator_factory(self): if TypeDecorator.Comparator in self.impl.comparator_factory.__mro__: return self.impl.comparator_factory else: return type("TDComparator", (TypeDecorator.Comparator, self.impl.comparator_factory), {}) def _gen_dialect_impl(self, dialect): """ #todo """ adapted = dialect.type_descriptor(self) if adapted is not self: return adapted # otherwise adapt the impl type, link # to a copy of this TypeDecorator and return # that. 
typedesc = self.load_dialect_impl(dialect).dialect_impl(dialect) tt = self.copy() if not isinstance(tt, self.__class__): raise AssertionError('Type object %s does not properly ' 'implement the copy() method, it must ' 'return an object of type %s' % (self, self.__class__)) tt.impl = typedesc return tt @property def _type_affinity(self): """ #todo """ return self.impl._type_affinity def type_engine(self, dialect): """Return a dialect-specific :class:`.TypeEngine` instance for this :class:`.TypeDecorator`. In most cases this returns a dialect-adapted form of the :class:`.TypeEngine` type represented by ``self.impl``. Makes usage of :meth:`dialect_impl` but also traverses into wrapped :class:`.TypeDecorator` instances. Behavior can be customized here by overriding :meth:`load_dialect_impl`. """ adapted = dialect.type_descriptor(self) if not isinstance(adapted, type(self)): return adapted elif isinstance(self.impl, TypeDecorator): return self.impl.type_engine(dialect) else: return self.load_dialect_impl(dialect) def load_dialect_impl(self, dialect): """Return a :class:`.TypeEngine` object corresponding to a dialect. This is an end-user override hook that can be used to provide differing types depending on the given dialect. It is used by the :class:`.TypeDecorator` implementation of :meth:`type_engine` to help determine what type should ultimately be returned for a given :class:`.TypeDecorator`. By default returns ``self.impl``. """ return self.impl def __getattr__(self, key): """Proxy all other undefined accessors to the underlying implementation.""" return getattr(self.impl, key) def process_literal_param(self, value, dialect): """Receive a literal parameter value to be rendered inline within a statement. This method is used when the compiler renders a literal value without using binds, typically within DDL such as in the "server default" of a column or an expression within a CHECK constraint. The returned string will be rendered into the output string. .. 
versionadded:: 0.9.0 """ raise NotImplementedError() def process_bind_param(self, value, dialect): """Receive a bound parameter value to be converted. Subclasses override this method to return the value that should be passed along to the underlying :class:`.TypeEngine` object, and from there to the DBAPI ``execute()`` method. The operation could be anything desired to perform custom behavior, such as transforming or serializing data. This could also be used as a hook for validating logic. This operation should be designed with the reverse operation in mind, which would be the process_result_value method of this class. :param value: Data to operate upon, of any type expected by this method in the subclass. Can be ``None``. :param dialect: the :class:`.Dialect` in use. """ raise NotImplementedError() def process_result_value(self, value, dialect): """Receive a result-row column value to be converted. Subclasses should implement this method to operate on data fetched from the database. Subclasses override this method to return the value that should be passed back to the application, given a value that is already processed by the underlying :class:`.TypeEngine` object, originally from the DBAPI cursor method ``fetchone()`` or similar. The operation could be anything desired to perform custom behavior, such as transforming or serializing data. This could also be used as a hook for validating logic. :param value: Data to operate upon, of any type expected by this method in the subclass. Can be ``None``. :param dialect: the :class:`.Dialect` in use. This operation should be designed to be reversible by the "process_bind_param" method of this class. """ raise NotImplementedError() @util.memoized_property def _has_bind_processor(self): """memoized boolean, check if process_bind_param is implemented. Allows the base process_bind_param to raise NotImplementedError without needing to test an expensive exception throw. 
""" return self.__class__.process_bind_param.__code__ \ is not TypeDecorator.process_bind_param.__code__ @util.memoized_property def _has_literal_processor(self): """memoized boolean, check if process_literal_param is implemented. """ return self.__class__.process_literal_param.__code__ \ is not TypeDecorator.process_literal_param.__code__ def literal_processor(self, dialect): """Provide a literal processing function for the given :class:`.Dialect`. Subclasses here will typically override :meth:`.TypeDecorator.process_literal_param` instead of this method directly. By default, this method makes use of :meth:`.TypeDecorator.process_bind_param` if that method is implemented, where :meth:`.TypeDecorator.process_literal_param` is not. The rationale here is that :class:`.TypeDecorator` typically deals with Python conversions of data that are above the layer of database presentation. With the value converted by :meth:`.TypeDecorator.process_bind_param`, the underlying type will then handle whether it needs to be presented to the DBAPI as a bound parameter or to the database as an inline SQL value. .. versionadded:: 0.9.0 """ if self._has_literal_processor: process_param = self.process_literal_param elif self._has_bind_processor: # the bind processor should normally be OK # for TypeDecorator since it isn't doing DB-level # handling, the handling here won't be different for bound vs. # literals. process_param = self.process_bind_param else: process_param = None if process_param: impl_processor = self.impl.literal_processor(dialect) if impl_processor: def process(value): return impl_processor(process_param(value, dialect)) else: def process(value): return process_param(value, dialect) return process else: return self.impl.literal_processor(dialect) def bind_processor(self, dialect): """Provide a bound value processing function for the given :class:`.Dialect`. This is the method that fulfills the :class:`.TypeEngine` contract for bound value conversion. 
:class:`.TypeDecorator` will wrap a user-defined implementation of :meth:`process_bind_param` here. User-defined code can override this method directly, though its likely best to use :meth:`process_bind_param` so that the processing provided by ``self.impl`` is maintained. :param dialect: Dialect instance in use. This method is the reverse counterpart to the :meth:`result_processor` method of this class. """ if self._has_bind_processor: process_param = self.process_bind_param impl_processor = self.impl.bind_processor(dialect) if impl_processor: def process(value): return impl_processor(process_param(value, dialect)) else: def process(value): return process_param(value, dialect) return process else: return self.impl.bind_processor(dialect) @util.memoized_property def _has_result_processor(self): """memoized boolean, check if process_result_value is implemented. Allows the base process_result_value to raise NotImplementedError without needing to test an expensive exception throw. """ return self.__class__.process_result_value.__code__ \ is not TypeDecorator.process_result_value.__code__ def result_processor(self, dialect, coltype): """Provide a result value processing function for the given :class:`.Dialect`. This is the method that fulfills the :class:`.TypeEngine` contract for result value conversion. :class:`.TypeDecorator` will wrap a user-defined implementation of :meth:`process_result_value` here. User-defined code can override this method directly, though its likely best to use :meth:`process_result_value` so that the processing provided by ``self.impl`` is maintained. :param dialect: Dialect instance in use. :param coltype: An SQLAlchemy data type This method is the reverse counterpart to the :meth:`bind_processor` method of this class. 
""" if self._has_result_processor: process_value = self.process_result_value impl_processor = self.impl.result_processor(dialect, coltype) if impl_processor: def process(value): return process_value(impl_processor(value), dialect) else: def process(value): return process_value(value, dialect) return process else: return self.impl.result_processor(dialect, coltype) def coerce_compared_value(self, op, value): """Suggest a type for a 'coerced' Python value in an expression. By default, returns self. This method is called by the expression system when an object using this type is on the left or right side of an expression against a plain Python object which does not yet have a SQLAlchemy type assigned:: expr = table.c.somecolumn + 35 Where above, if ``somecolumn`` uses this type, this method will be called with the value ``operator.add`` and ``35``. The return value is whatever SQLAlchemy type should be used for ``35`` for this particular operation. """ return self def copy(self): """Produce a copy of this :class:`.TypeDecorator` instance. This is a shallow copy and is provided to fulfill part of the :class:`.TypeEngine` contract. It usually does not need to be overridden unless the user-defined :class:`.TypeDecorator` has local state that should be deep-copied. """ instance = self.__class__.__new__(self.__class__) instance.__dict__.update(self.__dict__) return instance def get_dbapi_type(self, dbapi): """Return the DBAPI type object represented by this :class:`.TypeDecorator`. By default this calls upon :meth:`.TypeEngine.get_dbapi_type` of the underlying "impl". """ return self.impl.get_dbapi_type(dbapi) def compare_values(self, x, y): """Given two values, compare them for equality. By default this calls upon :meth:`.TypeEngine.compare_values` of the underlying "impl", which in turn usually uses the Python equals operator ``==``. 
This function is used by the ORM to compare an original-loaded value with an intercepted "changed" value, to determine if a net change has occurred. """ return self.impl.compare_values(x, y) def __repr__(self): return util.generic_repr(self, to_inspect=self.impl) class Variant(TypeDecorator): """A wrapping type that selects among a variety of implementations based on dialect in use. The :class:`.Variant` type is typically constructed using the :meth:`.TypeEngine.with_variant` method. .. versionadded:: 0.7.2 .. seealso:: :meth:`.TypeEngine.with_variant` for an example of use. """ def __init__(self, base, mapping): """Construct a new :class:`.Variant`. :param base: the base 'fallback' type :param mapping: dictionary of string dialect names to :class:`.TypeEngine` instances. """ self.impl = base self.mapping = mapping def load_dialect_impl(self, dialect): if dialect.name in self.mapping: return self.mapping[dialect.name] else: return self.impl def with_variant(self, type_, dialect_name): """Return a new :class:`.Variant` which adds the given type + dialect name to the mapping, in addition to the mapping present in this :class:`.Variant`. :param type_: a :class:`.TypeEngine` that will be selected as a variant from the originating type, when a dialect of the given name is in use. :param dialect_name: base name of the dialect which uses this type. (i.e. ``'postgresql'``, ``'mysql'``, etc.) 
""" if dialect_name in self.mapping: raise exc.ArgumentError( "Dialect '%s' is already present in " "the mapping for this Variant" % dialect_name) mapping = self.mapping.copy() mapping[dialect_name] = type_ return Variant(self.impl, mapping) @property def comparator_factory(self): """express comparison behavior in terms of the base type""" return self.impl.comparator_factory def _reconstitute_comparator(expression): return expression.comparator def to_instance(typeobj, *arg, **kw): if typeobj is None: return NULLTYPE if util.callable(typeobj): return typeobj(*arg, **kw) else: return typeobj def adapt_type(typeobj, colspecs): if isinstance(typeobj, type): typeobj = typeobj() for t in typeobj.__class__.__mro__[0:-1]: try: impltype = colspecs[t] break except KeyError: pass else: # couldn't adapt - so just return the type itself # (it may be a user-defined type) return typeobj # if we adapted the given generic type to a database-specific type, # but it turns out the originally given "generic" type # is actually a subclass of our resulting type, then we were already # given a more specific type than that required; so use that. if (issubclass(typeobj.__class__, impltype)): return typeobj return typeobj.adapt(impltype)
bsd-3-clause
NathanW2/QGIS
python/plugins/processing/tools/system.py
16
3475
# -*- coding: utf-8 -*-

"""
***************************************************************************
    system.py
    ---------------------
    Date                 : August 2012
    Copyright            : (C) 2012 by Victor Olaya
    Email                : volayaf at gmail dot com
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""

__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'

# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'

import os
import time
import sys
import uuid
import math

from qgis.PyQt.QtCore import QDir
from qgis.core import (QgsApplication,
                       QgsProcessingUtils)

# Module-level counter used by getNumExportedLayers() to build unique
# suffixes for temporary layer file names within one session.
numExported = 1


def userFolder():
    """Return the per-user 'processing' settings folder, creating it if needed."""
    userDir = os.path.join(QgsApplication.qgisSettingsDirPath(), 'processing')
    if not QDir(userDir).exists():
        QDir().mkpath(userDir)

    return str(QDir.toNativeSeparators(userDir))


def defaultOutputFolder():
    """Return the default folder for algorithm outputs, creating it if needed."""
    folder = os.path.join(userFolder(), 'outputs')
    if not QDir(folder).exists():
        QDir().mkpath(folder)

    return str(QDir.toNativeSeparators(folder))


def isWindows():
    """Return True when running on Windows."""
    return os.name == 'nt'


def isMac():
    """Return True when running on macOS."""
    return sys.platform == 'darwin'


def getTempFilename(ext=None):
    """Return a unique file name inside the processing temp folder.

    The name combines the current time (seconds in hex plus the sub-second
    microseconds) with a monotonically increasing counter, so names are
    unique within a session. ``ext`` is an optional extension without the
    leading dot.
    """
    tmpPath = QgsProcessingUtils.tempFolder()
    t = time.time()
    m = math.floor(t)
    uid = '{:8x}{:05x}'.format(m, int((t - m) * 1000000))
    if ext is None:
        filename = os.path.join(tmpPath, '{}{}'.format(uid, getNumExportedLayers()))
    else:
        filename = os.path.join(tmpPath, '{}{}.{}'.format(uid, getNumExportedLayers(), ext))
    return filename


def getTempDirInTempFolder():
    """Create and return a fresh temporary directory inside the temp folder."""
    path = QgsProcessingUtils.tempFolder()
    path = os.path.join(path, uuid.uuid4().hex)
    mkdir(path)
    return path


def removeInvalidChars(string):
    """Strip every character outside [A-Za-z0-9:.] from ``string``."""
    validChars = \
        'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789:.'
    string = ''.join(c for c in string if c in validChars)
    return string


def getNumExportedLayers():
    """Increment and return the module-level export counter."""
    global numExported
    numExported += 1
    return numExported


def mkdir(newdir):
    """Create ``newdir`` and any missing parents; no-op if it already exists.

    Leading/trailing whitespace and newlines are stripped from the path
    first (kept from the original implementation). The previous hand-rolled
    recursive implementation is replaced by os.makedirs, which has the same
    semantics: it raises if the path exists but is not a directory.
    """
    newdir = newdir.strip('\n\r ')
    if newdir:
        os.makedirs(newdir, exist_ok=True)


def tempHelpFolder():
    """Return a 'processing_help' dir under the system temp folder, creating it if needed."""
    tmp = os.path.join(str(QDir.tempPath()), 'processing_help')
    if not QDir(tmp).exists():
        QDir().mkpath(tmp)

    return str(os.path.abspath(tmp))


def escapeAndJoin(strList):
    """Join command-line tokens into one string, quoting tokens with spaces.

    A token containing a space that does not start with '-' is wrapped in
    double quotes, with backslashes and embedded quotes escaped. Empty
    tokens previously crashed with IndexError (``s[0]`` on ``''``); they
    are now passed through unchanged.
    """
    joined = ''
    for s in strList:
        if s and s[0] != '-' and ' ' in s:
            escaped = '"' + s.replace('\\', '\\\\').replace('"', '\\"') \
                + '"'
        else:
            escaped = s
        joined += escaped + ' '
    return joined.strip()
gpl-2.0
shaufi10/odoo
addons/document/content_index.py
430
6619
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import logging import os import tempfile from subprocess import Popen, PIPE _logger = logging.getLogger(__name__) class NhException(Exception): pass class indexer(object): """ An indexer knows how to parse the content of some file. Typically, one indexer should be instantiated per file type. Override this class to add more functionality. Note that you should only override the Content or the File methods that give an optimal result. """ def _getMimeTypes(self): """ Return supported mimetypes """ return [] def _getExtensions(self): return [] def _getDefMime(self, ext): """ Return a mimetype for this document type, ideally the closest to the extension ext. """ mts = self._getMimeTypes(); if len (mts): return mts[0] return None def indexContent(self, content, filename=None, realfile=None): """ Use either content or the real file, to index. Some parsers will work better with the actual content, others parse a file easier. Try the optimal. 
""" res = '' try: if content != None: return self._doIndexContent(content) except NhException: pass if realfile != None: try: return self._doIndexFile(realfile) except NhException: pass fp = open(realfile,'rb') try: content2 = fp.read() finally: fp.close() # The not-handled exception may be raised here return self._doIndexContent(content2) # last try, with a tmp file if content: try: fname,ext = filename and os.path.splitext(filename) or ('','') fd, rfname = tempfile.mkstemp(suffix=ext) os.write(fd, content) os.close(fd) res = self._doIndexFile(rfname) os.unlink(rfname) return res except NhException: pass raise NhException('No appropriate method to index file.') def _doIndexContent(self, content): raise NhException("Content cannot be handled here.") def _doIndexFile(self, fpath): raise NhException("Content cannot be handled here.") def __repr__(self): return "<indexer %s.%s>" %(self.__module__, self.__class__.__name__) def mime_match(mime, mdict): if mdict.has_key(mime): return (mime, mdict[mime]) if '/' in mime: mpat = mime.split('/')[0]+'/*' if mdict.has_key(mpat): return (mime, mdict[mpat]) return (None, None) class contentIndex(object): def __init__(self): self.mimes = {} self.exts = {} def register(self, obj): f = False for mime in obj._getMimeTypes(): self.mimes[mime] = obj f = True for ext in obj._getExtensions(): self.exts[ext] = obj f = True if f: _logger.debug('Register content indexer: %r.', obj) if not f: raise Exception("Your indexer should at least support a mimetype or extension.") def doIndex(self, content, filename=None, content_type=None, realfname=None, debug=False): fobj = None fname = None mime = None if content_type and self.mimes.has_key(content_type): mime = content_type fobj = self.mimes[content_type] elif filename: bname,ext = os.path.splitext(filename) if self.exts.has_key(ext): fobj = self.exts[ext] mime = fobj._getDefMime(ext) if content_type and not fobj: mime,fobj = mime_match(content_type, self.mimes) if not fobj: try: if realfname : 
fname = realfname else: try: bname,ext = os.path.splitext(filename or 'test.tmp') except Exception: bname, ext = filename, 'tmp' fd, fname = tempfile.mkstemp(suffix=ext) os.write(fd, content) os.close(fd) pop = Popen(['file','-b','--mime',fname], shell=False, stdout=PIPE) (result, _) = pop.communicate() mime2 = result.split(';')[0] _logger.debug('File gives us: %s', mime2) # Note that the temporary file still exists now. mime,fobj = mime_match(mime2, self.mimes) if not mime: mime = mime2 except Exception: _logger.exception('Cannot determine mime type.') try: if fobj: res = (mime, fobj.indexContent(content,filename,fname or realfname) ) else: _logger.debug("Have no object, return (%s, None).", mime) res = (mime, '') except Exception: _logger.exception("Cannot index file %s (%s).", filename, fname or realfname) res = (mime, '') # If we created a tmp file, unlink it now if not realfname and fname: try: os.unlink(fname) except Exception: _logger.exception("Cannot unlink %s.", fname) return res cntIndex = contentIndex() # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
puzan/ansible
test/units/modules/cloud/amazon/test_s3.py
49
1262
import pytest boto = pytest.importorskip("boto") import unittest import ansible.modules.cloud.amazon.s3 as s3 from ansible.module_utils.six.moves.urllib.parse import urlparse class TestUrlparse(unittest.TestCase): def test_urlparse(self): actual = urlparse("http://test.com/here") self.assertEqual("http", actual.scheme) self.assertEqual("test.com", actual.netloc) self.assertEqual("/here", actual.path) def test_is_fakes3(self): actual = s3.is_fakes3("fakes3://bla.blubb") self.assertEqual(True, actual) def test_is_walrus(self): actual = s3.is_walrus("trulywalrus_but_invalid_url") #I don't know if this makes sense, but this is the current behaviour... self.assertEqual(True, actual) actual = s3.is_walrus("http://notwalrus.amazonaws.com") self.assertEqual(False, actual) def test_get_s3_connection(self): aws_connect_kwargs = dict(aws_access_key_id="access_key", aws_secret_access_key="secret_key") location=None rgw=True s3_url="http://bla.blubb" actual = s3.get_s3_connection(aws_connect_kwargs, location, rgw, s3_url) self.assertEqual("bla.blubb", actual.host)
gpl-3.0
sorenk/ansible
lib/ansible/modules/cloud/ovirt/ovirt_mac_pools.py
73
5430
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright (c) 2016 Red Hat, Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: ovirt_mac_pools short_description: Module to manage MAC pools in oVirt/RHV version_added: "2.3" author: "Ondra Machacek (@machacekondra)" description: - "This module manage MAC pools in oVirt/RHV." options: name: description: - "Name of the MAC pool to manage." required: true description: description: - "Description of the MAC pool." state: description: - "Should the mac pool be present or absent." choices: ['present', 'absent'] default: present allow_duplicates: description: - "If (true) allow a MAC address to be used multiple times in a pool." - "Default value is set by oVirt/RHV engine to I(false)." ranges: description: - "List of MAC ranges. The from and to should be split by comma." 
- "For example: 00:1a:4a:16:01:51,00:1a:4a:16:01:61" extends_documentation_fragment: ovirt ''' EXAMPLES = ''' # Examples don't contain auth parameter for simplicity, # look at ovirt_auth module to see how to reuse authentication: # Create MAC pool: - ovirt_mac_pools: name: mymacpool allow_duplicates: false ranges: - 00:1a:4a:16:01:51,00:1a:4a:16:01:61 - 00:1a:4a:16:02:51,00:1a:4a:16:02:61 # Remove MAC pool: - ovirt_mac_pools: state: absent name: mymacpool ''' RETURN = ''' id: description: ID of the MAC pool which is managed returned: On success if MAC pool is found. type: str sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c template: description: "Dictionary of all the MAC pool attributes. MAC pool attributes can be found on your oVirt/RHV instance at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/mac_pool." returned: On success if MAC pool is found. type: dict ''' import traceback try: import ovirtsdk4.types as otypes except ImportError: pass from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.ovirt import ( BaseModule, check_sdk, equal, create_connection, ovirt_full_argument_spec, ) class MACPoolModule(BaseModule): def build_entity(self): return otypes.MacPool( name=self._module.params['name'], allow_duplicates=self._module.params['allow_duplicates'], description=self._module.params['description'], ranges=[ otypes.Range( from_=mac_range.split(',')[0], to=mac_range.split(',')[1], ) for mac_range in self._module.params['ranges'] ], ) def _compare_ranges(self, entity): if self._module.params['ranges'] is not None: ranges = sorted([ '%s,%s' % (mac_range.from_, mac_range.to) for mac_range in entity.ranges ]) return equal(sorted(self._module.params['ranges']), ranges) return True def update_check(self, entity): return ( self._compare_ranges(entity) and equal(self._module.params['allow_duplicates'], entity.allow_duplicates) and equal(self._module.params['description'], entity.description) ) def main(): argument_spec = 
ovirt_full_argument_spec( state=dict( choices=['present', 'absent'], default='present', ), name=dict(default=None, required=True), allow_duplicates=dict(default=None, type='bool'), description=dict(default=None), ranges=dict(default=None, type='list'), ) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True, ) check_sdk(module) try: auth = module.params.pop('auth') connection = create_connection(auth) mac_pools_service = connection.system_service().mac_pools_service() mac_pools_module = MACPoolModule( connection=connection, module=module, service=mac_pools_service, ) state = module.params['state'] if state == 'present': ret = mac_pools_module.create() elif state == 'absent': ret = mac_pools_module.remove() module.exit_json(**ret) except Exception as e: module.fail_json(msg=str(e), exception=traceback.format_exc()) finally: connection.close(logout=auth.get('token') is None) if __name__ == "__main__": main()
gpl-3.0
foolprooflabs/JackB03
plugins/ti.alloy/plugin.py
1729
5251
import os, sys, subprocess, hashlib import subprocess def check_output(*popenargs, **kwargs): r"""Run command with arguments and return its output as a byte string. Backported from Python 2.7 as it's implemented as pure python on stdlib. >>> check_output(['/usr/bin/python', '--version']) Python 2.6.2 """ process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) output, unused_err = process.communicate() retcode = process.poll() if retcode: cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] error = subprocess.CalledProcessError(retcode, cmd) error.output = output raise error return output def compile(config): paths = {} binaries = ["alloy","node"] dotAlloy = os.path.abspath(os.path.join(config['project_dir'], 'build', '.alloynewcli')) if os.path.exists(dotAlloy): print "[DEBUG] build/.alloynewcli file found, skipping plugin..." os.remove(dotAlloy) else: for binary in binaries: try: # see if the environment variable is defined paths[binary] = os.environ["ALLOY_" + ("NODE_" if binary == "node" else "") + "PATH"] except KeyError as ex: # next try PATH, and then our guess paths if sys.platform == "darwin" or sys.platform.startswith('linux'): userPath = os.environ["HOME"] guessPaths = [ "/usr/local/bin/"+binary, "/opt/local/bin/"+binary, userPath+"/local/bin/"+binary, "/opt/bin/"+binary, "/usr/bin/"+binary, "/usr/local/share/npm/bin/"+binary ] try: binaryPath = check_output(["which",binary], stderr=subprocess.STDOUT).strip() print "[DEBUG] %s installed at '%s'" % (binary,binaryPath) except: print "[WARN] Couldn't find %s on your PATH:" % binary print "[WARN] %s" % os.environ["PATH"] print "[WARN]" print "[WARN] Checking for %s in a few default locations:" % binary for p in guessPaths: sys.stdout.write("[WARN] %s -> " % p) if os.path.exists(p): binaryPath = p print "FOUND" break else: print "not found" binaryPath = None if binaryPath is None: print "[ERROR] Couldn't find %s" % binary sys.exit(1) else: paths[binary] = binaryPath # no guesses on 
windows, just use the PATH elif sys.platform == "win32": paths["alloy"] = "alloy.cmd" f = os.path.abspath(os.path.join(config['project_dir'], 'app')) if os.path.exists(f): print "[INFO] alloy app found at %s" % f rd = os.path.abspath(os.path.join(config['project_dir'], 'Resources')) devicefamily = 'none' simtype = 'none' version = '0' deploytype = 'development' if config['platform']==u'ios': version = config['iphone_version'] devicefamily = config['devicefamily'] deploytype = config['deploytype'] if config['platform']==u'android': builder = config['android_builder'] version = builder.tool_api_level deploytype = config['deploy_type'] if config['platform']==u'mobileweb': builder = config['mobileweb_builder'] deploytype = config['deploytype'] cfg = "platform=%s,version=%s,simtype=%s,devicefamily=%s,deploytype=%s," % (config['platform'],version,simtype,devicefamily,deploytype) if sys.platform == "win32": cmd = [paths["alloy"], "compile", f, "--no-colors", "--config", cfg] else: cmd = [paths["node"], paths["alloy"], "compile", f, "--no-colors", "--config", cfg] print "[INFO] Executing Alloy compile:" print "[INFO] %s" % " ".join(cmd) try: print check_output(cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as ex: if hasattr(ex, 'output'): print ex.output print "[ERROR] Alloy compile failed" retcode = 1 if hasattr(ex, 'returncode'): retcode = ex.returncode sys.exit(retcode) except EnvironmentError as ex: print "[ERROR] Unexpected error with Alloy compiler plugin: %s" % ex.strerror sys.exit(2)
apache-2.0
qma/pants
src/python/pants/help/help_formatter.py
4
2867
# coding=utf-8 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) from textwrap import wrap from colors import blue, cyan, green, red from pants.help.help_info_extracter import HelpInfoExtracter class HelpFormatter(object): def __init__(self, scope, show_recursive, show_advanced, color): self._scope = scope self._show_recursive = show_recursive self._show_advanced = show_advanced self._color = color def _maybe_blue(self, s): return self._maybe_color(blue, s) def _maybe_cyan(self, s): return self._maybe_color(cyan, s) def _maybe_green(self, s): return self._maybe_color(green, s) def _maybe_red(self, s): return self._maybe_color(red, s) def _maybe_color(self, color, s): return color(s) if self._color else s def format_options(self, scope, description, option_registrations_iter): """Return a help message for the specified options. :param option_registrations_iter: An iterator over (args, kwargs) pairs, as passed in to options registration. 
""" oshi = HelpInfoExtracter(self._scope).get_option_scope_help_info(option_registrations_iter) lines = [] def add_option(category, ohis): if ohis: lines.append('') display_scope = scope or 'Global' if category: lines.append(self._maybe_blue('{} {} options:'.format(display_scope, category))) else: lines.append(self._maybe_blue('{} options:'.format(display_scope))) if description: lines.append(description) lines.append(' ') for ohi in ohis: lines.extend(self.format_option(ohi)) add_option('', oshi.basic) if self._show_recursive: add_option('recursive', oshi.recursive) if self._show_advanced: add_option('advanced', oshi.advanced) return lines def format_option(self, ohi): lines = [] arg_line = ('{args} {fromfile}{dflt}' .format(args=self._maybe_cyan(', '.join(ohi.display_args)), dflt=self._maybe_green('(default: {})'.format(ohi.default)), fromfile=self._maybe_green('(@fromfile value supported) ' if ohi.fromfile else ''))) lines.append(arg_line) indent = ' ' lines.extend(['{}{}'.format(indent, s) for s in wrap(ohi.help, 76)]) if ohi.deprecated_message: lines.append(self._maybe_red('{}{}.'.format(indent, ohi.deprecated_message))) if ohi.deprecated_hint: lines.append(self._maybe_red('{}{}'.format(indent, ohi.deprecated_hint))) return lines
apache-2.0
veselosky/schemazoid
schemazoid/micromodels/models.py
1
6716
import six from .fields import Field class MetaModel(type): """The metaclass for :class:`~schemazoid.micromodels.Model`. The main function of this metaclass is to move all of fields into the ``_clsfields`` variable on the class. """ def __new__(cls, name, bases, attrs): fields = {} for base in bases[::-1]: if hasattr(base, '_clsfields'): fields.update(base._clsfields) # Somehow if you iterate over attrs before creating the class, the # class docstring gets lost. So we create the class first and # manipulate its attrs after. newclass = super(MetaModel, cls).__new__(cls, name, bases, attrs) to_remove = [] for name in dir(newclass): if isinstance(getattr(newclass, name), Field): fields[name] = getattr(newclass, name) to_remove.append(name) for name in to_remove: delattr(newclass, name) newclass._clsfields = fields return newclass # TODO Add model-level validation to support cross-field dependencies. @six.add_metaclass(MetaModel) class Model(object): """The ``Model`` is the key class of the micromodels framework. To begin modeling your data structure, subclass ``Model`` and add Fields describing its structure. :: >>> from schemazoid import micromodels as m >>> class Thing(m.Model): ... name = m.CharField() ... description = m.CharField() A Model instance may be constructed as with any Python object. :: >>> thing = Thing() More useful and typical is to instatiate a model from a dictionary. :: >>> data = {'name': 'spoon', 'description': 'There is no spoon.'} >>> thing = Thing(data) >>> thing.name u'spoon' >>> thing.description u'There is no spoon.' >>> thing.update(name='spork') >>> thing.name u'spork' >>> fork = {'name': 'fork', 'description': "Stick it in me, I'm done."} >>> thing.update(fork) >>> thing.description u"Stick it in me, I'm done." 
""" def __init__(self, *args, **kwargs): super(Model, self).__init__() # an edge case, we can't call our own __setattr__ before # _instance_fields is initialized, since it calls get_field() super(Model, self).__setattr__('_instance_fields', {}) if args: self.update(args[0]) if kwargs: self.update(kwargs) # We override __setattr__ so that setting attributes passes through field # conversion/validation functions. def __setattr__(self, key, value): field = self.get_field(key) if field: super(Model, self).__setattr__(key, field.to_python(value)) else: super(Model, self).__setattr__(key, value) @classmethod def get_class_field(cls, name): """Return the Field instance for the class field of the given name. Returns None if there is no Field by that name on the class. """ return cls._clsfields.get(name, None) @classmethod def get_class_fields(cls): """Return a dictionary of Fields on this class, keyed by name.""" return cls._clsfields @classmethod def add_class_field(cls, name, field): """Extend a class by adding a new field to the class definition.""" if not isinstance(field, Field): msg = "Second argument to add_class_field must be a Field instance" raise TypeError(msg) cls._clsfields[name] = field def get_field(self, name): """Return the Field instance for the given name on this object. This instance method searches both the instance and the class. """ field = self._instance_fields.get(name, None) if not field: field = self.__class__.get_class_field(name) return field def get_all_fields(self): """Return a dictionary of all Fields on this instance, keyed by name. Includes both class fields and instance fields. """ return dict(self.__class__.get_class_fields(), **self._instance_fields) def update(self, *args, **kwargs): """As with the :class:`dict` method of the same name, given a dictionary or keyword arguments, sets the values of the instance attributes corresponding to the key names, overriding any existing value. 
""" data = args[0] if args else {} for name in self.get_all_fields(): if name in kwargs: setattr(self, name, kwargs[name]) elif name in data: setattr(self, name, data[name]) def add_field(self, name, field): """Adds an instance field to this Model instance. Instance fields allow you to validate and serialize arbitrary attributes on a Model instance even if the class does not support them. """ self._instance_fields[name] = field if hasattr(self, name): # Should raise exception if current value not valid setattr(self, name, getattr(self, name)) def to_dict(self, serial=False): """Returns a dictionary representing the data of the instance, containing native Python objects which might not be serializable (for example, :class:`~datetime.datetime` objects). To obtain a serializable dictionary, call the :meth:`~schemazoid.micromodels.Model.to_serial` method instead, or pass the ``serial`` argument with a True value. Note that only attributes declared as Fields will be included in the dictionary. Although you may set other attributes on the instance, those additional attributes will not be returned. """ if serial: return dict( (key, self.get_field(key).to_serial(getattr(self, key))) for key in self.get_all_fields() if hasattr(self, key)) else: return dict((key, getattr(self, key)) for key in self.get_all_fields() if hasattr(self, key)) # Fields have to_serial, for symmetry models should have it to. def to_serial(self): """Returns a serializable dictionary representing the data of the instance. It should be safe to hand this dictionary as-is to :func:`json.dumps`. Note that only attributes declared as Fields will be included in the dictionary. Although you may set other attributes on the instance, those additional attributes will not be returned. """ return self.to_dict(serial=True)
apache-2.0
LumPenPacK/NetworkExtractionFromImages
osx_build/nefi2_osx_amd64_xcode_2015/site-packages/networkx/utils/tests/test_heaps.py
64
3979
from nose.tools import * import networkx as nx from networkx.utils import * class X(object): def __eq__(self, other): raise self is other def __ne__(self, other): raise self is not other def __lt__(self, other): raise TypeError('cannot compare') def __le__(self, other): raise TypeError('cannot compare') def __ge__(self, other): raise TypeError('cannot compare') def __gt__(self, other): raise TypeError('cannot compare') def __hash__(self): return hash(id(self)) x = X() data = [# min should not invent an element. ('min', nx.NetworkXError), # Popping an empty heap should fail. ('pop', nx.NetworkXError), # Getting nonexisting elements should return None. ('get', 0, None), ('get', x, None), ('get', None, None), # Inserting a new key should succeed. ('insert', x, 1, True), ('get', x, 1), ('min', (x, 1)), # min should not pop the top element. ('min', (x, 1)), # Inserting a new key of different type should succeed. ('insert', 1, -2.0, True), # int and float values should interop. ('min', (1, -2.0)), # pop removes minimum-valued element. ('insert', 3, -10 ** 100, True), ('insert', 4, 5, True), ('pop', (3, -10 ** 100)), ('pop', (1, -2.0)), # Decrease-insert should succeed. ('insert', 4, -50, True), ('insert', 4, -60, False, True), # Decrease-insert should not create duplicate keys. ('pop', (4, -60)), ('pop', (x, 1)), # Popping all elements should empty the heap. ('min', nx.NetworkXError), ('pop', nx.NetworkXError), # Non-value-changing insert should fail. ('insert', x, 0, True), ('insert', x, 0, False, False), ('min', (x, 0)), ('insert', x, 0, True, False), ('min', (x, 0)), # Failed insert should not create duplicate keys. ('pop', (x, 0)), ('pop', nx.NetworkXError), # Increase-insert should succeed when allowed. ('insert', None, 0, True), ('insert', 2, -1, True), ('min', (2, -1)), ('insert', 2, 1, True, False), ('min', (None, 0)), # Increase-insert should fail when disallowed. 
('insert', None, 2, False, False), ('min', (None, 0)), # Failed increase-insert should not create duplicate keys. ('pop', (None, 0)), ('pop', (2, 1)), ('min', nx.NetworkXError), ('pop', nx.NetworkXError)] def _test_heap_class(cls, *args, **kwargs): heap = cls(*args, **kwargs) # Basic behavioral test for op in data: if op[-1] is not nx.NetworkXError: assert_equal(op[-1], getattr(heap, op[0])(*op[1:-1])) else: assert_raises(op[-1], getattr(heap, op[0]), *op[1:-1]) # Coverage test. for i in range(99, -1, -1): assert_true(heap.insert(i, i)) for i in range(50): assert_equal(heap.pop(), (i, i)) for i in range(100): assert_equal(heap.insert(i, i), i < 50) for i in range(100): assert_false(heap.insert(i, i + 1)) for i in range(50): assert_equal(heap.pop(), (i, i)) for i in range(100): assert_equal(heap.insert(i, i + 1), i < 50) for i in range(49): assert_equal(heap.pop(), (i, i + 1)) assert_equal(sorted([heap.pop(), heap.pop()]), [(49, 50), (50, 50)]) for i in range(51, 100): assert_false(heap.insert(i, i + 1, True)) for i in range(51, 70): assert_equal(heap.pop(), (i, i + 1)) for i in range(100): assert_true(heap.insert(i, i)) for i in range(100): assert_equal(heap.pop(), (i, i)) assert_raises(nx.NetworkXError, heap.pop) def test_PairingHeap(): _test_heap_class(PairingHeap) def test_BinaryHeap(): _test_heap_class(BinaryHeap)
bsd-2-clause
arju88nair/projectCulminate
venv/lib/python3.5/site-packages/urllib3/util/request.py
205
3705
from __future__ import absolute_import from base64 import b64encode from ..packages.six import b, integer_types from ..exceptions import UnrewindableBodyError ACCEPT_ENCODING = 'gzip,deflate' _FAILEDTELL = object() def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, basic_auth=None, proxy_basic_auth=None, disable_cache=None): """ Shortcuts for generating request headers. :param keep_alive: If ``True``, adds 'connection: keep-alive' header. :param accept_encoding: Can be a boolean, list, or string. ``True`` translates to 'gzip,deflate'. List will get joined by comma. String will be used as provided. :param user_agent: String representing the user-agent you want, such as "python-urllib3/0.6" :param basic_auth: Colon-separated username:password string for 'authorization: basic ...' auth header. :param proxy_basic_auth: Colon-separated username:password string for 'proxy-authorization: basic ...' auth header. :param disable_cache: If ``True``, adds 'cache-control: no-cache' header. Example:: >>> make_headers(keep_alive=True, user_agent="Batman/1.0") {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} >>> make_headers(accept_encoding=True) {'accept-encoding': 'gzip,deflate'} """ headers = {} if accept_encoding: if isinstance(accept_encoding, str): pass elif isinstance(accept_encoding, list): accept_encoding = ','.join(accept_encoding) else: accept_encoding = ACCEPT_ENCODING headers['accept-encoding'] = accept_encoding if user_agent: headers['user-agent'] = user_agent if keep_alive: headers['connection'] = 'keep-alive' if basic_auth: headers['authorization'] = 'Basic ' + \ b64encode(b(basic_auth)).decode('utf-8') if proxy_basic_auth: headers['proxy-authorization'] = 'Basic ' + \ b64encode(b(proxy_basic_auth)).decode('utf-8') if disable_cache: headers['cache-control'] = 'no-cache' return headers def set_file_position(body, pos): """ If a position is provided, move file to that point. Otherwise, we'll attempt to record a position for future use. 
""" if pos is not None: rewind_body(body, pos) elif getattr(body, 'tell', None) is not None: try: pos = body.tell() except (IOError, OSError): # This differentiates from None, allowing us to catch # a failed `tell()` later when trying to rewind the body. pos = _FAILEDTELL return pos def rewind_body(body, body_pos): """ Attempt to rewind body to a certain position. Primarily used for request redirects and retries. :param body: File-like object that supports seek. :param int pos: Position to seek to in file. """ body_seek = getattr(body, 'seek', None) if body_seek is not None and isinstance(body_pos, integer_types): try: body_seek(body_pos) except (IOError, OSError): raise UnrewindableBodyError("An error occurred when rewinding request " "body for redirect/retry.") elif body_pos is _FAILEDTELL: raise UnrewindableBodyError("Unable to record file position for rewinding " "request body during a redirect/retry.") else: raise ValueError("body_pos must be of type integer, " "instead it was %s." % type(body_pos))
apache-2.0
csrocha/OpenUpgrade
addons/account_analytic_plans/account_analytic_plans.py
123
23336
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import time from lxml import etree from openerp.osv import fields, osv from openerp import tools from openerp.tools.translate import _ class one2many_mod2(fields.one2many): def get(self, cr, obj, ids, name, user=None, offset=0, context=None, values=None): if context is None: context = {} res = {} for id in ids: res[id] = [] ids2 = None if 'journal_id' in context: journal = obj.pool.get('account.journal').browse(cr, user, context['journal_id'], context=context) pnum = int(name[7]) -1 plan = journal.plan_id if plan and len(plan.plan_ids) > pnum: acc_id = plan.plan_ids[pnum].root_analytic_id.id ids2 = obj.pool[self._obj].search(cr, user, [(self._fields_id,'in',ids),('analytic_account_id','child_of',[acc_id])], limit=self._limit) if ids2 is None: ids2 = obj.pool[self._obj].search(cr, user, [(self._fields_id,'in',ids)], limit=self._limit) for r in obj.pool[self._obj].read(cr, user, ids2, [self._fields_id], context=context, load='_classic_write'): key = r[self._fields_id] if isinstance(key, tuple): # Read return a tuple in the case where the field 
is a many2one # but we want to get the id of this field. key = key[0] res[key].append( r['id'] ) return res class account_analytic_line(osv.osv): _inherit = 'account.analytic.line' _description = 'Analytic Line' def _get_amount(self, cr, uid, ids, name, args, context=None): res = {} for id in ids: res.setdefault(id, 0.0) for line in self.browse(cr, uid, ids, context=context): amount = line.move_id and line.move_id.amount_currency * (line.percentage / 100) or 0.0 res[line.id] = amount return res _columns = { 'amount_currency': fields.function(_get_amount, string="Amount Currency", type="float", store=True, help="The amount expressed in the related account currency if not equal to the company one.", readonly=True), 'percentage': fields.float('Percentage') } class account_analytic_plan(osv.osv): _name = "account.analytic.plan" _description = "Analytic Plan" _columns = { 'name': fields.char('Analytic Plan', required=True, select=True), 'plan_ids': fields.one2many('account.analytic.plan.line', 'plan_id', 'Analytic Plans', copy=True), } class account_analytic_plan_line(osv.osv): _name = "account.analytic.plan.line" _description = "Analytic Plan Line" _order = "sequence, id" _columns = { 'plan_id': fields.many2one('account.analytic.plan','Analytic Plan',required=True), 'name': fields.char('Axis Name', required=True, select=True), 'sequence': fields.integer('Sequence'), 'root_analytic_id': fields.many2one('account.analytic.account', 'Root Account', help="Root account of this plan.", required=False), 'min_required': fields.float('Minimum Allowed (%)'), 'max_required': fields.float('Maximum Allowed (%)'), } _defaults = { 'min_required': 100.0, 'max_required': 100.0, } class account_analytic_plan_instance(osv.osv): _name = "account.analytic.plan.instance" _description = "Analytic Plan Instance" _columns = { 'name': fields.char('Analytic Distribution'), 'code': fields.char('Distribution Code', size=16), 'journal_id': fields.many2one('account.analytic.journal', 'Analytic 
Journal' ), 'account_ids': fields.one2many('account.analytic.plan.instance.line', 'plan_id', 'Account Id', copy=True), 'account1_ids': one2many_mod2('account.analytic.plan.instance.line', 'plan_id', 'Account1 Id'), 'account2_ids': one2many_mod2('account.analytic.plan.instance.line', 'plan_id', 'Account2 Id'), 'account3_ids': one2many_mod2('account.analytic.plan.instance.line', 'plan_id', 'Account3 Id'), 'account4_ids': one2many_mod2('account.analytic.plan.instance.line', 'plan_id', 'Account4 Id'), 'account5_ids': one2many_mod2('account.analytic.plan.instance.line', 'plan_id', 'Account5 Id'), 'account6_ids': one2many_mod2('account.analytic.plan.instance.line', 'plan_id', 'Account6 Id'), 'plan_id': fields.many2one('account.analytic.plan', "Model's Plan"), } def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False): if context is None: context = {} journal_obj = self.pool.get('account.journal') if context.get('journal_id', False): journal = journal_obj.browse(cr, user, [context['journal_id']], context=context)[0] analytic_journal = journal.analytic_journal_id and journal.analytic_journal_id.id or False args.append('|') args.append(('journal_id', '=', analytic_journal)) args.append(('journal_id', '=', False)) res = super(account_analytic_plan_instance, self).search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count) return res def _default_journal(self, cr, uid, context=None): if context is None: context = {} journal_obj = self.pool.get('account.journal') if context.has_key('journal_id') and context['journal_id']: journal = journal_obj.browse(cr, uid, context['journal_id'], context=context) if journal.analytic_journal_id: return journal.analytic_journal_id.id return False _defaults = { 'plan_id': False, 'journal_id': _default_journal, } def name_get(self, cr, uid, ids, context=None): res = [] for inst in self.browse(cr, uid, ids, context=context): name = inst.name or '/' if name and inst.code: 
name=name+' ('+inst.code+')' res.append((inst.id, name)) return res def name_search(self, cr, uid, name, args=None, operator='ilike', context=None, limit=100): args = args or [] if name: ids = self.search(cr, uid, [('code', '=', name)] + args, limit=limit, context=context or {}) if not ids: ids = self.search(cr, uid, [('name', operator, name)] + args, limit=limit, context=context or {}) else: ids = self.search(cr, uid, args, limit=limit, context=context or {}) return self.name_get(cr, uid, ids, context or {}) def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): if context is None: context = {} wiz_id = self.pool.get('ir.actions.act_window').search(cr, uid, [("name","=","analytic.plan.create.model.action")], context=context) res = super(account_analytic_plan_instance,self).fields_view_get(cr, uid, view_id, view_type, context, toolbar=toolbar, submenu=submenu) journal_obj = self.pool.get('account.journal') analytic_plan_obj = self.pool.get('account.analytic.plan') if (res['type']=='form'): plan_id = False if context.get('journal_id', False): plan_id = journal_obj.browse(cr, uid, int(context['journal_id']), context=context).plan_id elif context.get('plan_id', False): plan_id = analytic_plan_obj.browse(cr, uid, int(context['plan_id']), context=context) if plan_id: i=1 res['arch'] = """<form string="%s"> <field name="name"/> <field name="code"/> <field name="journal_id"/> <button name="%d" string="Save This Distribution as a Model" type="action" colspan="2"/> """% (tools.to_xml(plan_id.name), wiz_id[0]) for line in plan_id.plan_ids: res['arch']+=""" <field name="account%d_ids" string="%s" nolabel="1" colspan="4"> <tree string="%s" editable="bottom"> <field name="rate"/> <field name="analytic_account_id" domain="[('parent_id','child_of',[%d])]" groups="analytic.group_analytic_accounting"/> </tree> </field> <newline/>"""%(i,tools.to_xml(line.name),tools.to_xml(line.name),line.root_analytic_id and 
line.root_analytic_id.id or 0) i+=1 res['arch'] += "</form>" doc = etree.fromstring(res['arch'].encode('utf8')) xarch, xfields = self._view_look_dom_arch(cr, uid, doc, view_id, context=context) res['arch'] = xarch res['fields'] = xfields return res else: return res def create(self, cr, uid, vals, context=None): journal_obj = self.pool.get('account.journal') ana_plan_instance_obj = self.pool.get('account.analytic.plan.instance') acct_anal_acct = self.pool.get('account.analytic.account') acct_anal_plan_line_obj = self.pool.get('account.analytic.plan.line') if context and context.get('journal_id'): journal = journal_obj.browse(cr, uid, context['journal_id'], context=context) pids = ana_plan_instance_obj.search(cr, uid, [('name','=',vals['name']), ('code','=',vals['code']), ('plan_id','<>',False)], context=context) if pids: raise osv.except_osv(_('Error!'), _('A model with this name and code already exists.')) res = acct_anal_plan_line_obj.search(cr, uid, [('plan_id','=',journal.plan_id.id)], context=context) for i in res: total_per_plan = 0 item = acct_anal_plan_line_obj.browse(cr, uid, i, context=context) temp_list = ['account1_ids','account2_ids','account3_ids','account4_ids','account5_ids','account6_ids'] for l in temp_list: if vals.has_key(l): for tempo in vals[l]: if acct_anal_acct.search(cr, uid, [('parent_id', 'child_of', [item.root_analytic_id.id]), ('id', '=', tempo[2]['analytic_account_id'])], context=context): total_per_plan += tempo[2]['rate'] if total_per_plan < item.min_required or total_per_plan > item.max_required: raise osv.except_osv(_('Error!'),_('The total should be between %s and %s.') % (str(item.min_required), str(item.max_required))) return super(account_analytic_plan_instance, self).create(cr, uid, vals, context=context) def write(self, cr, uid, ids, vals, context=None, check=True, update_check=True): if context is None: context = {} this = self.browse(cr, uid, ids[0], context=context) invoice_line_obj = self.pool.get('account.invoice.line') 
if this.plan_id and not vals.has_key('plan_id'): #this instance is a model, so we have to create a new plan instance instead of modifying it #copy the existing model temp_id = self.copy(cr, uid, this.id, None, context=context) #get the list of the invoice line that were linked to the model lists = invoice_line_obj.search(cr, uid, [('analytics_id','=',this.id)], context=context) #make them link to the copy invoice_line_obj.write(cr, uid, lists, {'analytics_id':temp_id}, context=context) #and finally modify the old model to be not a model anymore vals['plan_id'] = False if not vals.has_key('name'): vals['name'] = this.name and (str(this.name)+'*') or "*" if not vals.has_key('code'): vals['code'] = this.code and (str(this.code)+'*') or "*" return super(account_analytic_plan_instance, self).write(cr, uid, ids, vals, context=context) class account_analytic_plan_instance_line(osv.osv): _name = "account.analytic.plan.instance.line" _description = "Analytic Instance Line" _rec_name = "analytic_account_id" _columns = { 'plan_id': fields.many2one('account.analytic.plan.instance', 'Plan Id'), 'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account', required=True, domain=[('type','<>','view')]), 'rate': fields.float('Rate (%)', required=True), } _defaults = { 'rate': 100.0 } def name_get(self, cr, uid, ids, context=None): if not ids: return [] reads = self.read(cr, uid, ids, ['analytic_account_id'], context=context) res = [] for record in reads: res.append((record['id'], record['analytic_account_id'])) return res class account_journal(osv.osv): _inherit = "account.journal" _name = "account.journal" _columns = { 'plan_id': fields.many2one('account.analytic.plan', 'Analytic Plans'), } class account_invoice_line(osv.osv): _inherit = "account.invoice.line" _name = "account.invoice.line" _columns = { 'analytics_id': fields.many2one('account.analytic.plan.instance', 'Analytic Distribution'), } def create(self, cr, uid, vals, context=None): if 
'analytics_id' in vals and isinstance(vals['analytics_id'], tuple): vals['analytics_id'] = vals['analytics_id'][0] return super(account_invoice_line, self).create(cr, uid, vals, context=context) def move_line_get_item(self, cr, uid, line, context=None): res = super(account_invoice_line, self).move_line_get_item(cr, uid, line, context=context) res ['analytics_id'] = line.analytics_id and line.analytics_id.id or False return res def product_id_change(self, cr, uid, ids, product, uom_id, qty=0, name='', type='out_invoice', partner_id=False, fposition_id=False, price_unit=False, currency_id=False, company_id=None, context=None): res_prod = super(account_invoice_line, self).product_id_change(cr, uid, ids, product, uom_id, qty, name, type, partner_id, fposition_id, price_unit, currency_id, company_id=company_id, context=context) rec = self.pool.get('account.analytic.default').account_get(cr, uid, product, partner_id, uid, time.strftime('%Y-%m-%d'), context=context) if rec and rec.analytics_id: res_prod['value'].update({'analytics_id': rec.analytics_id.id}) return res_prod class account_move_line(osv.osv): _inherit = "account.move.line" _name = "account.move.line" _columns = { 'analytics_id':fields.many2one('account.analytic.plan.instance', 'Analytic Distribution'), } def _default_get_move_form_hook(self, cursor, user, data): data = super(account_move_line, self)._default_get_move_form_hook(cursor, user, data) if data.has_key('analytics_id'): del(data['analytics_id']) return data def create_analytic_lines(self, cr, uid, ids, context=None): if context is None: context = {} super(account_move_line, self).create_analytic_lines(cr, uid, ids, context=context) analytic_line_obj = self.pool.get('account.analytic.line') for line in self.browse(cr, uid, ids, context=context): if line.analytics_id: if not line.journal_id.analytic_journal_id: raise osv.except_osv(_('No Analytic Journal!'),_("You have to define an analytic journal on the '%s' journal.") % (line.journal_id.name,)) 
toremove = analytic_line_obj.search(cr, uid, [('move_id','=',line.id)], context=context) if toremove: analytic_line_obj.unlink(cr, uid, toremove, context=context) for line2 in line.analytics_id.account_ids: val = (line.credit or 0.0) - (line.debit or 0.0) amt=val * (line2.rate/100) al_vals={ 'name': line.name, 'date': line.date, 'account_id': line2.analytic_account_id.id, 'unit_amount': line.quantity, 'product_id': line.product_id and line.product_id.id or False, 'product_uom_id': line.product_uom_id and line.product_uom_id.id or False, 'amount': amt, 'general_account_id': line.account_id.id, 'move_id': line.id, 'journal_id': line.journal_id.analytic_journal_id.id, 'ref': line.ref, 'percentage': line2.rate } analytic_line_obj.create(cr, uid, al_vals, context=context) return True def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False): if context is None: context = {} result = super(account_move_line, self).fields_view_get(cr, uid, view_id, view_type, context, toolbar=toolbar, submenu=submenu) return result class account_invoice(osv.osv): _name = "account.invoice" _inherit = "account.invoice" def line_get_convert(self, cr, uid, x, part, date, context=None): res=super(account_invoice,self).line_get_convert(cr, uid, x, part, date, context=context) res['analytics_id'] = x.get('analytics_id', False) return res def _get_analytic_lines(self, cr, uid, ids, context=None): inv = self.browse(cr, uid, ids)[0] cur_obj = self.pool.get('res.currency') invoice_line_obj = self.pool.get('account.invoice.line') acct_ins_obj = self.pool.get('account.analytic.plan.instance') company_currency = inv.company_id.currency_id.id if inv.type in ('out_invoice', 'in_refund'): sign = 1 else: sign = -1 iml = invoice_line_obj.move_line_get(cr, uid, inv.id, context=context) for il in iml: if il.get('analytics_id', False): if inv.type in ('in_invoice', 'in_refund'): ref = inv.reference else: ref = inv.number obj_move_line = 
acct_ins_obj.browse(cr, uid, il['analytics_id'], context=context) ctx = context.copy() ctx.update({'date': inv.date_invoice}) amount_calc = cur_obj.compute(cr, uid, inv.currency_id.id, company_currency, il['price'], context=ctx) * sign qty = il['quantity'] il['analytic_lines'] = [] for line2 in obj_move_line.account_ids: amt = amount_calc * (line2.rate/100) qtty = qty* (line2.rate/100) al_vals = { 'name': il['name'], 'date': inv['date_invoice'], 'unit_amount': qtty, 'product_id': il['product_id'], 'account_id': line2.analytic_account_id.id, 'amount': amt, 'product_uom_id': il['uos_id'], 'general_account_id': il['account_id'], 'journal_id': self._get_journal_analytic(cr, uid, inv.type), 'ref': ref, } il['analytic_lines'].append((0, 0, al_vals)) return iml class account_analytic_plan(osv.osv): _inherit = "account.analytic.plan" _columns = { 'default_instance_id': fields.many2one('account.analytic.plan.instance', 'Default Entries'), } class analytic_default(osv.osv): _inherit = "account.analytic.default" _columns = { 'analytics_id': fields.many2one('account.analytic.plan.instance', 'Analytic Distribution'), } class sale_order_line(osv.osv): _inherit = "sale.order.line" # Method overridden to set the analytic account by default on criterion match def invoice_line_create(self, cr, uid, ids, context=None): create_ids = super(sale_order_line,self).invoice_line_create(cr, uid, ids, context=context) inv_line_obj = self.pool.get('account.invoice.line') acct_anal_def_obj = self.pool.get('account.analytic.default') if ids: sale_line = self.browse(cr, uid, ids[0], context=context) for line in inv_line_obj.browse(cr, uid, create_ids, context=context): rec = acct_anal_def_obj.account_get(cr, uid, line.product_id.id, sale_line.order_id.partner_id.id, uid, time.strftime('%Y-%m-%d'), sale_line.order_id.company_id.id, context=context) if rec: inv_line_obj.write(cr, uid, [line.id], {'analytics_id': rec.analytics_id.id}, context=context) return create_ids class 
account_bank_statement(osv.osv): _inherit = "account.bank.statement" _name = "account.bank.statement" def _prepare_bank_move_line(self, cr, uid, st_line, move_id, amount, company_currency_id, context=None): result = super(account_bank_statement,self)._prepare_bank_move_line(cr, uid, st_line, move_id, amount, company_currency_id, context=context) result['analytics_id'] = st_line.analytics_id.id return result def button_confirm_bank(self, cr, uid, ids, context=None): super(account_bank_statement,self).button_confirm_bank(cr, uid, ids, context=context) for st in self.browse(cr, uid, ids, context=context): for st_line in st.line_ids: if st_line.analytics_id: if not st.journal_id.analytic_journal_id: raise osv.except_osv(_('No Analytic Journal!'),_("You have to define an analytic journal on the '%s' journal.") % (st.journal_id.name,)) if not st_line.amount: continue return True class account_bank_statement_line(osv.osv): _inherit = "account.bank.statement.line" _name = "account.bank.statement.line" _columns = { 'analytics_id': fields.many2one('account.analytic.plan.instance', 'Analytic Distribution'), } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
ianyh/heroku-buildpack-python-opencv
vendor/.heroku/lib/python2.7/test/test_strptime.py
50
25849
"""PyUnit testing against strptime""" import unittest import time import locale import re import sys from test import test_support from datetime import date as datetime_date import _strptime class getlang_Tests(unittest.TestCase): """Test _getlang""" def test_basic(self): self.assertEqual(_strptime._getlang(), locale.getlocale(locale.LC_TIME)) class LocaleTime_Tests(unittest.TestCase): """Tests for _strptime.LocaleTime. All values are lower-cased when stored in LocaleTime, so make sure to compare values after running ``lower`` on them. """ def setUp(self): """Create time tuple based on current time.""" self.time_tuple = time.localtime() self.LT_ins = _strptime.LocaleTime() def compare_against_time(self, testing, directive, tuple_position, error_msg): """Helper method that tests testing against directive based on the tuple_position of time_tuple. Uses error_msg as error message. """ strftime_output = time.strftime(directive, self.time_tuple).lower() comparison = testing[self.time_tuple[tuple_position]] self.assertIn(strftime_output, testing, "%s: not found in tuple" % error_msg) self.assertEqual(comparison, strftime_output, "%s: position within tuple incorrect; %s != %s" % (error_msg, comparison, strftime_output)) def test_weekday(self): # Make sure that full and abbreviated weekday names are correct in # both string and position with tuple self.compare_against_time(self.LT_ins.f_weekday, '%A', 6, "Testing of full weekday name failed") self.compare_against_time(self.LT_ins.a_weekday, '%a', 6, "Testing of abbreviated weekday name failed") def test_month(self): # Test full and abbreviated month names; both string and position # within the tuple self.compare_against_time(self.LT_ins.f_month, '%B', 1, "Testing against full month name failed") self.compare_against_time(self.LT_ins.a_month, '%b', 1, "Testing against abbreviated month name failed") def test_am_pm(self): # Make sure AM/PM representation done properly strftime_output = time.strftime("%p", 
self.time_tuple).lower() self.assertIn(strftime_output, self.LT_ins.am_pm, "AM/PM representation not in tuple") if self.time_tuple[3] < 12: position = 0 else: position = 1 self.assertEqual(self.LT_ins.am_pm[position], strftime_output, "AM/PM representation in the wrong position within the tuple") def test_timezone(self): # Make sure timezone is correct timezone = time.strftime("%Z", self.time_tuple).lower() if timezone: self.assertTrue(timezone in self.LT_ins.timezone[0] or timezone in self.LT_ins.timezone[1], "timezone %s not found in %s" % (timezone, self.LT_ins.timezone)) def test_date_time(self): # Check that LC_date_time, LC_date, and LC_time are correct # the magic date is used so as to not have issues with %c when day of # the month is a single digit and has a leading space. This is not an # issue since strptime still parses it correctly. The problem is # testing these directives for correctness by comparing strftime # output. magic_date = (1999, 3, 17, 22, 44, 55, 2, 76, 0) strftime_output = time.strftime("%c", magic_date) self.assertEqual(time.strftime(self.LT_ins.LC_date_time, magic_date), strftime_output, "LC_date_time incorrect") strftime_output = time.strftime("%x", magic_date) self.assertEqual(time.strftime(self.LT_ins.LC_date, magic_date), strftime_output, "LC_date incorrect") strftime_output = time.strftime("%X", magic_date) self.assertEqual(time.strftime(self.LT_ins.LC_time, magic_date), strftime_output, "LC_time incorrect") LT = _strptime.LocaleTime() LT.am_pm = ('', '') self.assertTrue(LT.LC_time, "LocaleTime's LC directives cannot handle " "empty strings") def test_lang(self): # Make sure lang is set to what _getlang() returns # Assuming locale has not changed between now and when self.LT_ins was created self.assertEqual(self.LT_ins.lang, _strptime._getlang()) class TimeRETests(unittest.TestCase): """Tests for TimeRE.""" def setUp(self): """Construct generic TimeRE object.""" self.time_re = _strptime.TimeRE() self.locale_time = 
_strptime.LocaleTime() def test_pattern(self): # Test TimeRE.pattern pattern_string = self.time_re.pattern(r"%a %A %d") self.assertTrue(pattern_string.find(self.locale_time.a_weekday[2]) != -1, "did not find abbreviated weekday in pattern string '%s'" % pattern_string) self.assertTrue(pattern_string.find(self.locale_time.f_weekday[4]) != -1, "did not find full weekday in pattern string '%s'" % pattern_string) self.assertTrue(pattern_string.find(self.time_re['d']) != -1, "did not find 'd' directive pattern string '%s'" % pattern_string) def test_pattern_escaping(self): # Make sure any characters in the format string that might be taken as # regex syntax is escaped. pattern_string = self.time_re.pattern("\d+") self.assertIn(r"\\d\+", pattern_string, "%s does not have re characters escaped properly" % pattern_string) def test_compile(self): # Check that compiled regex is correct found = self.time_re.compile(r"%A").match(self.locale_time.f_weekday[6]) self.assertTrue(found and found.group('A') == self.locale_time.f_weekday[6], "re object for '%A' failed") compiled = self.time_re.compile(r"%a %b") found = compiled.match("%s %s" % (self.locale_time.a_weekday[4], self.locale_time.a_month[4])) self.assertTrue(found, "Match failed with '%s' regex and '%s' string" % (compiled.pattern, "%s %s" % (self.locale_time.a_weekday[4], self.locale_time.a_month[4]))) self.assertTrue(found.group('a') == self.locale_time.a_weekday[4] and found.group('b') == self.locale_time.a_month[4], "re object couldn't find the abbreviated weekday month in " "'%s' using '%s'; group 'a' = '%s', group 'b' = %s'" % (found.string, found.re.pattern, found.group('a'), found.group('b'))) for directive in ('a','A','b','B','c','d','H','I','j','m','M','p','S', 'U','w','W','x','X','y','Y','Z','%'): compiled = self.time_re.compile("%" + directive) found = compiled.match(time.strftime("%" + directive)) self.assertTrue(found, "Matching failed on '%s' using '%s' regex" % (time.strftime("%" + directive), 
compiled.pattern)) def test_blankpattern(self): # Make sure when tuple or something has no values no regex is generated. # Fixes bug #661354 test_locale = _strptime.LocaleTime() test_locale.timezone = (frozenset(), frozenset()) self.assertEqual(_strptime.TimeRE(test_locale).pattern("%Z"), '', "with timezone == ('',''), TimeRE().pattern('%Z') != ''") def test_matching_with_escapes(self): # Make sure a format that requires escaping of characters works compiled_re = self.time_re.compile("\w+ %m") found = compiled_re.match("\w+ 10") self.assertTrue(found, "Escaping failed of format '\w+ 10'") def test_locale_data_w_regex_metacharacters(self): # Check that if locale data contains regex metacharacters they are # escaped properly. # Discovered by bug #1039270 . locale_time = _strptime.LocaleTime() locale_time.timezone = (frozenset(("utc", "gmt", "Tokyo (standard time)")), frozenset("Tokyo (daylight time)")) time_re = _strptime.TimeRE(locale_time) self.assertTrue(time_re.compile("%Z").match("Tokyo (standard time)"), "locale data that contains regex metacharacters is not" " properly escaped") def test_whitespace_substitution(self): # When pattern contains whitespace, make sure it is taken into account # so as to not allow to subpatterns to end up next to each other and # "steal" characters from each other. 
pattern = self.time_re.pattern('%j %H') self.assertFalse(re.match(pattern, "180")) self.assertTrue(re.match(pattern, "18 0")) class StrptimeTests(unittest.TestCase): """Tests for _strptime.strptime.""" def setUp(self): """Create testing time tuple.""" self.time_tuple = time.gmtime() def test_ValueError(self): # Make sure ValueError is raised when match fails or format is bad self.assertRaises(ValueError, _strptime._strptime_time, data_string="%d", format="%A") for bad_format in ("%", "% ", "%e"): try: _strptime._strptime_time("2005", bad_format) except ValueError: continue except Exception, err: self.fail("'%s' raised %s, not ValueError" % (bad_format, err.__class__.__name__)) else: self.fail("'%s' did not raise ValueError" % bad_format) def test_unconverteddata(self): # Check ValueError is raised when there is unconverted data self.assertRaises(ValueError, _strptime._strptime_time, "10 12", "%m") def helper(self, directive, position): """Helper fxn in testing.""" strf_output = time.strftime("%" + directive, self.time_tuple) strp_output = _strptime._strptime_time(strf_output, "%" + directive) self.assertTrue(strp_output[position] == self.time_tuple[position], "testing of '%s' directive failed; '%s' -> %s != %s" % (directive, strf_output, strp_output[position], self.time_tuple[position])) def test_year(self): # Test that the year is handled properly for directive in ('y', 'Y'): self.helper(directive, 0) # Must also make sure %y values are correct for bounds set by Open Group for century, bounds in ((1900, ('69', '99')), (2000, ('00', '68'))): for bound in bounds: strp_output = _strptime._strptime_time(bound, '%y') expected_result = century + int(bound) self.assertTrue(strp_output[0] == expected_result, "'y' test failed; passed in '%s' " "and returned '%s'" % (bound, strp_output[0])) def test_month(self): # Test for month directives for directive in ('B', 'b', 'm'): self.helper(directive, 1) def test_day(self): # Test for day directives self.helper('d', 2) def 
test_hour(self): # Test hour directives self.helper('H', 3) strf_output = time.strftime("%I %p", self.time_tuple) strp_output = _strptime._strptime_time(strf_output, "%I %p") self.assertTrue(strp_output[3] == self.time_tuple[3], "testing of '%%I %%p' directive failed; '%s' -> %s != %s" % (strf_output, strp_output[3], self.time_tuple[3])) def test_minute(self): # Test minute directives self.helper('M', 4) def test_second(self): # Test second directives self.helper('S', 5) def test_fraction(self): # Test microseconds import datetime d = datetime.datetime(2012, 12, 20, 12, 34, 56, 78987) tup, frac = _strptime._strptime(str(d), format="%Y-%m-%d %H:%M:%S.%f") self.assertEqual(frac, d.microsecond) def test_weekday(self): # Test weekday directives for directive in ('A', 'a', 'w'): self.helper(directive,6) def test_julian(self): # Test julian directives self.helper('j', 7) def test_timezone(self): # Test timezone directives. # When gmtime() is used with %Z, entire result of strftime() is empty. # Check for equal timezone names deals with bad locale info when this # occurs; first found in FreeBSD 4.4. 
strp_output = _strptime._strptime_time("UTC", "%Z") self.assertEqual(strp_output.tm_isdst, 0) strp_output = _strptime._strptime_time("GMT", "%Z") self.assertEqual(strp_output.tm_isdst, 0) time_tuple = time.localtime() strf_output = time.strftime("%Z") #UTC does not have a timezone strp_output = _strptime._strptime_time(strf_output, "%Z") locale_time = _strptime.LocaleTime() if time.tzname[0] != time.tzname[1] or not time.daylight: self.assertTrue(strp_output[8] == time_tuple[8], "timezone check failed; '%s' -> %s != %s" % (strf_output, strp_output[8], time_tuple[8])) else: self.assertTrue(strp_output[8] == -1, "LocaleTime().timezone has duplicate values and " "time.daylight but timezone value not set to -1") def test_bad_timezone(self): # Explicitly test possibility of bad timezone; # when time.tzname[0] == time.tzname[1] and time.daylight tz_name = time.tzname[0] if tz_name.upper() in ("UTC", "GMT"): return try: original_tzname = time.tzname original_daylight = time.daylight time.tzname = (tz_name, tz_name) time.daylight = 1 tz_value = _strptime._strptime_time(tz_name, "%Z")[8] self.assertEqual(tz_value, -1, "%s lead to a timezone value of %s instead of -1 when " "time.daylight set to %s and passing in %s" % (time.tzname, tz_value, time.daylight, tz_name)) finally: time.tzname = original_tzname time.daylight = original_daylight def test_date_time(self): # Test %c directive for position in range(6): self.helper('c', position) def test_date(self): # Test %x directive for position in range(0,3): self.helper('x', position) def test_time(self): # Test %X directive for position in range(3,6): self.helper('X', position) def test_percent(self): # Make sure % signs are handled properly strf_output = time.strftime("%m %% %Y", self.time_tuple) strp_output = _strptime._strptime_time(strf_output, "%m %% %Y") self.assertTrue(strp_output[0] == self.time_tuple[0] and strp_output[1] == self.time_tuple[1], "handling of percent sign failed") def test_caseinsensitive(self): # Should 
handle names case-insensitively. strf_output = time.strftime("%B", self.time_tuple) self.assertTrue(_strptime._strptime_time(strf_output.upper(), "%B"), "strptime does not handle ALL-CAPS names properly") self.assertTrue(_strptime._strptime_time(strf_output.lower(), "%B"), "strptime does not handle lowercase names properly") self.assertTrue(_strptime._strptime_time(strf_output.capitalize(), "%B"), "strptime does not handle capword names properly") def test_defaults(self): # Default return value should be (1900, 1, 1, 0, 0, 0, 0, 1, 0) defaults = (1900, 1, 1, 0, 0, 0, 0, 1, -1) strp_output = _strptime._strptime_time('1', '%m') self.assertTrue(strp_output == defaults, "Default values for strptime() are incorrect;" " %s != %s" % (strp_output, defaults)) def test_escaping(self): # Make sure all characters that have regex significance are escaped. # Parentheses are in a purposeful order; will cause an error of # unbalanced parentheses when the regex is compiled if they are not # escaped. # Test instigated by bug #796149 . 
# NOTE(review): this chunk begins inside a TimeRETests test method whose
# ``def`` line lies before the visible region; the two statements below are
# the tail of that method and are preserved exactly as found.
        need_escaping = ".^$*+?{}\[]|)("
        self.assertTrue(_strptime._strptime_time(need_escaping, need_escaping))

    def test_feb29_on_leap_year_without_year(self):
        # Feb 29 must parse even without an explicit year.
        time.strptime("Feb 29", "%b %d")

    def test_mar1_comes_after_feb29_even_when_omitting_the_year(self):
        self.assertLess(
                time.strptime("Feb 29", "%b %d"),
                time.strptime("Mar 1", "%b %d"))


class Strptime12AMPMTests(unittest.TestCase):
    """Test a _strptime regression in '%I %p' at 12 noon (12 PM)"""

    def test_twelve_noon_midnight(self):
        eq = self.assertEqual
        # '12 PM' is noon (tm_hour == 12) and '12 AM' is midnight (tm_hour == 0).
        eq(time.strptime('12 PM', '%I %p')[3], 12)
        eq(time.strptime('12 AM', '%I %p')[3], 0)
        eq(_strptime._strptime_time('12 PM', '%I %p')[3], 12)
        eq(_strptime._strptime_time('12 AM', '%I %p')[3], 0)


class JulianTests(unittest.TestCase):
    """Test a _strptime regression that all julian (1-366) are accepted"""

    def test_all_julian_days(self):
        eq = self.assertEqual
        for i in range(1, 367):
            # use 2004, since it is a leap year, we have 366 days
            eq(_strptime._strptime_time('%d 2004' % i, '%j %Y')[7], i)


class CalculationTests(unittest.TestCase):
    """Test that strptime() fills in missing info correctly"""

    def setUp(self):
        # Reference time tuple, round-tripped through strftime/strptime below.
        self.time_tuple = time.gmtime()

    def test_julian_calculation(self):
        # Make sure that when Julian is missing that it is calculated
        format_string = "%Y %m %d %H %M %S %w %Z"
        result = _strptime._strptime_time(time.strftime(format_string,
                                                        self.time_tuple),
                                          format_string)
        self.assertTrue(result.tm_yday == self.time_tuple.tm_yday,
                        "Calculation of tm_yday failed; %s != %s" %
                         (result.tm_yday, self.time_tuple.tm_yday))

    def test_gregorian_calculation(self):
        # Test that Gregorian date can be calculated from Julian day
        format_string = "%Y %H %M %S %w %j %Z"
        result = _strptime._strptime_time(time.strftime(format_string,
                                                        self.time_tuple),
                                          format_string)
        self.assertTrue(result.tm_year == self.time_tuple.tm_year and
                         result.tm_mon == self.time_tuple.tm_mon and
                         result.tm_mday == self.time_tuple.tm_mday,
                        "Calculation of Gregorian date failed;"
                         "%s-%s-%s != %s-%s-%s" %
                         (result.tm_year, result.tm_mon, result.tm_mday,
                          self.time_tuple.tm_year, self.time_tuple.tm_mon,
                          self.time_tuple.tm_mday))

    def test_day_of_week_calculation(self):
        # Test that the day of the week is calculated as needed
        format_string = "%Y %m %d %H %S %j %Z"
        result = _strptime._strptime_time(time.strftime(format_string,
                                                        self.time_tuple),
                                          format_string)
        self.assertTrue(result.tm_wday == self.time_tuple.tm_wday,
                        "Calculation of day of the week failed;"
                         "%s != %s" % (result.tm_wday, self.time_tuple.tm_wday))

    def test_week_of_year_and_day_of_week_calculation(self):
        # Should be able to infer date if given year, week of year (%U or %W)
        # and day of the week
        def test_helper(ymd_tuple, test_reason):
            # Round-trips the date through both week-numbering directives.
            for directive in ('W', 'U'):
                format_string = "%%Y %%%s %%w" % directive
                dt_date = datetime_date(*ymd_tuple)
                strp_input = dt_date.strftime(format_string)
                strp_output = _strptime._strptime_time(strp_input, format_string)
                self.assertTrue(strp_output[:3] == ymd_tuple,
                        "%s(%s) test failed w/ '%s': %s != %s (%s != %s)" %
                            (test_reason, directive, strp_input,
                             strp_output[:3], ymd_tuple,
                             strp_output[7], dt_date.timetuple()[7]))
        test_helper((1901, 1, 3), "week 0")
        test_helper((1901, 1, 8), "common case")
        test_helper((1901, 1, 13), "day on Sunday")
        test_helper((1901, 1, 14), "day on Monday")
        test_helper((1905, 1, 1), "Jan 1 on Sunday")
        test_helper((1906, 1, 1), "Jan 1 on Monday")
        test_helper((1906, 1, 7), "first Sunday in a year starting on Monday")
        test_helper((1905, 12, 31), "Dec 31 on Sunday")
        test_helper((1906, 12, 31), "Dec 31 on Monday")
        test_helper((2008, 12, 29), "Monday in the last week of the year")
        test_helper((2008, 12, 22), "Monday in the second-to-last week of the "
                                    "year")
        test_helper((1978, 10, 23), "randomly chosen date")
        test_helper((2004, 12, 18), "randomly chosen date")
        test_helper((1978, 10, 23), "year starting and ending on Monday while "
                                    "date not on Sunday or Monday")
        test_helper((1917, 12, 17), "year starting and ending on Monday with "
                                    "a Monday not at the beginning or end "
                                    "of the year")
        test_helper((1917, 12, 31), "Dec 31 on Monday with year starting and "
                                    "ending on Monday")
        # NOTE(review): the leading zeros below are Python 2 octal-style
        # literals (01 == 1, 07 == 7); they would be a SyntaxError on Python 3.
        test_helper((2007, 01, 07), "First Sunday of 2007")
        test_helper((2007, 01, 14), "Second Sunday of 2007")
        test_helper((2006, 12, 31), "Last Sunday of 2006")
        test_helper((2006, 12, 24), "Second to last Sunday of 2006")


class CacheTests(unittest.TestCase):
    """Test that caching works properly."""

    def test_time_re_recreation(self):
        # Make sure cache is recreated when current locale does not match what
        # cached object was created with.
        _strptime._strptime_time("10", "%d")
        _strptime._strptime_time("2005", "%Y")
        _strptime._TimeRE_cache.locale_time.lang = "Ni"
        original_time_re = _strptime._TimeRE_cache
        _strptime._strptime_time("10", "%d")
        self.assertIsNot(original_time_re, _strptime._TimeRE_cache)
        self.assertEqual(len(_strptime._regex_cache), 1)

    def test_regex_cleanup(self):
        # Make sure cached regexes are discarded when cache becomes "full".
        try:
            del _strptime._regex_cache['%d']
        except KeyError:
            pass
        bogus_key = 0
        # Overfill the cache so the next parse must flush it.
        while len(_strptime._regex_cache) <= _strptime._CACHE_MAX_SIZE:
            _strptime._regex_cache[bogus_key] = None
            bogus_key += 1
        _strptime._strptime_time("10", "%d")
        self.assertEqual(len(_strptime._regex_cache), 1)

    def test_new_localetime(self):
        # A new LocaleTime instance should be created when a new TimeRE object
        # is created.
        locale_time_id = _strptime._TimeRE_cache.locale_time
        _strptime._TimeRE_cache.locale_time.lang = "Ni"
        _strptime._strptime_time("10", "%d")
        self.assertIsNot(locale_time_id, _strptime._TimeRE_cache.locale_time)

    def test_TimeRE_recreation(self):
        # The TimeRE instance should be recreated upon changing the locale.
        locale_info = locale.getlocale(locale.LC_TIME)
        try:
            locale.setlocale(locale.LC_TIME, ('en_US', 'UTF8'))
        except locale.Error:
            # Locale not available on this machine; nothing to test.
            return
        try:
            _strptime._strptime_time('10', '%d')
            # Get id of current cache object.
            first_time_re = _strptime._TimeRE_cache
            try:
                # Change the locale and force a recreation of the cache.
                locale.setlocale(locale.LC_TIME, ('de_DE', 'UTF8'))
                _strptime._strptime_time('10', '%d')
                # Get the new cache object's id.
                second_time_re = _strptime._TimeRE_cache
                # They should not be equal.
                self.assertIsNot(first_time_re, second_time_re)
            # Possible test locale is not supported while initial locale is.
            # If this is the case just suppress the exception and fall-through
            # to the resetting to the original locale.
            except locale.Error:
                pass
        # Make sure we don't trample on the locale setting once we leave the
        # test.
        finally:
            locale.setlocale(locale.LC_TIME, locale_info)


def test_main():
    test_support.run_unittest(
        getlang_Tests,
        LocaleTime_Tests,
        TimeRETests,
        StrptimeTests,
        Strptime12AMPMTests,
        JulianTests,
        CalculationTests,
        CacheTests
    )


if __name__ == '__main__':
    test_main()
mit
samdowd/drumm-farm
drumm_env/lib/python2.7/site-packages/django/utils/translation/trans_null.py
467
1408
# These are versions of the functions in django.utils.translation.trans_real
# that don't actually do anything. This is purely for performance, so that
# settings.USE_I18N = False can use this module rather than trans_real.py.
from django.conf import settings
from django.utils.encoding import force_text


def ngettext(singular, plural, number):
    """Return ``singular`` when ``number`` is exactly 1, else ``plural``."""
    if number == 1:
        return singular
    return plural
ngettext_lazy = ngettext


def ungettext(singular, plural, number):
    """Text (unicode) variant of ngettext()."""
    return force_text(ngettext(singular, plural, number))


def pgettext(context, message):
    """Ignore ``context``; with I18N off there is nothing to disambiguate."""
    return ugettext(message)


def npgettext(context, singular, plural, number):
    """Ignore ``context`` and delegate to ungettext()."""
    return ungettext(singular, plural, number)


# PEP 8 (E731): plain ``def`` statements instead of lambda assignments so
# these callables have useful names in tracebacks; behavior is unchanged.
def activate(x):
    """No-op: there is no translation catalog to activate."""
    return None


def deactivate():
    """No-op counterpart of activate()."""
    return None

deactivate_all = deactivate


def get_language():
    """Always report the project's configured LANGUAGE_CODE."""
    return settings.LANGUAGE_CODE


def get_language_bidi():
    """True when the configured language is a right-to-left one."""
    return settings.LANGUAGE_CODE in settings.LANGUAGES_BIDI


def check_for_language(x):
    """With I18N disabled every language code counts as supported."""
    return True


def gettext(message):
    """Identity translation: return the message unchanged."""
    return message


def ugettext(message):
    """Text (unicode) variant of gettext()."""
    return force_text(gettext(message))

gettext_noop = gettext_lazy = _ = gettext


def to_locale(language):
    """Convert a language name (en-us) to a locale name (en_US)."""
    p = language.find('-')
    if p >= 0:
        return language[:p].lower() + '_' + language[p + 1:].upper()
    else:
        return language.lower()


def get_language_from_request(request, check_path=False):
    """Ignore the request; the active language is always LANGUAGE_CODE."""
    return settings.LANGUAGE_CODE


def get_language_from_path(request):
    """No language prefix handling when I18N is disabled."""
    return None
mit
tiagofrepereira2012/tensorflow
tensorflow/python/tools/optimize_for_inference.py
106
4714
# pylint: disable=g-bad-file-header
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Removes parts of a graph that are only needed for training.

There are several common transformations that can be applied to GraphDefs
created to train a model, that help reduce the amount of computation needed
when the network is used only for inference. These include:

 - Removing training-only operations like checkpoint saving.
 - Stripping out parts of the graph that are never reached.
 - Removing debug operations like CheckNumerics.
 - Folding batch normalization ops into the pre-calculated weights.
 - Fusing common operations into unified versions.

This script takes either a frozen binary GraphDef file (where the weight
variables have been converted into constants by the freeze_graph script), or a
text GraphDef proto file (the weight variables are stored in a separate
checkpoint file), and outputs a new GraphDef with the optimizations applied.

If the input graph is a text graph file, make sure to include the node that
restores the variable weights in output_names. That node is usually named
"restore_all".

An example of command-line usage is:

bazel build tensorflow/python/tools:optimize_for_inference && \
bazel-bin/tensorflow/python/tools/optimize_for_inference \
--input=frozen_inception_graph.pb \
--output=optimized_inception_graph.pb \
--frozen_graph=True \
--input_names=Mul \
--output_names=softmax
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import argparse
import os
import sys

from google.protobuf import text_format
from tensorflow.core.framework import graph_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import graph_io
from tensorflow.python.platform import app
from tensorflow.python.platform import gfile
from tensorflow.python.tools import optimize_for_inference_lib

FLAGS = None


def main(unused_args):
  """Loads the GraphDef named by FLAGS, optimizes it and writes the result.

  Returns:
    0 on success, -1 when the input file does not exist.
  """
  if not gfile.Exists(FLAGS.input):
    print("Input graph file '" + FLAGS.input + "' does not exist!")
    return -1

  input_graph_def = graph_pb2.GraphDef()
  with gfile.Open(FLAGS.input, "rb") as f:
    data = f.read()
    if FLAGS.frozen_graph:
      # Frozen graphs are serialized binary protos.
      input_graph_def.ParseFromString(data)
    else:
      # Otherwise the input is a text-format proto.
      text_format.Merge(data.decode("utf-8"), input_graph_def)

  output_graph_def = optimize_for_inference_lib.optimize_for_inference(
      input_graph_def,
      FLAGS.input_names.split(","),
      FLAGS.output_names.split(","), FLAGS.placeholder_type_enum)

  if FLAGS.frozen_graph:
    # BUGFIX: open the output in binary mode ("wb"; previously "w") and use a
    # context manager.  Writing serialized protobuf bytes through a text-mode
    # handle can corrupt them (newline translation), and the old code never
    # closed the file.
    with gfile.FastGFile(FLAGS.output, "wb") as f:
      f.write(output_graph_def.SerializeToString())
  else:
    graph_io.write_graph(output_graph_def,
                         os.path.dirname(FLAGS.output),
                         os.path.basename(FLAGS.output))

  return 0


def parse_args():
  """Parses command line arguments."""
  parser = argparse.ArgumentParser()
  parser.register("type", "bool", lambda v: v.lower() == "true")
  parser.add_argument(
      "--input",
      type=str,
      default="",
      help="TensorFlow \'GraphDef\' file to load.")
  parser.add_argument(
      "--output",
      type=str,
      default="",
      help="File to save the output graph to.")
  parser.add_argument(
      "--input_names",
      type=str,
      default="",
      help="Input node names, comma separated.")
  parser.add_argument(
      "--output_names",
      type=str,
      default="",
      help="Output node names, comma separated.")
  parser.add_argument(
      "--frozen_graph",
      nargs="?",
      const=True,
      type="bool",
      default=True,
      help="""\
      If true, the input graph is a binary frozen GraphDef
      file; if false, it is a text GraphDef proto file.\
      """)
  parser.add_argument(
      "--placeholder_type_enum",
      type=int,
      default=dtypes.float32.as_datatype_enum,
      help="The AttrValue enum to use for placeholders.")
  return parser.parse_known_args()


if __name__ == "__main__":
  FLAGS, unparsed = parse_args()
  app.run(main=main, argv=[sys.argv[0]] + unparsed)
apache-2.0
subena-io/subena
base.py
1
1245
#!/usr/local/bin/python2.7
# -*- coding: utf-8-sig -*-
"""Shared database bootstrap: SQLAlchemy engine, session factories and base.

Reads the connection URL from the SUBDB environment variable (falling back to
a local MySQL default) and creates the SQLAlchemy machinery used by the rest
of the project.
"""
import argparse
import logging
import os
import sys

import sqlalchemy
from sqlalchemy.ext.declarative.api import declarative_base
from sqlalchemy.orm.session import sessionmaker

# if no env variable has been defined, a default one is set
# ("key in mapping" replaces the Python-2-only dict.has_key()).
if "SUBDB" not in os.environ:
    os.environ['SUBDB'] = 'mysql://root:cnim@127.0.0.1:3306/sub_ai'

URLS = {
    'SQL': os.environ['SUBDB'],
}

# print message or not: -v enables DEBUG logging, otherwise WARNING only.
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('-a', '--alerts', action='store_true')
args = parser.parse_args()
if args.verbose:
    logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.DEBUG)
else:
    logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.WARNING)

try:
    engine = sqlalchemy.create_engine(URLS['SQL'])
    Session = sessionmaker(bind=engine)
    Base = declarative_base(bind=engine)
    DBSession = sessionmaker(bind=engine)
    Base.metadata.create_all(engine)
except Exception:
    logging.error('Database is not reachable with provided path : %s', URLS['SQL'])
    logging.error('Please check database instance is running and database name exists')
    # BUGFIX: exit with a non-zero status so callers can detect the failure
    # (the original called exit(0), signalling success on error).
    sys.exit(1)
apache-2.0
arenadata/ambari
ambari-server/src/main/resources/stacks/ADH/1.4/services/GIRAPH/package/scripts/giraph.py
2
1493
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements.  See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.  The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License.  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Ambari Agent

"""
import sys
import os
import logging
from resource_management import *
from resource_management.libraries.functions.check_process_status import check_process_status
from resource_management.libraries.resources.hdfs_resource import HdfsResource
from resource_management.core.resources.service import Service
from resource_management.core.exceptions import ClientComponentHasNoStatus


class Giraph(Script):
  """Ambari lifecycle script for the Giraph client component.

  ``Script`` comes from the ``resource_management`` star import above.
  """

  def install(self, env):
    """Install the Giraph packages, then apply the service configuration."""
    # ``params`` is generated by Ambari and shipped alongside this script.
    import params
    env.set_params(params)
    self.install_packages(env)

  def configure(self, env, upgrade_type=None):
    """Apply the current cluster configuration; Giraph needs no extra steps."""
    import params
    env.set_params(params)

  def status(self, env):
    """Giraph is a client-only component, so it has no running status."""
    raise ClientComponentHasNoStatus()

if __name__ == "__main__":
  Giraph().execute()
apache-2.0
paulcronk/psinsights
psinsights/error.py
1
1641
###############################################################################
# Copyright 2012 FastSoft Inc.
# Copyright 2012 Devin Anderson <danderson (at) fastsoft (dot) com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License.  You may obtain a copy
# of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
# License for the specific language governing permissions and limitations under
# the License.
###############################################################################

from psinsights.issue import Issue as _Issue

class Error(Exception):

    """Exception class for service errors.

    Wraps the decoded JSON error document returned by the service and
    exposes its code, message and the individual issues.
    """

    # Class-level default for the lazily-built tuple served by ``issues``.
    __issues = None

    def __init__(self, data):
        """Initialize from the decoded JSON error document.

        data -- a mapping with an "error" key whose value carries "code",
        "message" and (for ``issues``) an "errors" list.
        """
        data = data["error"]
        code = data["code"]
        message = data["message"]
        super(Error, self).__init__((message, code))
        self.__code = code
        self.__data = data
        self.__message = message

    # NOTE: the previous revision defined __del__ merely to set attributes to
    # None.  That is redundant (the garbage collector reclaims them anyway)
    # and __del__ can raise when __init__ failed or during interpreter
    # shutdown, so it has been removed.

    @property
    def code(self):
        """Numeric error code reported by the service."""
        return self.__code

    @property
    def issues(self):
        """Tuple of Issue objects, built lazily from the error payload."""
        issues = self.__issues
        if issues is None:
            issues = tuple((_Issue(d) for d in self.__data["errors"]))
            self.__issues = issues
        return issues

    @property
    def message(self):
        """Human-readable error message reported by the service."""
        return self.__message
apache-2.0
willium/browser-extension
tools/addon-sdk-1.16/python-lib/cuddlefish/tests/test_preflight.py
37
5860
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import os, shutil
import simplejson as json
import unittest
import hashlib
import base64
from cuddlefish import preflight
from StringIO import StringIO

class Util(unittest.TestCase):
    """Tests for cuddlefish.preflight's package.json ID handling."""

    def get_basedir(self):
        # Per-test scratch directory, keyed by the test's unique id.
        return os.path.join(".test_tmp", self.id())

    def make_basedir(self):
        basedir = self.get_basedir()
        if os.path.isdir(basedir):
            here = os.path.abspath(os.getcwd())
            # Only ever rmtree inside the current working directory.
            assert os.path.abspath(basedir).startswith(here) # safety
            shutil.rmtree(basedir)
        os.makedirs(basedir)
        return basedir

    def test_base62(self):
        # The custom altchars "AB" must match a post-hoc +/A and //B replace.
        for i in range(1000):
            h = hashlib.sha1(str(i)).digest()
            s1 = base64.b64encode(h, "AB").strip("=")
            s2 = base64.b64encode(h).strip("=").replace("+","A").replace("/","B")
            self.failUnlessEqual(s1, s2)

    def write(self, config):
        # Write raw text into this test's package.json.
        basedir = self.get_basedir()
        fn = os.path.join(basedir, "package.json")
        open(fn,"w").write(config)

    def read(self):
        # Read back the raw text of this test's package.json.
        basedir = self.get_basedir()
        fn = os.path.join(basedir, "package.json")
        return open(fn,"r").read()

    def get_cfg(self):
        cfg = json.loads(self.read())
        if "name" not in cfg:
            # the cfx parser always provides a name, even if package.json
            # doesn't contain one
            cfg["name"] = "pretend name"
        return cfg

    def parse(self, keydata):
        # Split "Key: value" lines into a dict plus the key ordering.
        fields = {}
        fieldnames = []
        for line in keydata.split("\n"):
            if line.strip():
                k,v = line.split(":", 1)
                k = k.strip() ; v = v.strip()
                fields[k] = v
                fieldnames.append(k)
        return fields, fieldnames

    def test_preflight(self):
        basedir = self.make_basedir()
        fn = os.path.join(basedir, "package.json")

        # empty config is not ok: need id (name is automatically supplied)
        config_orig = "{}"
        self.write(config_orig)
        out = StringIO()
        cfg = self.get_cfg()
        config_was_ok, modified = preflight.preflight_config(cfg, fn,
                                                             stderr=out)
        self.failUnlessEqual(config_was_ok, False)
        self.failUnlessEqual(modified, True)
        backup_fn = os.path.join(basedir, "package.json.backup")
        config_backup = open(backup_fn,"r").read()
        self.failUnlessEqual(config_backup, config_orig)
        config = json.loads(self.read())
        self.failIf("name" in config)
        self.failUnless("id" in config)
        self.failUnless(config["id"].startswith("jid1-"), config["id"])
        self.failUnlessEqual(out.getvalue().strip(),
                             "No 'id' in package.json: creating a new ID for you.")
        os.unlink(backup_fn)

        # just a name? we add the id
        config_orig = '{"name": "my-awesome-package"}'
        self.write(config_orig)
        out = StringIO()
        cfg = self.get_cfg()
        config_was_ok, modified = preflight.preflight_config(cfg, fn,
                                                             stderr=out)
        self.failUnlessEqual(config_was_ok, False)
        self.failUnlessEqual(modified, True)
        backup_fn = os.path.join(basedir, "package.json.backup")
        config_backup = open(backup_fn,"r").read()
        self.failUnlessEqual(config_backup, config_orig)
        config = json.loads(self.read())
        self.failUnlessEqual(config["name"], "my-awesome-package")
        self.failUnless("id" in config)
        self.failUnless(config["id"].startswith("jid1-"), config["id"])
        jid = str(config["id"])
        self.failUnlessEqual(out.getvalue().strip(),
                             "No 'id' in package.json: creating a new ID for you.")
        os.unlink(backup_fn)

        # name and valid id? great! ship it!
        config2 = '{"name": "my-awesome-package", "id": "%s"}' % jid
        self.write(config2)
        out = StringIO()
        cfg = self.get_cfg()
        config_was_ok, modified = preflight.preflight_config(cfg, fn,
                                                             stderr=out)
        self.failUnlessEqual(config_was_ok, True)
        self.failUnlessEqual(modified, False)
        config2a = self.read()
        self.failUnlessEqual(config2a, config2)
        self.failUnlessEqual(out.getvalue().strip(), "")

        # name and anonymous ID? without asking to see its papers, ship it
        config3 = '{"name": "my-old-skool-package", "id": "anonid0-deadbeef"}'
        self.write(config3)
        out = StringIO()
        cfg = self.get_cfg()
        config_was_ok, modified = preflight.preflight_config(cfg, fn,
                                                             stderr=out)
        self.failUnlessEqual(config_was_ok, True)
        self.failUnlessEqual(modified, False)
        config3a = self.read()
        self.failUnlessEqual(config3a, config3)
        self.failUnlessEqual(out.getvalue().strip(), "")

        # name and old-style ID? with nostalgic trepidation, ship it
        config4 = '{"name": "my-old-skool-package", "id": "foo@bar.baz"}'
        self.write(config4)
        out = StringIO()
        cfg = self.get_cfg()
        config_was_ok, modified = preflight.preflight_config(cfg, fn,
                                                             stderr=out)
        self.failUnlessEqual(config_was_ok, True)
        self.failUnlessEqual(modified, False)
        config4a = self.read()
        self.failUnlessEqual(config4a, config4)
        self.failUnlessEqual(out.getvalue().strip(), "")

if __name__ == '__main__':
    unittest.main()
mit
Danfocus/Flexget
flexget/plugins/cli/win32_service.py
4
2727
"""Optional Windows-service wrapper for the FlexGet daemon.

The pywin32 pieces are imported inside a try/except so this module stays
importable on platforms (or installs) without pywin32; everything Windows-
specific is simply skipped there.
"""
from __future__ import unicode_literals, division, absolute_import
from builtins import *  # noqa pylint: disable=unused-import, redefined-builtin

import argparse
import logging
import os
import socket
import sys

import flexget
from flexget import options
from flexget.event import event
from flexget.terminal import console

log = logging.getLogger('win32_service')

try:
    import servicemanager
    import win32event
    import win32service
    import win32serviceutil

    class AppServerSvc(win32serviceutil.ServiceFramework):
        """Windows service definition that runs the FlexGet daemon."""

        _svc_name_ = 'FlexGet'
        _svc_display_name_ = 'FlexGet Daemon'
        _svc_description_ = 'Runs FlexGet tasks according to defined schedules'

        def __init__(self, args):
            win32serviceutil.ServiceFramework.__init__(self, args)
            self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
            socket.setdefaulttimeout(60)
            self.manager = None

        def SvcStop(self):
            # Called by the service manager; shut the daemon down cleanly.
            self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
            from flexget.manager import manager
            manager.shutdown(finish_queue=False)
            self.ReportServiceStatus(win32service.SERVICE_STOPPED)

        def SvcDoRun(self):
            # Service entry point: log the start event, then run the daemon.
            servicemanager.LogMsg(
                servicemanager.EVENTLOG_INFORMATION_TYPE,
                servicemanager.PYS_SERVICE_STARTED,
                (self._svc_name_, ''),
            )
            flexget.main(['daemon', 'start'])

except ImportError:
    # pywin32 is not installed (or not on Windows); the service class is
    # simply unavailable.
    pass


def do_cli(manager, options):
    """CLI handler for ``flexget service``: delegates to pywin32's helper."""
    import win32file
    import win32serviceutil

    if hasattr(sys, 'real_prefix'):
        # We are in a virtualenv, there is some special setup
        if not os.path.exists(os.path.join(sys.prefix, 'python.exe')):
            console('Creating a hard link to virtualenv python.exe in root of virtualenv')
            win32file.CreateHardLink(
                os.path.join(sys.prefix, 'python.exe'),
                os.path.join(sys.prefix, 'Scripts', 'python.exe'),
            )
    argv = options.args
    if options.help:
        argv = []
    # Hack sys.argv a bit so that we get a better usage message
    sys.argv[0] = 'flexget service'
    win32serviceutil.HandleCommandLine(AppServerSvc, argv=['flexget service'] + argv)


@event('options.register')
def register_parser_arguments():
    """Register the ``service`` subcommand (Windows only)."""
    if not sys.platform.startswith('win'):
        return
    # Still not fully working. Hidden for now.
    parser = options.register_command(
        'service',
        do_cli,
        # help='set up or control a windows service for the daemon',
        add_help=False,
    )
    parser.add_argument('--help', '-h', action='store_true')
    parser.add_argument('args', nargs=argparse.REMAINDER)
mit
PeterWangPo/phantomjs
src/qt/qtwebkit/Tools/Scripts/webkitpy/style/checkers/common.py
203
3229
# Copyright (C) 2010 Chris Jerdonek (cjerdonek@webkit.org)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1.  Redistributions of source code must retain the above copyright
#     notice, this list of conditions and the following disclaimer.
# 2.  Redistributions in binary form must reproduce the above copyright
#     notice, this list of conditions and the following disclaimer in the
#     documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Supports style checking not specific to any one file type."""


# FIXME: Test this list in the same way that the list of CppChecker
#        categories is tested, for example by checking that all of its
#        elements appear in the unit tests.  This should probably be done
#        after moving the relevant cpp_unittest.ErrorCollector code
#        into a shared location and refactoring appropriately.
categories = {
    "whitespace/carriage_return",
    "whitespace/tab",
}


class CarriageReturnChecker(object):

    """Detects trailing carriage returns and strips them from the lines."""

    def __init__(self, handle_style_error):
        # Callback invoked as (line_number, category, confidence, message).
        self._handle_style_error = handle_style_error

    def check(self, lines):
        """Report each line ending in "\\r", strip the "\\r", return the lines.

        The list is modified in place and also returned for convenience.
        """
        for index, text in enumerate(lines):
            if text.endswith("\r"):
                # Line numbers reported to the handler are one-based.
                self._handle_style_error(index + 1,
                                         "whitespace/carriage_return",
                                         1,
                                         "One or more unexpected \\r (^M) found; "
                                         "better to use only a \\n")
                lines[index] = text.rstrip("\r")
        return lines


class TabChecker(object):

    """Reports lines that contain hard tab characters."""

    def __init__(self, file_path, handle_style_error):
        # file_path is retained for callers; the checker itself only reports.
        self.file_path = file_path
        self.handle_style_error = handle_style_error

    def check(self, lines):
        # FIXME: share with cpp_style.
        for one_based_number, text in enumerate(lines, start=1):
            if "\t" in text:
                self.handle_style_error(one_based_number,
                                        "whitespace/tab", 5,
                                        "Line contains tab character.")
bsd-3-clause